private ParsedDocument parseFetchedDoc(
    PercolateContext context, BytesReference fetchedDoc, IndexService documentIndexService, String type) {
  ParsedDocument doc = null;
  XContentParser parser = null;
  try {
    parser = XContentFactory.xContent(fetchedDoc).createParser(fetchedDoc);
    MapperService mapperService = documentIndexService.mapperService();
    DocumentMapper docMapper = mapperService.documentMapperWithAutoCreate(type);
    doc = docMapper.parse(source(parser).type(type).flyweight(true));
    if (context.highlight() != null) {
      doc.setSource(fetchedDoc);
    }
  } catch (Throwable e) {
    throw new ElasticsearchParseException("failed to parse request", e);
  } finally {
    if (parser != null) {
      parser.close();
    }
  }

  if (doc == null) {
    throw new ElasticsearchParseException("No doc to percolate in the request");
  }

  return doc;
}
public void testNoParentNullFieldCreatedIfNoParentSpecified() throws Exception {
  Index index = new Index("_index", "testUUID");
  IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, Settings.EMPTY);
  NamedAnalyzer namedAnalyzer = new NamedAnalyzer("default", AnalyzerScope.INDEX, new StandardAnalyzer());
  IndexAnalyzers indexAnalyzers =
      new IndexAnalyzers(indexSettings, namedAnalyzer, namedAnalyzer, namedAnalyzer, Collections.emptyMap());
  SimilarityService similarityService = new SimilarityService(indexSettings, Collections.emptyMap());
  MapperService mapperService =
      new MapperService(
          indexSettings,
          indexAnalyzers,
          similarityService,
          new IndicesModule(emptyList()).getMapperRegistry(),
          () -> null);
  XContentBuilder mappingSource =
      jsonBuilder()
          .startObject()
          .startObject("some_type")
          .startObject("properties")
          .endObject()
          .endObject()
          .endObject();
  mapperService.merge(
      "some_type", new CompressedXContent(mappingSource.string()), MergeReason.MAPPING_UPDATE, false);
  Set<String> allFields = new HashSet<>(mapperService.simpleMatchToIndexNames("*"));
  assertTrue(allFields.contains("_parent"));
  assertFalse(allFields.contains("_parent#null"));
}
@Test
public void testDefaultMappingAndWithMappingOverrideWithMapperService() throws Exception {
  String defaultMapping =
      XContentFactory.jsonBuilder()
          .startObject()
          .startObject(MapperService.DEFAULT_MAPPING)
          .startObject("_source")
          .field("enabled", false)
          .endObject()
          .endObject()
          .endObject()
          .string();

  MapperService mapperService = MapperTests.newMapperService();
  mapperService.add(MapperService.DEFAULT_MAPPING, defaultMapping);

  String mapping =
      XContentFactory.jsonBuilder()
          .startObject()
          .startObject("type")
          .startObject("_source")
          .field("enabled", true)
          .endObject()
          .endObject()
          .endObject()
          .string();
  mapperService.add("my_type", mapping);

  DocumentMapper mapper = mapperService.documentMapper("my_type");
  assertThat(mapper.type(), equalTo("my_type"));
  assertThat(mapper.sourceMapper().enabled(), equalTo(true));
}
@Test
public void testPutMappingWithDocumentMapperBuilder() throws IOException {
  RootObjectMapper.Builder rootObjectMapperBuilder =
      new RootObjectMapper.Builder(INDEX_TYPE)
          .add(new StringFieldMapper.Builder("message_2").store(true));
  GetSettingsResponse getSettingsResponse =
      client()
          .admin()
          .indices()
          .getSettings(new GetSettingsRequest().indices(INDEX_NAME))
          .actionGet();
  MapperService mapperService = getMapperService();
  DocumentMapper documentMapper =
      new DocumentMapper.Builder(
              getSettingsResponse.getIndexToSettings().get(INDEX_NAME),
              rootObjectMapperBuilder,
              mapperService)
          .build(mapperService, mapperService.documentMapperParser());
  String expectedMappingSource = documentMapper.mappingSource().toString();
  PutMapping putMapping =
      new PutMapping.Builder(INDEX_NAME, INDEX_TYPE, expectedMappingSource).build();
  JestResult result = client.execute(putMapping);
  assertTrue(result.getErrorMessage(), result.isSucceeded());
}
public void testPercolatorFieldMapper_noQuery() throws Exception {
  addQueryMapping();
  ParsedDocument doc =
      mapperService
          .documentMapper(typeName)
          .parse("test", typeName, "1", XContentFactory.jsonBuilder().startObject().endObject().bytes());
  assertThat(doc.rootDoc().getFields(fieldType.getQueryBuilderFieldName()).length, equalTo(0));

  try {
    mapperService
        .documentMapper(typeName)
        .parse(
            "test",
            typeName,
            "1",
            XContentFactory.jsonBuilder().startObject().nullField(fieldName).endObject().bytes());
    // without this fail() the test would pass silently if no exception were thrown
    fail("expected a MapperParsingException for an explicit null query");
  } catch (MapperParsingException e) {
    assertThat(
        e.getDetailedMessage(), containsString("query malformed, must start with start_object"));
  }
}
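// The addQueryMapping() helper used above is defined elsewhere in the test class. A minimal
// sketch of what it presumably does, assuming typeName, fieldName, fieldType, and
// mapperService are fields of the test class and that the field type can be looked up via
// mapperService.fullName(); the fixed names and the cast target are assumptions, not the
// verbatim implementation:
private void addQueryMapping() throws Exception {
  typeName = "query_type"; // hypothetical fixed name; the real helper may randomize it
  fieldName = "query_field";
  String percolatorMapper =
      XContentFactory.jsonBuilder()
          .startObject()
          .startObject(typeName)
          .startObject("properties")
          .startObject(fieldName)
          .field("type", "percolator")
          .endObject()
          .endObject()
          .endObject()
          .endObject()
          .string();
  mapperService.merge(
      typeName,
      new CompressedXContent(percolatorMapper),
      MapperService.MergeReason.MAPPING_UPDATE,
      true);
  fieldType = (PercolatorFieldMapper.FieldType) mapperService.fullName(fieldName);
}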
/** Checks the mappings for compatibility with the current version */
private void checkMappingsCompatibility(IndexMetaData indexMetaData) {
  Index index = new Index(indexMetaData.getIndex());
  Settings settings = indexMetaData.getSettings();
  try {
    SimilarityService similarityService = new SimilarityService(index, settings);
    // We cannot instantiate a real analysis service at this point because the node might not
    // have been started yet. However, we don't really need real analyzers at this stage - so
    // we can fake it.
    try (AnalysisService analysisService = new FakeAnalysisService(index, settings)) {
      try (MapperService mapperService =
          new MapperService(index, settings, analysisService, similarityService, scriptService)) {
        for (ObjectCursor<MappingMetaData> cursor : indexMetaData.getMappings().values()) {
          MappingMetaData mappingMetaData = cursor.value;
          mapperService.merge(mappingMetaData.type(), mappingMetaData.source(), false, false);
        }
      }
    }
  } catch (Exception ex) {
    // Wrap the inner exception so we have the index name in the exception message
    throw new IllegalStateException(
        "unable to upgrade the mappings for the index ["
            + indexMetaData.getIndex()
            + "], reason: ["
            + ex.getMessage()
            + "]",
        ex);
  }
}
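// FakeAnalysisService is referenced above but not shown. A minimal sketch of the idea,
// assuming AnalysisService is closeable and that named-analyzer lookup is the only entry
// point the mapping-compatibility check exercises; the constructor and overrides are
// assumptions, not the verbatim implementation:
private static class FakeAnalysisService extends AnalysisService {

  // an analyzer that must never actually be asked to analyze text at this stage
  private final Analyzer fakeAnalyzer =
      new Analyzer() {
        @Override
        protected TokenStreamComponents createComponents(String fieldName) {
          throw new UnsupportedOperationException("this fake analyzer should never be used");
        }
      };

  FakeAnalysisService(Index index, Settings indexSettings) {
    super(index, indexSettings);
  }

  @Override
  public NamedAnalyzer analyzer(String name) {
    // hand back a named wrapper for any requested analyzer so mappings can be parsed
    return new NamedAnalyzer(name, fakeAnalyzer);
  }

  @Override
  public void close() {
    fakeAnalyzer.close();
    super.close();
  }
}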
public void testNestedRangeQuery() throws IOException {
  // create a nested geo_point type with a subfield named "geohash" (explicit testing for
  // ISSUE #15179)
  MapperService mapperService = createShardContext().getMapperService();
  String nestedMapping =
      "{\"nested_doc\" : {\"properties\" : {"
          + "\"locations\": {\"properties\": {"
          + "\"geohash\": {\"type\": \"geo_point\"}},"
          + "\"type\": \"nested\"}"
          + "}}}";
  mapperService.merge(
      "nested_doc",
      new CompressedXContent(nestedMapping),
      MapperService.MergeReason.MAPPING_UPDATE,
      false);

  // create a range query on the nested locations.geohash sub-field
  String queryJson =
      "{\n"
          + "  \"nested\": {\n"
          + "    \"path\": \"locations\",\n"
          + "    \"query\": {\n"
          + "      \"geo_distance_range\": {\n"
          + "        \"from\": \"0.0km\",\n"
          + "        \"to\" : \"200.0km\",\n"
          + "        \"locations.geohash\": \"s7ws01wyd7ws\"\n"
          + "      }\n"
          + "    }\n"
          + "  }\n"
          + "}\n";
  NestedQueryBuilder builder = (NestedQueryBuilder) parseQuery(queryJson);
  QueryShardContext context = createShardContext();
  builder.toQuery(context);
}
public void testConflictNewType() throws Exception {
  XContentBuilder mapping =
      XContentFactory.jsonBuilder()
          .startObject()
          .startObject("type1")
          .startObject("properties")
          .startObject("foo")
          .field("type", "long")
          .endObject()
          .endObject()
          .endObject()
          .endObject();
  MapperService mapperService =
      createIndex("test", Settings.settingsBuilder().build(), "type1", mapping).mapperService();

  XContentBuilder update =
      XContentFactory.jsonBuilder()
          .startObject()
          .startObject("type2")
          .startObject("properties")
          .startObject("foo")
          .field("type", "double")
          .endObject()
          .endObject()
          .endObject()
          .endObject();

  try {
    mapperService.merge(
        "type2",
        new CompressedXContent(update.string()),
        MapperService.MergeReason.MAPPING_UPDATE,
        false);
    fail();
  } catch (IllegalArgumentException e) {
    // expected
    assertTrue(
        e.getMessage(),
        e.getMessage().contains("mapper [foo] cannot be changed from type [long] to [double]"));
  }

  // merging the same conflicting mapping again must fail the same way, which shows the
  // first failed merge left no partial state behind
  try {
    mapperService.merge(
        "type2",
        new CompressedXContent(update.string()),
        MapperService.MergeReason.MAPPING_UPDATE,
        false);
    fail();
  } catch (IllegalArgumentException e) {
    // expected
    assertTrue(
        e.getMessage(),
        e.getMessage().contains("mapper [foo] cannot be changed from type [long] to [double]"));
  }

  assertTrue(
      mapperService.documentMapper("type1").mapping().root().getMapper("foo")
          instanceof LongFieldMapper);
  assertNull(mapperService.documentMapper("type2"));
}
static SearchContext createSearchContext(String indexName, String parentType, String childType)
    throws IOException {
  Settings settings =
      Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_6_0).build();
  IndexService indexService = createIndex(indexName, settings);
  MapperService mapperService = indexService.mapperService();
  // Parent/child parsers require that the parent and child types are present in the mapping.
  // Sometimes we want a nested object field in the parent type that triggers
  // nonNestedDocsFilter to be used.
  mapperService.merge(
      parentType,
      new CompressedXContent(
          PutMappingRequest.buildFromSimplifiedDef(
                  parentType,
                  "nested_field",
                  random().nextBoolean() ? "type=nested" : "type=object")
              .string()),
      true,
      false);
  mapperService.merge(
      childType,
      new CompressedXContent(
          PutMappingRequest.buildFromSimplifiedDef(
                  childType,
                  "_parent",
                  "type=" + parentType,
                  CHILD_SCORE_NAME,
                  "type=double,doc_values=false")
              .string()),
      true,
      false);
  return createSearchContext(indexService);
}
public void testAllowNoAdditionalSettings() throws Exception {
  addQueryMapping();
  IndexService indexService = createIndex("test1", Settings.EMPTY);
  MapperService mapperService = indexService.mapperService();
  String percolatorMapper =
      XContentFactory.jsonBuilder()
          .startObject()
          .startObject(typeName)
          .startObject("properties")
          .startObject(fieldName)
          .field("type", "percolator")
          .field("index", "no")
          .endObject()
          .endObject()
          .endObject()
          .endObject()
          .string();
  try {
    mapperService.merge(
        typeName,
        new CompressedXContent(percolatorMapper),
        MapperService.MergeReason.MAPPING_UPDATE,
        true);
    fail("MapperParsingException expected");
  } catch (MapperParsingException e) {
    assertThat(
        e.getMessage(),
        equalTo(
            "Mapping definition for ["
                + fieldName
                + "] has unsupported parameters: [index : no]"));
  }
}
public void testConflictSameType() throws Exception {
  XContentBuilder mapping =
      XContentFactory.jsonBuilder()
          .startObject()
          .startObject("type")
          .startObject("properties")
          .startObject("foo")
          .field("type", "long")
          .endObject()
          .endObject()
          .endObject()
          .endObject();
  MapperService mapperService =
      createIndex("test", Settings.settingsBuilder().build(), "type", mapping).mapperService();

  XContentBuilder update =
      XContentFactory.jsonBuilder()
          .startObject()
          .startObject("type")
          .startObject("properties")
          .startObject("foo")
          .field("type", "double")
          .endObject()
          .endObject()
          .endObject()
          .endObject();

  try {
    mapperService.merge(
        "type",
        new CompressedXContent(update.string()),
        MapperService.MergeReason.MAPPING_UPDATE,
        false);
    fail();
  } catch (IllegalArgumentException e) {
    assertThat(
        e.getMessage(),
        containsString(
            "mapper [foo] of different type, current_type [long], merged_type [double]"));
  }

  // merging the same conflicting mapping again must fail the same way, which shows the
  // first failed merge left no partial state behind
  try {
    mapperService.merge(
        "type",
        new CompressedXContent(update.string()),
        MapperService.MergeReason.MAPPING_UPDATE,
        false);
    fail();
  } catch (IllegalArgumentException e) {
    assertThat(
        e.getMessage(),
        containsString(
            "mapper [foo] of different type, current_type [long], merged_type [double]"));
  }

  assertTrue(
      mapperService.documentMapper("type").mapping().root().getMapper("foo")
          instanceof LongFieldMapper);
}
public Builder(
    String index,
    Settings indexSettings,
    RootObjectMapper.Builder builder,
    MapperService mapperService) {
  this.index = index;
  this.indexSettings = indexSettings;
  this.builderContext = new Mapper.BuilderContext(indexSettings, new ContentPath(1));
  this.rootObjectMapper = builder.build(builderContext);

  // TODO: find a cleaner way to handle existing root mappings and using their field type as
  // the default. The vast majority of these root mappers only need the existing type for
  // backwards compatibility, since the pre 2.0 field type settings could be modified.

  // UID first so it will be the first stored field to load (so it will benefit from
  // "fields: []" early termination)
  this.rootMappers.put(
      UidFieldMapper.class,
      new UidFieldMapper(indexSettings, mapperService.fullName(UidFieldMapper.NAME)));
  this.rootMappers.put(
      IdFieldMapper.class,
      new IdFieldMapper(indexSettings, mapperService.fullName(IdFieldMapper.NAME)));
  this.rootMappers.put(
      RoutingFieldMapper.class,
      new RoutingFieldMapper(indexSettings, mapperService.fullName(RoutingFieldMapper.NAME)));
  // add default mappers, order is important (for example analyzer should come before the
  // rest to set context.analyzer)
  this.rootMappers.put(
      SizeFieldMapper.class,
      new SizeFieldMapper(indexSettings, mapperService.fullName(SizeFieldMapper.NAME)));
  this.rootMappers.put(
      IndexFieldMapper.class,
      new IndexFieldMapper(indexSettings, mapperService.fullName(IndexFieldMapper.NAME)));
  this.rootMappers.put(SourceFieldMapper.class, new SourceFieldMapper(indexSettings));
  this.rootMappers.put(
      TypeFieldMapper.class,
      new TypeFieldMapper(indexSettings, mapperService.fullName(TypeFieldMapper.NAME)));
  this.rootMappers.put(
      AllFieldMapper.class,
      new AllFieldMapper(indexSettings, mapperService.fullName(AllFieldMapper.NAME)));
  this.rootMappers.put(
      TimestampFieldMapper.class,
      new TimestampFieldMapper(indexSettings, mapperService.fullName(TimestampFieldMapper.NAME)));
  this.rootMappers.put(TTLFieldMapper.class, new TTLFieldMapper(indexSettings));
  this.rootMappers.put(VersionFieldMapper.class, new VersionFieldMapper(indexSettings));
  this.rootMappers.put(
      ParentFieldMapper.class,
      new ParentFieldMapper(indexSettings, mapperService.fullName(ParentFieldMapper.NAME)));
  // _field_names last so that it can see all other fields
  this.rootMappers.put(
      FieldNamesFieldMapper.class,
      new FieldNamesFieldMapper(
          indexSettings, mapperService.fullName(FieldNamesFieldMapper.NAME)));
}
@Override
protected void initializeAdditionalMappings(MapperService mapperService) throws IOException {
  mapperService.merge(
      PARENT_TYPE,
      new CompressedXContent(
          PutMappingRequest.buildFromSimplifiedDef(
                  PARENT_TYPE,
                  STRING_FIELD_NAME, "type=text",
                  STRING_FIELD_NAME_2, "type=keyword",
                  INT_FIELD_NAME, "type=integer",
                  DOUBLE_FIELD_NAME, "type=double",
                  BOOLEAN_FIELD_NAME, "type=boolean",
                  DATE_FIELD_NAME, "type=date",
                  OBJECT_FIELD_NAME, "type=object")
              .string()),
      MapperService.MergeReason.MAPPING_UPDATE,
      false);
  mapperService.merge(
      CHILD_TYPE,
      new CompressedXContent(
          PutMappingRequest.buildFromSimplifiedDef(
                  CHILD_TYPE,
                  "_parent", "type=" + PARENT_TYPE,
                  STRING_FIELD_NAME, "type=text",
                  STRING_FIELD_NAME_2, "type=keyword",
                  INT_FIELD_NAME, "type=integer",
                  DOUBLE_FIELD_NAME, "type=double",
                  BOOLEAN_FIELD_NAME, "type=boolean",
                  DATE_FIELD_NAME, "type=date",
                  OBJECT_FIELD_NAME, "type=object")
              .string()),
      MapperService.MergeReason.MAPPING_UPDATE,
      false);
  mapperService.merge(
      "just_a_type",
      new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("just_a_type").string()),
      MapperService.MergeReason.MAPPING_UPDATE,
      false);
}
public void testPercolatorFieldMapper() throws Exception {
  addQueryMapping();
  QueryBuilder queryBuilder = termQuery("field", "value");
  ParsedDocument doc =
      mapperService
          .documentMapper(typeName)
          .parse(
              "test",
              typeName,
              "1",
              XContentFactory.jsonBuilder()
                  .startObject()
                  .field(fieldName, queryBuilder)
                  .endObject()
                  .bytes());

  assertThat(doc.rootDoc().getFields(fieldType.getExtractedTermsField()).length, equalTo(1));
  assertThat(
      doc.rootDoc().getFields(fieldType.getExtractedTermsField())[0].binaryValue().utf8ToString(),
      equalTo("field\0value"));
  assertThat(doc.rootDoc().getFields(fieldType.getQueryBuilderFieldName()).length, equalTo(1));
  assertThat(doc.rootDoc().getFields(fieldType.getExtractionResultFieldName()).length, equalTo(1));
  assertThat(
      doc.rootDoc().getFields(fieldType.getExtractionResultFieldName())[0].stringValue(),
      equalTo(EXTRACTION_COMPLETE));
  BytesRef qbSource =
      doc.rootDoc().getFields(fieldType.getQueryBuilderFieldName())[0].binaryValue();
  assertQueryBuilder(qbSource, queryBuilder);

  // add a query for which we don't extract terms
  queryBuilder = rangeQuery("field").from("a").to("z");
  doc =
      mapperService
          .documentMapper(typeName)
          .parse(
              "test",
              typeName,
              "1",
              XContentFactory.jsonBuilder()
                  .startObject()
                  .field(fieldName, queryBuilder)
                  .endObject()
                  .bytes());
  assertThat(doc.rootDoc().getFields(fieldType.getExtractionResultFieldName()).length, equalTo(1));
  assertThat(
      doc.rootDoc().getFields(fieldType.getExtractionResultFieldName())[0].stringValue(),
      equalTo(EXTRACTION_FAILED));
  assertThat(doc.rootDoc().getFields(fieldType.getExtractedTermsField()).length, equalTo(0));
  assertThat(doc.rootDoc().getFields(fieldType.getQueryBuilderFieldName()).length, equalTo(1));
  qbSource = doc.rootDoc().getFields(fieldType.getQueryBuilderFieldName())[0].binaryValue();
  assertQueryBuilder(qbSource, queryBuilder);
}
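// assertQueryBuilder() is defined elsewhere in the test class. A minimal sketch of the
// idea - parse the stored bytes and the expected builder to comparable structures and
// assert they match. The assumption that the query builder field stores the query's raw
// XContent (JSON) is ours, not the verbatim implementation:
private void assertQueryBuilder(BytesRef actual, QueryBuilder expected) throws IOException {
  // render the expected query builder to JSON
  XContentBuilder builder = XContentFactory.jsonBuilder();
  expected.toXContent(builder, ToXContent.EMPTY_PARAMS);
  // convert both representations to maps so the comparison ignores formatting
  Map<String, Object> expectedMap = XContentHelper.convertToMap(builder.bytes(), false).v2();
  Map<String, Object> actualMap =
      XContentHelper.convertToMap(
              new BytesArray(actual.bytes, actual.offset, actual.length), false)
          .v2();
  assertEquals(expectedMap, actualMap);
}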
private void updateMappingOnMaster(final String index, final String type) {
  try {
    MapperService mapperService = indicesService.indexServiceSafe(index).mapperService();
    final DocumentMapper documentMapper = mapperService.documentMapper(type);
    if (documentMapper == null) { // should not happen
      return;
    }
    IndexMetaData metaData = clusterService.state().metaData().index(index);
    if (metaData == null) {
      return;
    }

    long orderId = mappingUpdatedAction.generateNextMappingUpdateOrder();
    documentMapper.refreshSource();
    DiscoveryNode node = clusterService.localNode();
    final MappingUpdatedAction.MappingUpdatedRequest request =
        new MappingUpdatedAction.MappingUpdatedRequest(
            index,
            metaData.uuid(),
            type,
            documentMapper.mappingSource(),
            orderId,
            node != null ? node.id() : null);
    mappingUpdatedAction.execute(
        request,
        new ActionListener<MappingUpdatedAction.MappingUpdatedResponse>() {
          @Override
          public void onResponse(
              MappingUpdatedAction.MappingUpdatedResponse mappingUpdatedResponse) {
            // all is well
          }

          @Override
          public void onFailure(Throwable e) {
            try {
              logger.warn(
                  "failed to update master on updated mapping for index [{}], type [{}] and source [{}]",
                  e,
                  index,
                  type,
                  documentMapper.mappingSource().string());
            } catch (IOException e1) {
              // ignore
            }
          }
        });
  } catch (Exception e) {
    logger.warn(
        "failed to update master on updated mapping for index [{}], type [{}]", e, index, type);
  }
}
private ParsedDocument parseDocument(String index, String type, BytesReference doc) {
  MapperService mapperService = indexShard.mapperService();
  IndexService indexService = indexShard.indexService();

  // TODO: make parsing not dynamically create fields not in the original mapping
  Tuple<DocumentMapper, Boolean> docMapper = mapperService.documentMapperWithAutoCreate(type);
  ParsedDocument parsedDocument =
      docMapper.v1().parse(source(doc).type(type).flyweight(true)).setMappingsModified(docMapper);
  if (parsedDocument.mappingsModified()) {
    mappingUpdatedAction.updateMappingOnMaster(index, docMapper.v1(), indexService.indexUUID());
  }
  return parsedDocument;
}
private Analyzer getAnalyzerAtField(String field, @Nullable Map<String, String> perFieldAnalyzer) {
  MapperService mapperService = indexShard.mapperService();
  Analyzer analyzer;
  if (perFieldAnalyzer != null && perFieldAnalyzer.containsKey(field)) {
    // an analyzer was explicitly requested for this field
    analyzer = mapperService.analysisService().analyzer(perFieldAnalyzer.get(field));
  } else {
    // otherwise fall back to the index analyzer of the field's mapper; note this assumes
    // the field is mapped - an unmapped field would NPE here
    analyzer = mapperService.smartNameFieldMapper(field).indexAnalyzer();
  }
  if (analyzer == null) {
    analyzer = mapperService.analysisService().defaultIndexAnalyzer();
  }
  return analyzer;
}
// multiple percolator fields are allowed in the mapping, but only one field can be used at
// index time.
public void testMultiplePercolatorFields() throws Exception {
  String typeName = "another_type";
  String percolatorMapper =
      XContentFactory.jsonBuilder()
          .startObject()
          .startObject(typeName)
          .startObject("_field_names")
          .field("enabled", false)
          .endObject() // makes testing easier
          .startObject("properties")
          .startObject("query_field1")
          .field("type", "percolator")
          .endObject()
          .startObject("query_field2")
          .field("type", "percolator")
          .endObject()
          .endObject()
          .endObject()
          .endObject()
          .string();
  mapperService.merge(
      typeName,
      new CompressedXContent(percolatorMapper),
      MapperService.MergeReason.MAPPING_UPDATE,
      true);

  QueryBuilder queryBuilder = matchQuery("field", "value");
  ParsedDocument doc =
      mapperService
          .documentMapper(typeName)
          .parse(
              "test",
              typeName,
              "1",
              jsonBuilder()
                  .startObject()
                  .field("query_field1", queryBuilder)
                  .field("query_field2", queryBuilder)
                  .endObject()
                  .bytes());
  assertThat(
      doc.rootDoc().getFields().size(),
      equalTo(11)); // also includes _uid (1), type (2), source (1)

  BytesRef queryBuilderAsBytes =
      doc.rootDoc().getField("query_field1.query_builder_field").binaryValue();
  assertQueryBuilder(queryBuilderAsBytes, queryBuilder);

  queryBuilderAsBytes = doc.rootDoc().getField("query_field2.query_builder_field").binaryValue();
  assertQueryBuilder(queryBuilderAsBytes, queryBuilder);
}
@Override
public TerminationHandle warmReader(final IndexShard indexShard, final Engine.Searcher searcher) {
  final MapperService mapperService = indexShard.mapperService();
  final Map<String, MappedFieldType> warmUpGlobalOrdinals = new HashMap<>();
  for (DocumentMapper docMapper : mapperService.docMappers(false)) {
    for (FieldMapper fieldMapper : docMapper.mappers()) {
      final MappedFieldType fieldType = fieldMapper.fieldType();
      final String indexName = fieldType.name();
      if (fieldType.eagerGlobalOrdinals() == false) {
        continue;
      }
      warmUpGlobalOrdinals.put(indexName, fieldType);
    }
  }
  final IndexFieldDataService indexFieldDataService = indexShard.indexFieldDataService();
  final CountDownLatch latch = new CountDownLatch(warmUpGlobalOrdinals.size());
  for (final MappedFieldType fieldType : warmUpGlobalOrdinals.values()) {
    executor.execute(
        () -> {
          try {
            final long start = System.nanoTime();
            IndexFieldData.Global ifd = indexFieldDataService.getForField(fieldType);
            DirectoryReader reader = searcher.getDirectoryReader();
            IndexFieldData<?> global = ifd.loadGlobal(reader);
            if (reader.leaves().isEmpty() == false) {
              global.load(reader.leaves().get(0));
            }
            if (indexShard.warmerService().logger().isTraceEnabled()) {
              indexShard
                  .warmerService()
                  .logger()
                  .trace(
                      "warmed global ordinals for [{}], took [{}]",
                      fieldType.name(),
                      TimeValue.timeValueNanos(System.nanoTime() - start));
            }
          } catch (Exception e) {
            indexShard
                .warmerService()
                .logger()
                .warn("failed to warm-up global ordinals for [{}]", e, fieldType.name());
          } finally {
            latch.countDown();
          }
        });
  }
  return () -> latch.await();
}
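// TerminationHandle is referenced above but not declared here. The "() -> latch.await()"
// lambda implies it is a functional interface shaped roughly like this sketch (not the
// verbatim declaration):
interface TerminationHandle {
  // a no-op handle for warmers that have nothing to wait on
  TerminationHandle NO_WAIT = () -> {};

  /** Blocks until execution of the associated warm-up tasks has finished. */
  void awaitTermination() throws InterruptedException;
}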
@Override
protected void initializeAdditionalMappings(MapperService mapperService) throws IOException {
  mapperService.merge(
      "nested_doc",
      new CompressedXContent(
          PutMappingRequest.buildFromSimplifiedDef(
                  "nested_doc",
                  STRING_FIELD_NAME, "type=text",
                  INT_FIELD_NAME, "type=integer",
                  DOUBLE_FIELD_NAME, "type=double",
                  BOOLEAN_FIELD_NAME, "type=boolean",
                  DATE_FIELD_NAME, "type=date",
                  OBJECT_FIELD_NAME, "type=object",
                  GEO_POINT_FIELD_NAME, "type=geo_point",
                  "nested1", "type=nested")
              .string()),
      MapperService.MergeReason.MAPPING_UPDATE,
      false);
}
ObjectMapper(
    String name,
    String fullPath,
    boolean enabled,
    Nested nested,
    Dynamic dynamic,
    ContentPath.Type pathType,
    Map<String, Mapper> mappers,
    @Nullable @IndexSettings Settings settings) {
  this.name = name;
  this.fullPath = fullPath;
  this.enabled = enabled;
  this.nested = nested;
  this.dynamic = dynamic;
  this.pathType = pathType;
  this.mappers = UpdateInPlaceMap.of(MapperService.getFieldMappersCollectionSwitch(settings));
  if (mappers != null) {
    UpdateInPlaceMap<String, Mapper>.Mutator mappersMutator = this.mappers.mutator();
    mappersMutator.putAll(mappers);
    mappersMutator.close();
  }
  this.nestedTypePathAsString = "__" + fullPath;
  this.nestedTypePathAsBytes = new BytesRef(nestedTypePathAsString);
  this.nestedTypeFilter = new TermFilter(new Term(TypeFieldMapper.NAME, nestedTypePathAsBytes));
}
private FieldLookup loadFieldData(String name) {
  FieldLookup data = cachedFieldData.get(name);
  if (data == null) {
    MappedFieldType fieldType = mapperService.smartNameFieldType(name, types);
    if (fieldType == null) {
      throw new IllegalArgumentException(
          "No field found for [" + name + "] in mapping with types " + Arrays.toString(types));
    }
    data = new FieldLookup(fieldType);
    cachedFieldData.put(name, data);
  }
  if (data.fields() == null) {
    String fieldName = data.fieldType().names().indexName();
    fieldVisitor.reset(fieldName);
    try {
      reader.document(docId, fieldVisitor);
      fieldVisitor.postProcess(data.fieldType());
      data.fields(
          ImmutableMap.of(name, fieldVisitor.fields().get(data.fieldType().names().indexName())));
    } catch (IOException e) {
      throw new ElasticsearchParseException("failed to load field [{}]", e, name);
    }
  }
  return data;
}
@Before
public void init() throws Exception {
  indexService = createIndex("test", Settings.EMPTY);
  mapperService = indexService.mapperService();

  String mapper =
      XContentFactory.jsonBuilder()
          .startObject()
          .startObject("type")
          .startObject("_field_names")
          .field("enabled", false)
          .endObject() // makes testing easier
          .startObject("properties")
          .startObject("field")
          .field("type", "text")
          .endObject()
          .startObject("number_field")
          .field("type", "long")
          .endObject()
          .startObject("date_field")
          .field("type", "date")
          .endObject()
          .endObject()
          .endObject()
          .endObject()
          .string();
  mapperService.merge(
      "type", new CompressedXContent(mapper), MapperService.MergeReason.MAPPING_UPDATE, true);
}
private IndexShard newShard(
    boolean primary, DiscoveryNode node, IndexMetaData indexMetaData, Path homePath)
    throws IOException {
  // add node name to settings for proper logging
  final Settings nodeSettings = Settings.builder().put("node.name", node.getName()).build();
  final IndexSettings indexSettings = new IndexSettings(indexMetaData, nodeSettings);
  ShardRouting shardRouting =
      TestShardRouting.newShardRouting(
          shardId, node.getId(), primary, ShardRoutingState.INITIALIZING);
  final Path path = Files.createDirectories(homePath.resolve(node.getId()));
  final NodeEnvironment.NodePath nodePath = new NodeEnvironment.NodePath(path);
  ShardPath shardPath =
      new ShardPath(false, nodePath.resolve(shardId), nodePath.resolve(shardId), shardId);
  Store store = createStore(indexSettings, shardPath);
  IndexCache indexCache =
      new IndexCache(indexSettings, new DisabledQueryCache(indexSettings), null);
  MapperService mapperService =
      MapperTestUtils.newMapperService(homePath, indexSettings.getSettings());
  for (Map.Entry<String, String> type : indexMapping.entrySet()) {
    mapperService.merge(
        type.getKey(),
        new CompressedXContent(type.getValue()),
        MapperService.MergeReason.MAPPING_RECOVERY,
        true);
  }
  SimilarityService similarityService =
      new SimilarityService(indexSettings, Collections.emptyMap());
  final IndexEventListener indexEventListener = new IndexEventListener() {};
  final Engine.Warmer warmer = searcher -> {};
  return new IndexShard(
      shardRouting,
      indexSettings,
      shardPath,
      store,
      indexCache,
      mapperService,
      similarityService,
      null,
      null,
      indexEventListener,
      null,
      threadPool,
      BigArrays.NON_RECYCLING_INSTANCE,
      warmer,
      Collections.emptyList(),
      Collections.emptyList());
}
public void setUp() throws Exception {
  super.setUp();
  MapperService mapperService = queryParserService().mapperService;
  mapperService.merge(
      PARENT_TYPE,
      new CompressedXContent(
          PutMappingRequest.buildFromSimplifiedDef(
                  PARENT_TYPE,
                  STRING_FIELD_NAME, "type=string",
                  INT_FIELD_NAME, "type=integer",
                  DOUBLE_FIELD_NAME, "type=double",
                  BOOLEAN_FIELD_NAME, "type=boolean",
                  DATE_FIELD_NAME, "type=date",
                  OBJECT_FIELD_NAME, "type=object")
              .string()),
      false,
      false);
  mapperService.merge(
      CHILD_TYPE,
      new CompressedXContent(
          PutMappingRequest.buildFromSimplifiedDef(
                  CHILD_TYPE,
                  "_parent", "type=" + PARENT_TYPE,
                  STRING_FIELD_NAME, "type=string",
                  INT_FIELD_NAME, "type=integer",
                  DOUBLE_FIELD_NAME, "type=double",
                  BOOLEAN_FIELD_NAME, "type=boolean",
                  DATE_FIELD_NAME, "type=date",
                  OBJECT_FIELD_NAME, "type=object")
              .string()),
      false,
      false);
}
public void testDefaultFloatingPointMappings() throws IOException {
  MapperService mapperService = createIndex("test").mapperService();
  String mapping =
      jsonBuilder()
          .startObject()
          .startObject("type")
          .field("numeric_detection", true)
          .endObject()
          .endObject()
          .string();
  mapperService.merge(
      "type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false);
  DocumentMapper mapper = mapperService.documentMapper("type");
  doTestDefaultFloatingPointMappings(mapper, XContentFactory.jsonBuilder());
  doTestDefaultFloatingPointMappings(mapper, XContentFactory.yamlBuilder());
  doTestDefaultFloatingPointMappings(mapper, XContentFactory.smileBuilder());
  doTestDefaultFloatingPointMappings(mapper, XContentFactory.cborBuilder());
}
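// doTestDefaultFloatingPointMappings() is defined elsewhere in the test class. A minimal
// sketch of the idea, assuming that with numeric_detection enabled a decimal value should
// be dynamically mapped as "float" by default; the field names and assertions here are
// illustrative, not the verbatim implementation:
private void doTestDefaultFloatingPointMappings(DocumentMapper mapper, XContentBuilder builder)
    throws IOException {
  BytesReference source =
      builder
          .startObject()
          .field("foo", 3.2f) // a plain decimal value
          .field("bar", "3.2") // a string that numeric detection should also catch
          .endObject()
          .bytes();
  ParsedDocument parsedDocument = mapper.parse("test", "type", "1", source);
  Mapping update = parsedDocument.dynamicMappingsUpdate();
  assertNotNull(update);
  // the dynamically added field should default to float rather than double
  Mapper fooMapper = update.root().getMapper("foo");
  assertThat(fooMapper, instanceOf(FieldMapper.class));
  assertEquals("float", ((FieldMapper) fooMapper).fieldType().typeName());
}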
@Override
public PostingsFormat getPostingsFormatForField(String field) {
  final FieldMappers indexName = mapperService.indexName(field);
  if (indexName == null) {
    logger.warn("no index mapper found for field: [{}] returning default postings format", field);
    return defaultPostingFormat;
  }
  PostingsFormatProvider postingsFormat = indexName.mapper().postingsFormatProvider();
  return postingsFormat != null ? postingsFormat.get() : defaultPostingFormat;
}
public void testNestedRangeQuery() throws IOException {
  // geo distance range queries are no longer supported in 5.0; they are replaced by
  // aggregations or sort
  if (createShardContext()
      .indexVersionCreated()
      .onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
    return;
  }

  // create a nested geo_point type with a subfield named "geohash" (explicit testing for
  // ISSUE #15179)
  MapperService mapperService = createShardContext().getMapperService();
  String nestedMapping =
      "{\"nested_doc\" : {\"properties\" : {"
          + "\"locations\": {\"properties\": {"
          + "\"geohash\": {\"type\": \"geo_point\"}},"
          + "\"type\": \"nested\"}"
          + "}}}";
  mapperService.merge(
      "nested_doc",
      new CompressedXContent(nestedMapping),
      MapperService.MergeReason.MAPPING_UPDATE,
      false);

  // create a range query on the nested locations.geohash sub-field
  String queryJson =
      "{\n"
          + "  \"nested\": {\n"
          + "    \"path\": \"locations\",\n"
          + "    \"query\": {\n"
          + "      \"geo_distance_range\": {\n"
          + "        \"from\": \"0.0km\",\n"
          + "        \"to\" : \"200.0km\",\n"
          + "        \"locations.geohash\": \"s7ws01wyd7ws\"\n"
          + "      }\n"
          + "    }\n"
          + "  }\n"
          + "}\n";
  NestedQueryBuilder builder = (NestedQueryBuilder) parseQuery(queryJson);
  QueryShardContext context = createShardContext();
  builder.toQuery(context);
}
@Override
public DocValuesFormat getDocValuesFormatForField(String field) {
  final FieldMappers indexName = mapperService.indexName(field);
  if (indexName == null) {
    logger.warn("no index mapper found for field: [{}] returning default doc values format", field);
    return defaultDocValuesFormat;
  }
  DocValuesFormatProvider docValuesFormat = indexName.mapper().docValuesFormatProvider();
  return docValuesFormat != null ? docValuesFormat.get() : defaultDocValuesFormat;
}
private void addMappers(
    Collection<ObjectMapper> objectMappers, Collection<FieldMapper> fieldMappers) {
  assert mappingLock.isWriteLockedByCurrentThread();
  // first, ensure we don't have any incompatible new fields
  mapperService.checkNewMappersCompatibility(objectMappers, fieldMappers, true);
  // update mappers for this document type
  MapBuilder<String, ObjectMapper> builder = MapBuilder.newMapBuilder(this.objectMappers);
  for (ObjectMapper objectMapper : objectMappers) {
    builder.put(objectMapper.fullPath(), objectMapper);
    if (objectMapper.nested().isNested()) {
      hasNestedObjects = true;
    }
  }
  this.objectMappers = builder.immutableMap();
  this.fieldMappers = this.fieldMappers.copyAndAllAll(fieldMappers);
  // finally, update for the entire index
  mapperService.addMappers(objectMappers, fieldMappers);
}