private void addFieldData(ParseContext context, FieldMapper<String> mapper, Collection<String> data) throws IOException {
    if (data != null && !data.isEmpty()) {
        if (mappers.get(mapper.names().indexName()) == null) {
            // New mapper
            context.setWithinNewMapper();
            try {
                parseData(context, mapper, data);

                FieldMapperListener.Aggregator newFields = new FieldMapperListener.Aggregator();
                ObjectMapperListener.Aggregator newObjects = new ObjectMapperListener.Aggregator();
                mapper.traverse(newFields);
                mapper.traverse(newObjects);
                // callback on adding those fields!
                context.docMapper().addFieldMappers(newFields.mappers);
                context.docMapper().addObjectMappers(newObjects.mappers);

                context.setMappingsModified();

                synchronized (mutex) {
                    UpdateInPlaceMap<String, FieldMapper<String>>.Mutator mappingMutator = this.mappers.mutator();
                    mappingMutator.put(mapper.names().indexName(), mapper);
                    mappingMutator.close();
                }
            } finally {
                context.clearWithinNewMapper();
            }
        } else {
            // Mapper already added
            parseData(context, mapper, data);
        }
    }
}
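// Removes every field mapper and object mapper belonging to the given document mapper from the
// name, index-name, full-name and full-path lookup maps, rebuilding each map through a MapBuilder
// copy. Presumably called under the owning service's synchronization when a mapping type is removed.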
private void removeObjectAndFieldMappers(DocumentMapper docMapper) {
    // we need to remove those mappers
    MapBuilder<String, FieldMappers> nameFieldMappers = newMapBuilder(this.nameFieldMappers);
    MapBuilder<String, FieldMappers> indexNameFieldMappers = newMapBuilder(this.indexNameFieldMappers);
    MapBuilder<String, FieldMappers> fullNameFieldMappers = newMapBuilder(this.fullNameFieldMappers);

    for (FieldMapper mapper : docMapper.mappers()) {
        FieldMappers mappers = nameFieldMappers.get(mapper.names().name());
        if (mappers != null) {
            mappers = mappers.remove(mapper);
            if (mappers.isEmpty()) {
                nameFieldMappers.remove(mapper.names().name());
            } else {
                nameFieldMappers.put(mapper.names().name(), mappers);
            }
        }

        mappers = indexNameFieldMappers.get(mapper.names().indexName());
        if (mappers != null) {
            mappers = mappers.remove(mapper);
            if (mappers.isEmpty()) {
                indexNameFieldMappers.remove(mapper.names().indexName());
            } else {
                indexNameFieldMappers.put(mapper.names().indexName(), mappers);
            }
        }

        mappers = fullNameFieldMappers.get(mapper.names().fullName());
        if (mappers != null) {
            mappers = mappers.remove(mapper);
            if (mappers.isEmpty()) {
                fullNameFieldMappers.remove(mapper.names().fullName());
            } else {
                fullNameFieldMappers.put(mapper.names().fullName(), mappers);
            }
        }
    }
    this.nameFieldMappers = nameFieldMappers.map();
    this.indexNameFieldMappers = indexNameFieldMappers.map();
    this.fullNameFieldMappers = fullNameFieldMappers.map();

    MapBuilder<String, ObjectMappers> fullPathObjectMappers = newMapBuilder(this.fullPathObjectMappers);
    for (ObjectMapper mapper : docMapper.objectMappers().values()) {
        ObjectMappers mappers = fullPathObjectMappers.get(mapper.fullPath());
        if (mappers != null) {
            mappers = mappers.remove(mapper);
            if (mappers.isEmpty()) {
                fullPathObjectMappers.remove(mapper.fullPath());
            } else {
                fullPathObjectMappers.put(mapper.fullPath(), mappers);
            }
        }
    }
    this.fullPathObjectMappers = fullPathObjectMappers.map();
}
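// Older variant of the removal logic above: it rebuilds the immutable lookup maps entry by entry
// with newMapBuilder(...).immutableMap() instead of batching all changes in a single builder, and
// updates an objectMappers map keyed by full path.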
private void removeObjectFieldMappers(DocumentMapper docMapper) {
    // we need to remove those mappers
    for (FieldMapper mapper : docMapper.mappers()) {
        FieldMappers mappers = nameFieldMappers.get(mapper.names().name());
        if (mappers != null) {
            mappers = mappers.remove(mapper);
            if (mappers.isEmpty()) {
                nameFieldMappers = newMapBuilder(nameFieldMappers).remove(mapper.names().name()).immutableMap();
            } else {
                nameFieldMappers = newMapBuilder(nameFieldMappers).put(mapper.names().name(), mappers).immutableMap();
            }
        }

        mappers = indexNameFieldMappers.get(mapper.names().indexName());
        if (mappers != null) {
            mappers = mappers.remove(mapper);
            if (mappers.isEmpty()) {
                indexNameFieldMappers = newMapBuilder(indexNameFieldMappers).remove(mapper.names().indexName()).immutableMap();
            } else {
                indexNameFieldMappers = newMapBuilder(indexNameFieldMappers).put(mapper.names().indexName(), mappers).immutableMap();
            }
        }

        mappers = fullNameFieldMappers.get(mapper.names().fullName());
        if (mappers != null) {
            mappers = mappers.remove(mapper);
            if (mappers.isEmpty()) {
                fullNameFieldMappers = newMapBuilder(fullNameFieldMappers).remove(mapper.names().fullName()).immutableMap();
            } else {
                fullNameFieldMappers = newMapBuilder(fullNameFieldMappers).put(mapper.names().fullName(), mappers).immutableMap();
            }
        }
    }

    for (ObjectMapper mapper : docMapper.objectMappers().values()) {
        ObjectMappers mappers = objectMappers.get(mapper.fullPath());
        if (mappers != null) {
            mappers = mappers.remove(mapper);
            if (mappers.isEmpty()) {
                objectMappers = newMapBuilder(objectMappers).remove(mapper.fullPath()).immutableMap();
            } else {
                objectMappers = newMapBuilder(objectMappers).put(mapper.fullPath(), mappers).immutableMap();
            }
        }
    }
}
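// Builder callback that ignores the settings, cache and circuit breaker arguments and simply
// creates the constant field data for the _index field, keyed by the mapper's names.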
@Override
public IndexFieldData<?> build(Index index, Settings indexSettings, FieldMapper<?> mapper, IndexFieldDataCache cache,
                               CircuitBreakerService breakerService, MapperService mapperService,
                               GlobalOrdinalsBuilder globalOrdinalBuilder) {
    return new IndexIndexFieldData(index, mapper.names());
}
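// Indexes each value as a Lucene Field under the mapper's index name, choosing the URI or label
// field type based on the mapper's name. Defaults.URI_FIELD_TYPE and Defaults.LABEL_FIELD_TYPE
// appear to be plugin-specific field type constants.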
private void parseData(ParseContext context, FieldMapper<String> mapper, Collection<String> values) throws IOException {
    for (String value : values) {
        Field field = new Field(mapper.names().indexName(), value,
                isUriField(mapper.name()) ? Defaults.URI_FIELD_TYPE : Defaults.LABEL_FIELD_TYPE);
        context.doc().add(field);
    }
}
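// Resolves a simple wildcard pattern to the set of matching index names. A "type.field" prefix is
// first tried against that type's own mappers; otherwise the pattern is matched against full names,
// index names and plain names, always collecting the mappers' index names.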
public Set<String> simpleMatchToIndexNames(String pattern) {
    int dotIndex = pattern.indexOf('.');
    if (dotIndex != -1) {
        String possibleType = pattern.substring(0, dotIndex);
        DocumentMapper possibleDocMapper = mappers.get(possibleType);
        if (possibleDocMapper != null) {
            Set<String> typedFields = Sets.newHashSet();
            for (String indexName : possibleDocMapper.mappers().simpleMatchToIndexNames(pattern)) {
                typedFields.add(possibleType + "." + indexName);
            }
            return typedFields;
        }
    }
    Set<String> fields = Sets.newHashSet();
    for (Map.Entry<String, FieldMappers> entry : fullNameFieldMappers.entrySet()) {
        if (Regex.simpleMatch(pattern, entry.getKey())) {
            for (FieldMapper mapper : entry.getValue()) {
                fields.add(mapper.names().indexName());
            }
        }
    }
    for (Map.Entry<String, FieldMappers> entry : indexNameFieldMappers.entrySet()) {
        if (Regex.simpleMatch(pattern, entry.getKey())) {
            for (FieldMapper mapper : entry.getValue()) {
                fields.add(mapper.names().indexName());
            }
        }
    }
    for (Map.Entry<String, FieldMappers> entry : nameFieldMappers.entrySet()) {
        if (Regex.simpleMatch(pattern, entry.getKey())) {
            for (FieldMapper mapper : entry.getValue()) {
                fields.add(mapper.names().indexName());
            }
        }
    }
    return fields;
}
@Override
public IndexFieldData<AtomicNumericFieldData> build(Index index, @IndexSettings Settings indexSettings, FieldMapper<?> mapper,
                                                    IndexFieldDataCache cache, CircuitBreakerService breakerService,
                                                    MapperService mapperService) {
    return new PackedArrayIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache, numericType, breakerService);
}
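// Serializes a single field mapper to JSON and stores it as FieldMappingMetaData under the given
// key, skipping fields that were already collected.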
private void addFieldMapper(String field, FieldMapper<?> fieldMapper, MapBuilder<String, FieldMappingMetaData> fieldMappings, boolean includeDefaults) {
    if (fieldMappings.containsKey(field)) {
        return;
    }
    try {
        XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
        builder.startObject();
        fieldMapper.toXContent(builder, includeDefaults ? includeDefaultsParams : ToXContent.EMPTY_PARAMS);
        builder.endObject();
        fieldMappings.put(field, new FieldMappingMetaData(fieldMapper.names().fullName(), builder.bytes()));
    } catch (IOException e) {
        throw new ElasticsearchException("failed to serialize XContent of field [" + field + "]", e);
    }
}
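// Registers newly added field mappers under their name, index name and full name. The lookup maps
// are copied, updated and swapped under the mutex so readers always see immutable snapshots.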
private void addFieldMappers(FieldMapper[] fieldMappers) {
    synchronized (mutex) {
        MapBuilder<String, FieldMappers> nameFieldMappers = newMapBuilder(this.nameFieldMappers);
        MapBuilder<String, FieldMappers> indexNameFieldMappers = newMapBuilder(this.indexNameFieldMappers);
        MapBuilder<String, FieldMappers> fullNameFieldMappers = newMapBuilder(this.fullNameFieldMappers);
        for (FieldMapper fieldMapper : fieldMappers) {
            FieldMappers mappers = nameFieldMappers.get(fieldMapper.names().name());
            if (mappers == null) {
                mappers = new FieldMappers(fieldMapper);
            } else {
                mappers = mappers.concat(fieldMapper);
            }
            nameFieldMappers.put(fieldMapper.names().name(), mappers);

            mappers = indexNameFieldMappers.get(fieldMapper.names().indexName());
            if (mappers == null) {
                mappers = new FieldMappers(fieldMapper);
            } else {
                mappers = mappers.concat(fieldMapper);
            }
            indexNameFieldMappers.put(fieldMapper.names().indexName(), mappers);

            mappers = fullNameFieldMappers.get(fieldMapper.names().fullName());
            if (mappers == null) {
                mappers = new FieldMappers(fieldMapper);
            } else {
                mappers = mappers.concat(fieldMapper);
            }
            fullNameFieldMappers.put(fieldMapper.names().fullName(), mappers);
        }
        this.nameFieldMappers = nameFieldMappers.map();
        this.indexNameFieldMappers = indexNameFieldMappers.map();
        this.fullNameFieldMappers = fullNameFieldMappers.map();
    }
}
@Override
public void fieldMapper(FieldMapper fieldMapper) {
    synchronized (mutex) {
        FieldMappers mappers = nameFieldMappers.get(fieldMapper.names().name());
        if (mappers == null) {
            mappers = new FieldMappers(fieldMapper);
        } else {
            mappers = mappers.concat(fieldMapper);
        }
        nameFieldMappers = newMapBuilder(nameFieldMappers).put(fieldMapper.names().name(), mappers).immutableMap();

        mappers = indexNameFieldMappers.get(fieldMapper.names().indexName());
        if (mappers == null) {
            mappers = new FieldMappers(fieldMapper);
        } else {
            mappers = mappers.concat(fieldMapper);
        }
        indexNameFieldMappers = newMapBuilder(indexNameFieldMappers).put(fieldMapper.names().indexName(), mappers).immutableMap();

        mappers = fullNameFieldMappers.get(fieldMapper.names().fullName());
        if (mappers == null) {
            mappers = new FieldMappers(fieldMapper);
        } else {
            mappers = mappers.concat(fieldMapper);
        }
        fullNameFieldMappers = newMapBuilder(fullNameFieldMappers).put(fieldMapper.names().fullName(), mappers).immutableMap();
    }
}
private ImmutableMap<String, FieldMappingMetaData> findFieldMappingsByType(DocumentMapper documentMapper, GetFieldMappingsIndexRequest request)
        throws ElasticsearchException {
    MapBuilder<String, FieldMappingMetaData> fieldMappings = new MapBuilder<>();
    final DocumentFieldMappers allFieldMappers = documentMapper.mappers();
    for (String field : request.fields()) {
        if (Regex.isMatchAllPattern(field)) {
            for (FieldMapper<?> fieldMapper : allFieldMappers) {
                addFieldMapper(fieldMapper.names().fullName(), fieldMapper, fieldMappings, request.includeDefaults());
            }
        } else if (Regex.isSimpleMatchPattern(field)) {
            // go through the field mappers 3 times, to make sure we give preference to the resolve order: full name, index name, name.
            // also make sure we only store each mapper once.
            Collection<FieldMapper<?>> remainingFieldMappers = new LinkedList<>(allFieldMappers);
            for (Iterator<FieldMapper<?>> it = remainingFieldMappers.iterator(); it.hasNext(); ) {
                final FieldMapper<?> fieldMapper = it.next();
                if (Regex.simpleMatch(field, fieldMapper.names().fullName())) {
                    addFieldMapper(fieldMapper.names().fullName(), fieldMapper, fieldMappings, request.includeDefaults());
                    it.remove();
                }
            }
            for (Iterator<FieldMapper<?>> it = remainingFieldMappers.iterator(); it.hasNext(); ) {
                final FieldMapper<?> fieldMapper = it.next();
                if (Regex.simpleMatch(field, fieldMapper.names().indexName())) {
                    addFieldMapper(fieldMapper.names().indexName(), fieldMapper, fieldMappings, request.includeDefaults());
                    it.remove();
                }
            }
            for (Iterator<FieldMapper<?>> it = remainingFieldMappers.iterator(); it.hasNext(); ) {
                final FieldMapper<?> fieldMapper = it.next();
                if (Regex.simpleMatch(field, fieldMapper.names().name())) {
                    addFieldMapper(fieldMapper.names().name(), fieldMapper, fieldMappings, request.includeDefaults());
                    it.remove();
                }
            }
        } else {
            // not a pattern
            FieldMapper<?> fieldMapper = allFieldMappers.smartNameFieldMapper(field);
            if (fieldMapper != null) {
                addFieldMapper(field, fieldMapper, fieldMappings, request.includeDefaults());
            } else if (request.probablySingleFieldRequest()) {
                fieldMappings.put(field, FieldMappingMetaData.NULL);
            }
        }
    }
    return fieldMappings.immutableMap();
}
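// Parser for the geo_distance_range filter: extracts the reference point (array, object, lat/lon
// suffixes or geohash), the from/to distance bounds and options such as unit, distance_type,
// optimize_bbox, _cache and _name, then builds a GeoDistanceRangeFilter over the resolved
// geo_point field.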
@Override
public Filter parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
    XContentParser parser = parseContext.parser();

    XContentParser.Token token;

    boolean cache = false;
    CacheKeyFilter.Key cacheKey = null;
    String filterName = null;
    String currentFieldName = null;
    double lat = 0;
    double lon = 0;
    String fieldName = null;
    Object vFrom = null;
    Object vTo = null;
    boolean includeLower = true;
    boolean includeUpper = true;
    DistanceUnit unit = DistanceUnit.KILOMETERS; // default unit
    GeoDistance geoDistance = GeoDistance.ARC;
    String optimizeBbox = "memory";
    boolean normalizeLon = true;
    boolean normalizeLat = true;

    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            currentFieldName = parser.currentName();
        } else if (token == XContentParser.Token.START_ARRAY) {
            token = parser.nextToken();
            lon = parser.doubleValue();
            token = parser.nextToken();
            lat = parser.doubleValue();
            while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                // consume and ignore any additional array values
            }
            fieldName = currentFieldName;
        } else if (token == XContentParser.Token.START_OBJECT) {
            // the json in the format of -> field : { lat : 30, lon : 12 }
            String currentName = parser.currentName();
            fieldName = currentFieldName;
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    currentName = parser.currentName();
                } else if (token.isValue()) {
                    if (currentName.equals(GeoPointFieldMapper.Names.LAT)) {
                        lat = parser.doubleValue();
                    } else if (currentName.equals(GeoPointFieldMapper.Names.LON)) {
                        lon = parser.doubleValue();
                    } else if (currentName.equals(GeoPointFieldMapper.Names.GEOHASH)) {
                        double[] values = GeoHashUtils.decode(parser.text());
                        lat = values[0];
                        lon = values[1];
                    }
                }
            }
        } else if (token.isValue()) {
            if (currentFieldName.equals("from")) {
                if (token == XContentParser.Token.VALUE_NULL) {
                    // explicit null leaves the bound unset
                } else if (token == XContentParser.Token.VALUE_STRING) {
                    vFrom = parser.text(); // a String
                } else {
                    vFrom = parser.numberValue(); // a Number
                }
            } else if (currentFieldName.equals("to")) {
                if (token == XContentParser.Token.VALUE_NULL) {
                } else if (token == XContentParser.Token.VALUE_STRING) {
                    vTo = parser.text(); // a String
                } else {
                    vTo = parser.numberValue(); // a Number
                }
            } else if ("include_lower".equals(currentFieldName) || "includeLower".equals(currentFieldName)) {
                includeLower = parser.booleanValue();
            } else if ("include_upper".equals(currentFieldName) || "includeUpper".equals(currentFieldName)) {
                includeUpper = parser.booleanValue();
            } else if ("gt".equals(currentFieldName)) {
                if (token == XContentParser.Token.VALUE_NULL) {
                } else if (token == XContentParser.Token.VALUE_STRING) {
                    vFrom = parser.text(); // a String
                } else {
                    vFrom = parser.numberValue(); // a Number
                }
                includeLower = false;
            } else if ("gte".equals(currentFieldName) || "ge".equals(currentFieldName)) {
                if (token == XContentParser.Token.VALUE_NULL) {
                } else if (token == XContentParser.Token.VALUE_STRING) {
                    vFrom = parser.text(); // a String
                } else {
                    vFrom = parser.numberValue(); // a Number
                }
                includeLower = true;
            } else if ("lt".equals(currentFieldName)) {
                if (token == XContentParser.Token.VALUE_NULL) {
                } else if (token == XContentParser.Token.VALUE_STRING) {
                    vTo = parser.text(); // a String
                } else {
                    vTo = parser.numberValue(); // a Number
                }
                includeUpper = false;
            } else if ("lte".equals(currentFieldName) || "le".equals(currentFieldName)) {
                if (token == XContentParser.Token.VALUE_NULL) {
                } else if (token == XContentParser.Token.VALUE_STRING) {
                    vTo = parser.text(); // a String
                } else {
                    vTo = parser.numberValue(); // a Number
                }
                includeUpper = true;
            } else if (currentFieldName.equals("unit")) {
                unit = DistanceUnit.fromString(parser.text());
            } else if (currentFieldName.equals("distance_type") || currentFieldName.equals("distanceType")) {
                geoDistance = GeoDistance.fromString(parser.text());
            } else if (currentFieldName.endsWith(GeoPointFieldMapper.Names.LAT_SUFFIX)) {
                lat = parser.doubleValue();
                fieldName = currentFieldName.substring(0, currentFieldName.length() - GeoPointFieldMapper.Names.LAT_SUFFIX.length());
            } else if (currentFieldName.endsWith(GeoPointFieldMapper.Names.LON_SUFFIX)) {
                lon = parser.doubleValue();
                fieldName = currentFieldName.substring(0, currentFieldName.length() - GeoPointFieldMapper.Names.LON_SUFFIX.length());
            } else if (currentFieldName.endsWith(GeoPointFieldMapper.Names.GEOHASH_SUFFIX)) {
                double[] values = GeoHashUtils.decode(parser.text());
                lat = values[0];
                lon = values[1];
                fieldName = currentFieldName.substring(0, currentFieldName.length() - GeoPointFieldMapper.Names.GEOHASH_SUFFIX.length());
            } else if ("_name".equals(currentFieldName)) {
                filterName = parser.text();
            } else if ("_cache".equals(currentFieldName)) {
                cache = parser.booleanValue();
            } else if ("_cache_key".equals(currentFieldName) || "_cacheKey".equals(currentFieldName)) {
                cacheKey = new CacheKeyFilter.Key(parser.text());
            } else if ("optimize_bbox".equals(currentFieldName) || "optimizeBbox".equals(currentFieldName)) {
                optimizeBbox = parser.textOrNull();
            } else if ("normalize".equals(currentFieldName)) {
                normalizeLat = parser.booleanValue();
                normalizeLon = parser.booleanValue();
            } else {
                // assume the value is the actual value
                String value = parser.text();
                int comma = value.indexOf(',');
                if (comma != -1) {
                    lat = Double.parseDouble(value.substring(0, comma).trim());
                    lon = Double.parseDouble(value.substring(comma + 1).trim());
                } else {
                    double[] values = GeoHashUtils.decode(value);
                    lat = values[0];
                    lon = values[1];
                }
                fieldName = currentFieldName;
            }
        }
    }

    Double from = null;
    Double to = null;
    if (vFrom != null) {
        if (vFrom instanceof Number) {
            from = unit.toMiles(((Number) vFrom).doubleValue());
        } else {
            from = DistanceUnit.parse((String) vFrom, unit, DistanceUnit.MILES);
        }
        from = geoDistance.normalize(from, DistanceUnit.MILES);
    }
    if (vTo != null) {
        if (vTo instanceof Number) {
            to = unit.toMiles(((Number) vTo).doubleValue());
        } else {
            to = DistanceUnit.parse((String) vTo, unit, DistanceUnit.MILES);
        }
        to = geoDistance.normalize(to, DistanceUnit.MILES);
    }

    if (normalizeLat) {
        lat = GeoUtils.normalizeLat(lat);
    }
    if (normalizeLon) {
        lon = GeoUtils.normalizeLon(lon);
    }

    MapperService.SmartNameFieldMappers smartMappers = parseContext.smartFieldMappers(fieldName);
    if (smartMappers == null || !smartMappers.hasMapper()) {
        throw new QueryParsingException(parseContext.index(), "failed to find geo_point field [" + fieldName + "]");
    }
    FieldMapper mapper = smartMappers.mapper();
    if (mapper.fieldDataType() != GeoPointFieldDataType.TYPE) {
        throw new QueryParsingException(parseContext.index(), "field [" + fieldName + "] is not a geo_point field");
    }
    GeoPointFieldMapper geoMapper = ((GeoPointFieldMapper.GeoStringFieldMapper) mapper).geoMapper();
    fieldName = mapper.names().indexName();

    Filter filter = new GeoDistanceRangeFilter(lat, lon, from, to, includeLower, includeUpper, geoDistance, fieldName, geoMapper,
            parseContext.indexCache().fieldData(), optimizeBbox);
    if (cache) {
        filter = parseContext.cacheFilter(filter, cacheKey);
    }
    filter = wrapSmartNameFilter(filter, smartMappers, parseContext);
    if (filterName != null) {
        parseContext.addNamedFilter(filterName, filter);
    }
    return filter;
}
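// Variant of findFieldMappingsByType that works on the mappers list directly and tracks already
// resolved mappers with a boolean[] instead of removing them from a copied collection.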
private ImmutableMap<String, FieldMappingMetaData> findFieldMappingsByType(DocumentMapper documentMapper, GetFieldMappingsIndexRequest request)
        throws ElasticsearchException {
    MapBuilder<String, FieldMappingMetaData> fieldMappings = new MapBuilder<>();
    final List<FieldMapper> allFieldMappers = documentMapper.mappers().mappers();
    for (String field : request.fields()) {
        if (Regex.isMatchAllPattern(field)) {
            for (FieldMapper fieldMapper : allFieldMappers) {
                addFieldMapper(fieldMapper.names().fullName(), fieldMapper, fieldMappings, request.includeDefaults());
            }
        } else if (Regex.isSimpleMatchPattern(field)) {
            // go through the field mappers 3 times, to make sure we give preference to the resolve order: full name, index name, name.
            // also make sure we only store each mapper once.
            boolean[] resolved = new boolean[allFieldMappers.size()];
            for (int i = 0; i < allFieldMappers.size(); i++) {
                FieldMapper fieldMapper = allFieldMappers.get(i);
                if (Regex.simpleMatch(field, fieldMapper.names().fullName())) {
                    addFieldMapper(fieldMapper.names().fullName(), fieldMapper, fieldMappings, request.includeDefaults());
                    resolved[i] = true;
                }
            }
            for (int i = 0; i < allFieldMappers.size(); i++) {
                if (resolved[i]) {
                    continue;
                }
                FieldMapper fieldMapper = allFieldMappers.get(i);
                if (Regex.simpleMatch(field, fieldMapper.names().indexName())) {
                    addFieldMapper(fieldMapper.names().indexName(), fieldMapper, fieldMappings, request.includeDefaults());
                    resolved[i] = true;
                }
            }
            for (int i = 0; i < allFieldMappers.size(); i++) {
                if (resolved[i]) {
                    continue;
                }
                FieldMapper fieldMapper = allFieldMappers.get(i);
                if (Regex.simpleMatch(field, fieldMapper.names().name())) {
                    addFieldMapper(fieldMapper.names().name(), fieldMapper, fieldMappings, request.includeDefaults());
                    resolved[i] = true;
                }
            }
        } else {
            // not a pattern
            FieldMapper fieldMapper = documentMapper.mappers().smartNameFieldMapper(field);
            if (fieldMapper != null) {
                addFieldMapper(field, fieldMapper, fieldMappings, request.includeDefaults());
            } else if (request.probablySingleFieldRequest()) {
                fieldMappings.put(field, FieldMappingMetaData.NULL);
            }
        }
    }
    return fieldMappings.immutableMap();
}
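// Resolves (and lazily builds) the IndexFieldData instance for a mapper's field: picks a builder by
// field data type and format (falling back from doc values to the default), selects a resident, soft
// or node-level cache from the settings, and caches the built field data per index name.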
public <IFD extends IndexFieldData<?>> IFD getForField(FieldMapper<?> mapper) {
    final FieldMapper.Names fieldNames = mapper.names();
    final FieldDataType type = mapper.fieldDataType();
    final boolean docValues = mapper.hasDocValues();
    IndexFieldData<?> fieldData = loadedFieldData.get(fieldNames.indexName());
    if (fieldData == null) {
        synchronized (loadedFieldData) {
            fieldData = loadedFieldData.get(fieldNames.indexName());
            if (fieldData == null) {
                IndexFieldData.Builder builder = null;
                String format = type.getFormat(indexSettings);
                if (format != null && FieldDataType.DOC_VALUES_FORMAT_VALUE.equals(format) && !docValues) {
                    logger.warn("field [" + fieldNames.fullName() + "] has no doc values, will use default field data format");
                    format = null;
                }
                if (format != null) {
                    builder = buildersByTypeAndFormat.get(Tuple.tuple(type.getType(), format));
                    if (builder == null) {
                        logger.warn("failed to find format [" + format + "] for field [" + fieldNames.fullName() + "], will use default");
                    }
                }
                if (builder == null && docValues) {
                    builder = docValuesBuildersByType.get(type.getType());
                }
                if (builder == null) {
                    builder = buildersByType.get(type.getType());
                }
                if (builder == null) {
                    throw new ElasticsearchIllegalArgumentException("failed to find field data builder for field "
                            + fieldNames.fullName() + ", and type " + type.getType());
                }

                IndexFieldDataCache cache = fieldDataCaches.get(fieldNames.indexName());
                if (cache == null) {
                    // we default to node level cache, which in turn defaults to be unbounded
                    // this means changing the node level settings is simple, just set the bounds there
                    String cacheType = type.getSettings().get("cache", indexSettings.get("index.fielddata.cache", "node"));
                    if ("resident".equals(cacheType)) {
                        cache = new IndexFieldDataCache.Resident(indexService, fieldNames, type, indicesFieldDataCacheListener);
                    } else if ("soft".equals(cacheType)) {
                        cache = new IndexFieldDataCache.Soft(indexService, fieldNames, type, indicesFieldDataCacheListener);
                    } else if ("node".equals(cacheType)) {
                        cache = indicesFieldDataCache.buildIndexFieldDataCache(indexService, index, fieldNames, type);
                    } else {
                        throw new ElasticsearchIllegalArgumentException("cache type not supported [" + cacheType
                                + "] for field [" + fieldNames.fullName() + "]");
                    }
                    fieldDataCaches.put(fieldNames.indexName(), cache);
                }

                GlobalOrdinalsBuilder globalOrdinalBuilder = new InternalGlobalOrdinalsBuilder(index(), indexSettings);
                fieldData = builder.build(index, indexSettings, mapper, cache, circuitBreakerService,
                        indexService.mapperService(), globalOrdinalBuilder);
                loadedFieldData.put(fieldNames.indexName(), fieldData);
            }
        }
    }
    return (IFD) fieldData;
}
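// Translates a sort entry into a Lucene SortField: handles _score and _doc directly, otherwise
// resolves the field mapper, restricts SUM/AVG sort modes to numeric fields, and wraps the
// comparator source for nested objects when a nested path or filter applies.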
private void addSortField(SearchContext context, List<SortField> sortFields, String fieldName, boolean reverse,
                          boolean ignoreUnmapped, @Nullable final String missing, SortMode sortMode,
                          String nestedPath, Filter nestedFilter) {
    if (SCORE_FIELD_NAME.equals(fieldName)) {
        if (reverse) {
            sortFields.add(SORT_SCORE_REVERSE);
        } else {
            sortFields.add(SORT_SCORE);
        }
    } else if (DOC_FIELD_NAME.equals(fieldName)) {
        if (reverse) {
            sortFields.add(SORT_DOC_REVERSE);
        } else {
            sortFields.add(SORT_DOC);
        }
    } else {
        FieldMapper fieldMapper = context.smartNameFieldMapper(fieldName);
        if (fieldMapper == null) {
            if (ignoreUnmapped) {
                return;
            }
            throw new SearchParseException(context, "No mapping found for [" + fieldName + "] in order to sort on");
        }

        // Enable when we also know how to detect fields that do tokenize, but only emit one token
        /*if (fieldMapper instanceof StringFieldMapper) {
            StringFieldMapper stringFieldMapper = (StringFieldMapper) fieldMapper;
            if (stringFieldMapper.fieldType().tokenized()) {
                // Fail early
                throw new SearchParseException(context, "Can't sort on tokenized string field[" + fieldName + "]");
            }
        }*/

        // We only support AVG and SUM on number based fields
        if (!(fieldMapper instanceof NumberFieldMapper) && (sortMode == SortMode.SUM || sortMode == SortMode.AVG)) {
            sortMode = null;
        }
        if (sortMode == null) {
            sortMode = resolveDefaultSortMode(reverse);
        }

        IndexFieldData.XFieldComparatorSource fieldComparatorSource =
                context.fieldData().getForField(fieldMapper).comparatorSource(missing, sortMode);
        ObjectMapper objectMapper;
        if (nestedPath != null) {
            ObjectMappers objectMappers = context.mapperService().objectMapper(nestedPath);
            if (objectMappers == null) {
                throw new ElasticSearchIllegalArgumentException("failed to find nested object mapping for explicit nested path [" + nestedPath + "]");
            }
            objectMapper = objectMappers.mapper();
            if (!objectMapper.nested().isNested()) {
                throw new ElasticSearchIllegalArgumentException("mapping for explicit nested path is not mapped as nested: [" + nestedPath + "]");
            }
        } else {
            objectMapper = context.mapperService().resolveClosestNestedObjectMapper(fieldName);
        }
        if (objectMapper != null && objectMapper.nested().isNested()) {
            Filter rootDocumentsFilter = context.filterCache().cache(NonNestedDocsFilter.INSTANCE);
            Filter innerDocumentsFilter;
            if (nestedFilter != null) {
                innerDocumentsFilter = context.filterCache().cache(nestedFilter);
            } else {
                innerDocumentsFilter = context.filterCache().cache(objectMapper.nestedTypeFilter());
            }
            fieldComparatorSource = new NestedFieldComparatorSource(sortMode, fieldComparatorSource, rootDocumentsFilter, innerDocumentsFilter);
        }
        sortFields.add(new SortField(fieldMapper.names().indexName(), fieldComparatorSource, reverse));
    }
}
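// Shard-level GET: loads the current doc id and version via the bloom cache, extracts _source and
// stored fields through the document mapper (resolving pretty names with names().fullName()), and
// evaluates script or _source-backed fields with the "mvel" script service where needed.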
@Override
protected GetResponse shardOperation(GetRequest request, int shardId) throws ElasticSearchException {
    IndexService indexService = indicesService.indexServiceSafe(request.index());
    BloomCache bloomCache = indexService.cache().bloomCache();
    IndexShard indexShard = indexService.shardSafe(shardId);

    DocumentMapper docMapper = indexService.mapperService().documentMapper(request.type());
    if (docMapper == null) {
        throw new TypeMissingException(new Index(request.index()), request.type());
    }

    if (request.refresh()) {
        indexShard.refresh(new Engine.Refresh(false));
    }

    Engine.Searcher searcher = indexShard.searcher();
    boolean exists = false;
    byte[] source = null;
    Map<String, GetField> fields = null;
    long version = -1;
    try {
        UidField.DocIdAndVersion docIdAndVersion = loadCurrentVersionFromIndex(bloomCache, searcher, docMapper.uidMapper().term(request.type(), request.id()));
        if (docIdAndVersion != null && docIdAndVersion.docId != Lucene.NO_DOC) {
            if (docIdAndVersion.version > 0) {
                version = docIdAndVersion.version;
            }
            exists = true;
            FieldSelector fieldSelector = buildFieldSelectors(docMapper, request.fields());
            if (fieldSelector != null) {
                Document doc = docIdAndVersion.reader.document(docIdAndVersion.docId, fieldSelector);
                source = extractSource(doc, docMapper);

                for (Object oField : doc.getFields()) {
                    Fieldable field = (Fieldable) oField;
                    String name = field.name();
                    Object value = null;
                    FieldMappers fieldMappers = docMapper.mappers().indexName(field.name());
                    if (fieldMappers != null) {
                        FieldMapper mapper = fieldMappers.mapper();
                        if (mapper != null) {
                            name = mapper.names().fullName();
                            value = mapper.valueForSearch(field);
                        }
                    }
                    if (value == null) {
                        if (field.isBinary()) {
                            value = field.getBinaryValue();
                        } else {
                            value = field.stringValue();
                        }
                    }

                    if (fields == null) {
                        fields = newHashMapWithExpectedSize(2);
                    }

                    GetField getField = fields.get(name);
                    if (getField == null) {
                        getField = new GetField(name, new ArrayList<Object>(2));
                        fields.put(name, getField);
                    }
                    getField.values().add(value);
                }
            }

            // now, go and do the script thingy if needed
            if (request.fields() != null && request.fields().length > 0) {
                SearchLookup searchLookup = null;
                for (String field : request.fields()) {
                    String script = null;
                    if (field.contains("_source.") || field.contains("doc[")) {
                        script = field;
                    } else {
                        FieldMappers x = docMapper.mappers().smartName(field);
                        if (x != null && !x.mapper().stored()) {
                            script = "_source." + x.mapper().names().fullName();
                        }
                    }
                    if (script != null) {
                        if (searchLookup == null) {
                            searchLookup = new SearchLookup(indexService.mapperService(), indexService.cache().fieldData());
                        }
                        SearchScript searchScript = scriptService.search(searchLookup, "mvel", script, null);
                        searchScript.setNextReader(docIdAndVersion.reader);
                        searchScript.setNextDocId(docIdAndVersion.docId);

                        try {
                            Object value = searchScript.run();
                            if (fields == null) {
                                fields = newHashMapWithExpectedSize(2);
                            }
                            GetField getField = fields.get(field);
                            if (getField == null) {
                                getField = new GetField(field, new ArrayList<Object>(2));
                                fields.put(field, getField);
                            }
                            getField.values().add(value);
                        } catch (RuntimeException e) {
                            if (logger.isTraceEnabled()) {
                                logger.trace("failed to execute get request script field [{}]", e, script);
                            }
                            // ignore
                        }
                    }
                }
            }
        }
    } catch (IOException e) {
        throw new ElasticSearchException("Failed to get type [" + request.type() + "] and id [" + request.id() + "]", e);
    } finally {
        searcher.release();
    }
    return new GetResponse(request.index(), request.type(), request.id(), version, exists, source, fields);
}