public ModsType mapPairs(ModsType mods, List<ExtentPair> pairs) {
    // Maps extents without touching preservation notes.
    return map(mods, pairs, NO_PRESERVATION, NO_PRESERVATION);
}
@Override
public Attachment map(int i, ResultSet resultSet, StatementContext statementContext) throws SQLException {
    Attachment attachment = new Attachment();
    attachment.setId((UUID) resultSet.getObject("id"));
    attachment.setTitle(resultSet.getString("title"));
    attachment.setDescription(resultSet.getString("description"));
    attachment.setSlug(resultSet.getString("slug"));
    attachment.setData(resultSet.getBinaryStream("data"));
    attachment.setExtension(resultSet.getString("extension"));
    attachment.setParentId((UUID) resultSet.getObject("parent_id"));

    // localized versions are stored as a JSON array of {locale, entity} objects
    if (MapperUtils.hasColumn("localization_data", resultSet)
            && !Strings.isNullOrEmpty(resultSet.getString("localization_data"))) {
        ObjectMapper mapper = new ObjectMapper();
        try {
            Map<Locale, Map<String, Object>> localizedVersions = Maps.newHashMap();
            Map[] data = mapper.readValue(resultSet.getString("localization_data"), Map[].class);
            for (Map map : data) {
                localizedVersions.put(LocaleUtils.toLocale((String) map.get("locale")),
                        (Map) map.get("entity"));
            }
            attachment.setLocalizedVersions(localizedVersions);
        } catch (IOException e) {
            throw new SQLException("Failed to de-serialize localization JSON data", e);
        }
    }
    return attachment;
}
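// A minimal registration sketch, assuming this mapper implements JDBI 2's
// ResultSetMapper<Attachment> (the three-argument map signature above matches
// that interface); the DAO interface and SQL below are hypothetical:
//
//   @RegisterMapper(AttachmentMapper.class)
//   public interface AttachmentDao {
//       @SqlQuery("select * from attachment where id = :id")
//       Attachment findById(@Bind("id") UUID id);
//   }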
/**
 * Parse the given {@code context} with the given {@code mapper} and apply
 * the potential mapping update in-place. This method is useful when
 * composing mapping updates.
 */
private static <M extends Mapper> M parseAndMergeUpdate(M mapper, ParseContext context) throws IOException {
    final Mapper update = parseObjectOrField(context, mapper);
    if (update != null) {
        MapperUtils.merge(mapper, update);
    }
    return mapper;
}
public ModsType map(ModsType mods, List<ArrayItem> items) {
    PhysicalDescriptionType pd =
            MapperUtils.findFirst(mods.getModsGroup(), PhysicalDescriptionType.class);
    if (pd == null) {
        if (items.isEmpty()) {
            return mods;
        } else {
            pd = new PhysicalDescriptionType();
            MapperUtils.add(mods, pd);
        }
    }
    List<JAXBElement<?>> pdSubelements = pd.getFormOrReformattingQualityOrInternetMediaType();
    List<JAXBElement<?>> news = arrayMapper.map(items, pdSubelements);
    pdSubelements.clear();
    pdSubelements.addAll(news);
    return mods;
}
public List<ArrayItem> map(ModsType mods) {
    PhysicalDescriptionType pd =
            MapperUtils.findFirst(mods.getModsGroup(), PhysicalDescriptionType.class);
    if (pd == null) {
        return new ArrayList<ArrayItem>();
    }
    List<JAXBElement<?>> group = pd.getFormOrReformattingQualityOrInternetMediaType();
    return arrayMapper.map(group);
}
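// A hedged round-trip sketch using the two mappers above (the `pdMapper`
// instance name is hypothetical): reading yields the current physical
// description items, and writing replaces the subelement group in place.
//
//   List<ArrayItem> items = pdMapper.map(mods);  // read
//   mods = pdMapper.map(mods, items);            // write back unchanged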
private boolean assertMappersShareSameFieldType() {
    for (DocumentMapper mapper : docMappers(false)) {
        List<FieldMapper> fieldMappers = new ArrayList<>();
        Collections.addAll(fieldMappers, mapper.mapping().metadataMappers);
        MapperUtils.collect(mapper.root(), new ArrayList<ObjectMapper>(), fieldMappers);
        for (FieldMapper fieldMapper : fieldMappers) {
            assert fieldMapper.fieldType() == fieldTypes.get(fieldMapper.name()) : fieldMapper.name();
        }
    }
    return true;
}
public ModsType map(ModsType mods, List<ExtentPair> pairs,
        String preservationTreatment, String preservationStateOfArt) {

    pairs = MapperUtils.noNull(pairs);
    List<ArrayItem> oldies = map(mods);
    List<ExtentItem> toExtents = toExtents(pairs);
    List<ArrayItem> news;
    // identity check against the NO_PRESERVATION sentinel; callers pass the constant itself
    if (preservationTreatment == NO_PRESERVATION) {
        List<ArrayItem> unknowns = filter(oldies, false, ExtentItem.class);
        news = MapperUtils.mergeList(toExtents, unknowns);
    } else {
        List<ArrayItem> unknowns = filter(oldies, false, ExtentItem.class, NoteItem.class);
        List<NoteItem> notes = MapperUtils.find(oldies, NoteItem.class);
        updateTreatmentItem(oldies, notes, preservationTreatment, PRESERVATION_TREATMENT);
        updateTreatmentItem(oldies, notes, preservationStateOfArt, PRESERVATION_STATE_OF_ART);
        news = MapperUtils.mergeList(toExtents, notes, unknowns);
    }
    return map(mods, news);
}
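// A hedged usage sketch (the `extentMapper` instance, `pairs`, and note
// texts are hypothetical): real note texts update the preservation notes,
// while the NO_PRESERVATION sentinel leaves existing notes untouched.
//
//   mods = extentMapper.map(mods, pairs, "cleaned and rebound", "good condition");
//   mods = extentMapper.mapPairs(mods, pairs);  // equivalent to two NO_PRESERVATION arguments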
public static List<ExtentPair> toPairs(List<ArrayItem> items) {
    ArrayList<ExtentPair> pairs = new ArrayList<ExtentPair>();
    List<ExtentItem> extents = MapperUtils.find(items, ExtentItem.class);
    for (Iterator<ExtentItem> it = extents.iterator(); it.hasNext(); ) {
        ExtentItem item = it.next();
        ExtentPair pair = new ExtentPair(item.getValue(), item.getArrayIndex(), null, null);
        pairs.add(pair);
        if (it.hasNext()) {
            item = it.next();
            pair.setSize(item.getValue());
            pair.setSizeIndex(item.getArrayIndex());
        }
    }
    return pairs;
}
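// Worked illustration of the pairing logic above (values are hypothetical):
// consecutive ExtentItems fold into (value, size) pairs, and a trailing odd
// item keeps a null size and size index.
//
//   extents:  ("300 p.", index 0), ("25 cm", index 1), ("ill.", index 2)
//   toPairs:  ExtentPair("300 p.", 0, "25 cm", 1), ExtentPair("ill.", 2, null, null)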
private synchronized DocumentMapper merge(DocumentMapper mapper, MergeReason reason, boolean updateAllTypes) {
    if (mapper.type().length() == 0) {
        throw new InvalidTypeNameException("mapping type name is empty");
    }
    if (mapper.type().length() > 255) {
        throw new InvalidTypeNameException("mapping type name [" + mapper.type()
                + "] is too long; limit is length 255 but was [" + mapper.type().length() + "]");
    }
    if (mapper.type().charAt(0) == '_') {
        throw new InvalidTypeNameException("mapping type name [" + mapper.type() + "] can't start with '_'");
    }
    if (mapper.type().contains("#")) {
        throw new InvalidTypeNameException("mapping type name [" + mapper.type() + "] should not include '#' in it");
    }
    if (mapper.type().contains(",")) {
        throw new InvalidTypeNameException("mapping type name [" + mapper.type() + "] should not include ',' in it");
    }
    if (mapper.type().equals(mapper.parentFieldMapper().type())) {
        throw new IllegalArgumentException("The [_parent.type] option can't point to the same type");
    }
    if (typeNameStartsWithIllegalDot(mapper)) {
        throw new IllegalArgumentException("mapping type name [" + mapper.type() + "] must not start with a '.'");
    }

    // 1. compute the merged DocumentMapper
    DocumentMapper oldMapper = mappers.get(mapper.type());
    DocumentMapper newMapper;
    if (oldMapper != null) {
        newMapper = oldMapper.merge(mapper.mapping(), updateAllTypes);
    } else {
        newMapper = mapper;
    }

    // 2. check basic sanity of the new mapping
    List<ObjectMapper> objectMappers = new ArrayList<>();
    List<FieldMapper> fieldMappers = new ArrayList<>();
    Collections.addAll(fieldMappers, newMapper.mapping().metadataMappers);
    MapperUtils.collect(newMapper.mapping().root(), objectMappers, fieldMappers);
    checkFieldUniqueness(newMapper.type(), objectMappers, fieldMappers);
    checkObjectsCompatibility(newMapper.type(), objectMappers, fieldMappers, updateAllTypes);

    // 3. update lookup data-structures
    // this will in particular make sure that the merged fields are compatible with other types
    FieldTypeLookup fieldTypes = this.fieldTypes.copyAndAddAll(newMapper.type(), fieldMappers, updateAllTypes);

    boolean hasNested = this.hasNested;
    Map<String, ObjectMapper> fullPathObjectMappers = new HashMap<>(this.fullPathObjectMappers);
    for (ObjectMapper objectMapper : objectMappers) {
        fullPathObjectMappers.put(objectMapper.fullPath(), objectMapper);
        if (objectMapper.nested().isNested()) {
            hasNested = true;
        }
    }
    fullPathObjectMappers = Collections.unmodifiableMap(fullPathObjectMappers);

    if (reason == MergeReason.MAPPING_UPDATE) {
        // this check will only be performed on the master node when there is
        // a call to the update mapping API. For all other cases like
        // the master node restoring mappings from disk or data nodes
        // deserializing cluster state that was sent by the master node,
        // this check will be skipped.
        checkNestedFieldsLimit(fullPathObjectMappers);
        checkTotalFieldsLimit(objectMappers.size() + fieldMappers.size());
        checkDepthLimit(fullPathObjectMappers.keySet());
        checkPercolatorFieldLimit(fieldTypes);
    }

    Set<String> parentTypes = this.parentTypes;
    if (oldMapper == null && newMapper.parentFieldMapper().active()) {
        parentTypes = new HashSet<>(parentTypes.size() + 1);
        parentTypes.addAll(this.parentTypes);
        parentTypes.add(mapper.parentFieldMapper().type());
        parentTypes = Collections.unmodifiableSet(parentTypes);
    }

    Map<String, DocumentMapper> mappers = new HashMap<>(this.mappers);
    mappers.put(newMapper.type(), newMapper);
    for (Map.Entry<String, DocumentMapper> entry : mappers.entrySet()) {
        if (entry.getKey().equals(DEFAULT_MAPPING)) {
            continue;
        }
        DocumentMapper m = entry.getValue();
        // apply changes to the field types back
        m = m.updateFieldType(fieldTypes.fullNameToFieldType);
        entry.setValue(m);
    }
    mappers = Collections.unmodifiableMap(mappers);

    // 4. commit the change
    this.mappers = mappers;
    this.fieldTypes = fieldTypes;
    this.hasNested = hasNested;
    this.fullPathObjectMappers = fullPathObjectMappers;
    this.parentTypes = parentTypes;

    assert assertSerialization(newMapper);
    assert assertMappersShareSameFieldType();

    return newMapper;
}
public DocumentMapper(MapperService mapperService, String index, @Nullable Settings indexSettings,
        DocumentMapperParser docMapperParser, RootObjectMapper rootObjectMapper,
        ImmutableMap<String, Object> meta,
        Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> rootMappers,
        List<SourceTransform> sourceTransforms, ReentrantReadWriteLock mappingLock) {
    this.mapperService = mapperService;
    this.type = rootObjectMapper.name();
    this.typeText = new StringAndBytesText(this.type);
    this.mapping = new Mapping(
            Version.indexCreated(indexSettings),
            rootObjectMapper,
            rootMappers.values().toArray(new MetadataFieldMapper[rootMappers.values().size()]),
            sourceTransforms.toArray(new SourceTransform[sourceTransforms.size()]),
            meta);
    this.documentParser = new DocumentParser(index, indexSettings, docMapperParser, this,
            new ReleasableLock(mappingLock.readLock()));

    this.typeFilter = typeMapper().fieldType().termQuery(type, null);
    this.mappingWriteLock = new ReleasableLock(mappingLock.writeLock());
    this.mappingLock = mappingLock;

    if (rootMapper(ParentFieldMapper.class).active()) {
        // mark the routing field mapper as required
        rootMapper(RoutingFieldMapper.class).markAsRequired();
    }

    // collect all the mappers for this type
    List<ObjectMapper> newObjectMappers = new ArrayList<>();
    List<FieldMapper> newFieldMappers = new ArrayList<>();
    for (MetadataFieldMapper metadataMapper : this.mapping.metadataMappers) {
        if (metadataMapper instanceof FieldMapper) {
            newFieldMappers.add((FieldMapper) metadataMapper);
        }
    }
    MapperUtils.collect(this.mapping.root, newObjectMappers, newFieldMappers);

    this.fieldMappers = new DocumentFieldMappers(docMapperParser.analysisService).copyAndAllAll(newFieldMappers);
    this.objectMappers = Maps.uniqueIndex(newObjectMappers, new Function<ObjectMapper, String>() {
        @Override
        public String apply(ObjectMapper mapper) {
            return mapper.fullPath();
        }
    });
    for (ObjectMapper objectMapper : newObjectMappers) {
        if (objectMapper.nested().isNested()) {
            hasNestedObjects = true;
        }
    }

    refreshSource();
}
static ObjectMapper parseObject(ParseContext context, ObjectMapper mapper) throws IOException {
    if (mapper.isEnabled() == false) {
        context.parser().skipChildren();
        return null;
    }
    XContentParser parser = context.parser();

    String currentFieldName = parser.currentName();
    XContentParser.Token token = parser.currentToken();
    if (token == XContentParser.Token.VALUE_NULL) {
        // the object is null ("obj1" : null), simply bail
        return null;
    }

    if (token.isValue()) {
        throw new MapperParsingException("object mapping for [" + mapper.name() + "] tried to parse field ["
                + currentFieldName + "] as object, but found a concrete value");
    }

    ObjectMapper.Nested nested = mapper.nested();
    if (nested.isNested()) {
        context = context.createNestedContext(mapper.fullPath());
        ParseContext.Document nestedDoc = context.doc();
        ParseContext.Document parentDoc = nestedDoc.getParent();
        // pre add the uid field if possible (id was already provided)
        IndexableField uidField = parentDoc.getField(UidFieldMapper.NAME);
        if (uidField != null) {
            // we don't need to add it as a full uid field in nested docs, since we don't need versioning
            // we also rely on this for UidField#loadVersion

            // this is a deeply nested field
            nestedDoc.add(new Field(UidFieldMapper.NAME, uidField.stringValue(),
                    UidFieldMapper.Defaults.NESTED_FIELD_TYPE));
        }
        // the type of the nested doc starts with __, so we can identify that its a nested one in filters
        // note, we don't prefix it with the type of the doc since it allows us to execute a nested query
        // across types (for example, with similar nested objects)
        nestedDoc.add(new Field(TypeFieldMapper.NAME, mapper.nestedTypePathAsString(),
                TypeFieldMapper.Defaults.FIELD_TYPE));
    }

    ContentPath.Type origPathType = context.path().pathType();
    context.path().pathType(mapper.pathType());

    // if we are at the end of the previous object, advance
    if (token == XContentParser.Token.END_OBJECT) {
        token = parser.nextToken();
    }
    if (token == XContentParser.Token.START_OBJECT) {
        // if we are just starting an OBJECT, advance, this is the object we are parsing, we need the name first
        token = parser.nextToken();
    }

    ObjectMapper update = null;
    while (token != XContentParser.Token.END_OBJECT) {
        ObjectMapper newUpdate = null;
        if (token == XContentParser.Token.START_OBJECT) {
            newUpdate = parseObject(context, mapper, currentFieldName);
        } else if (token == XContentParser.Token.START_ARRAY) {
            newUpdate = parseArray(context, mapper, currentFieldName);
        } else if (token == XContentParser.Token.FIELD_NAME) {
            currentFieldName = parser.currentName();
        } else if (token == XContentParser.Token.VALUE_NULL) {
            parseNullValue(context, mapper, currentFieldName);
        } else if (token == null) {
            throw new MapperParsingException("object mapping for [" + mapper.name() + "] tried to parse field ["
                    + currentFieldName + "] as object, but got EOF, has a concrete value been provided to it?");
        } else if (token.isValue()) {
            newUpdate = parseValue(context, mapper, currentFieldName, token);
        }
        token = parser.nextToken();
        if (newUpdate != null) {
            if (update == null) {
                update = newUpdate;
            } else {
                MapperUtils.merge(update, newUpdate);
            }
        }
    }
    // restore the enable path flag
    context.path().pathType(origPathType);
    if (nested.isNested()) {
        ParseContext.Document nestedDoc = context.doc();
        ParseContext.Document parentDoc = nestedDoc.getParent();
        if (nested.isIncludeInParent()) {
            for (IndexableField field : nestedDoc.getFields()) {
                if (field.name().equals(UidFieldMapper.NAME) || field.name().equals(TypeFieldMapper.NAME)) {
                    continue;
                } else {
                    parentDoc.add(field);
                }
            }
        }
        if (nested.isIncludeInRoot()) {
            ParseContext.Document rootDoc = context.rootDoc();
            // don't add it twice, if its included in parent, and we are handling the master doc...
            if (!nested.isIncludeInParent() || parentDoc != rootDoc) {
                for (IndexableField field : nestedDoc.getFields()) {
                    if (field.name().equals(UidFieldMapper.NAME) || field.name().equals(TypeFieldMapper.NAME)) {
                        continue;
                    } else {
                        rootDoc.add(field);
                    }
                }
            }
        }
    }
    return update;
}
// never expose this to the outside world, we need to reparse the doc mapper so we get fresh
// instances of field mappers to properly remove existing doc mapper
private DocumentMapper merge(DocumentMapper mapper, boolean updateAllTypes) {
    try (ReleasableLock lock = mappingWriteLock.acquire()) {
        if (mapper.type().length() == 0) {
            throw new InvalidTypeNameException("mapping type name is empty");
        }
        if (Version.indexCreated(indexSettings).onOrAfter(Version.V_2_0_0_beta1) && mapper.type().length() > 255) {
            throw new InvalidTypeNameException("mapping type name [" + mapper.type()
                    + "] is too long; limit is length 255 but was [" + mapper.type().length() + "]");
        }
        if (mapper.type().charAt(0) == '_') {
            throw new InvalidTypeNameException("mapping type name [" + mapper.type() + "] can't start with '_'");
        }
        if (mapper.type().contains("#")) {
            throw new InvalidTypeNameException("mapping type name [" + mapper.type() + "] should not include '#' in it");
        }
        if (mapper.type().contains(",")) {
            throw new InvalidTypeNameException("mapping type name [" + mapper.type() + "] should not include ',' in it");
        }
        if (Version.indexCreated(indexSettings).onOrAfter(Version.V_2_0_0_beta1)
                && mapper.type().equals(mapper.parentFieldMapper().type())) {
            throw new IllegalArgumentException("The [_parent.type] option can't point to the same type");
        }
        if (typeNameStartsWithIllegalDot(mapper)) {
            if (Version.indexCreated(indexSettings).onOrAfter(Version.V_2_0_0_beta1)) {
                throw new IllegalArgumentException(
                        "mapping type name [" + mapper.type() + "] must not start with a '.'");
            } else {
                logger.warn("Type [{}] starts with a '.', it is recommended not to start a type name with a '.'",
                        mapper.type());
            }
        }
        // we can add new field/object mappers while the old ones are there
        // since we get new instances of those, and when we remove, we remove
        // by instance equality
        DocumentMapper oldMapper = mappers.get(mapper.type());

        if (oldMapper != null) {
            MergeResult result = oldMapper.merge(mapper.mapping(), false, updateAllTypes);
            if (result.hasConflicts()) {
                // TODO: What should we do???
                if (logger.isDebugEnabled()) {
                    logger.debug("merging mapping for type [{}] resulted in conflicts: [{}]",
                            mapper.type(), Arrays.toString(result.buildConflicts()));
                }
            }
            return oldMapper;
        } else {
            List<ObjectMapper> newObjectMappers = new ArrayList<>();
            List<FieldMapper> newFieldMappers = new ArrayList<>();
            for (MetadataFieldMapper metadataMapper : mapper.mapping().metadataMappers) {
                newFieldMappers.add(metadataMapper);
            }
            MapperUtils.collect(mapper.mapping().root, newObjectMappers, newFieldMappers);
            checkNewMappersCompatibility(newObjectMappers, newFieldMappers, updateAllTypes);
            addMappers(newObjectMappers, newFieldMappers);

            for (DocumentTypeListener typeListener : typeListeners) {
                typeListener.beforeCreate(mapper);
            }
            mappers = newMapBuilder(mappers).put(mapper.type(), mapper).map();
            if (mapper.parentFieldMapper().active()) {
                ImmutableSet.Builder<String> parentTypesCopy = ImmutableSet.builder();
                parentTypesCopy.addAll(parentTypes);
                parentTypesCopy.add(mapper.parentFieldMapper().type());
                parentTypes = parentTypesCopy.build();
            }
            assert assertSerialization(mapper);
            return mapper;
        }
    }
}
public void setValue(String value) {
    this.value = MapperUtils.normalize(value);
}