diff --git a/pom.xml b/pom.xml index 28784cc464..4b7c541237 100644 --- a/pom.xml +++ b/pom.xml @@ -1,11 +1,13 @@ - + 4.0.0 org.springframework.data spring-data-mongodb-parent - 3.2.0-SNAPSHOT + 3.2.0-GH-3571-SNAPSHOT pom Spring Data MongoDB diff --git a/spring-data-mongodb-benchmarks/pom.xml b/spring-data-mongodb-benchmarks/pom.xml index f0fbb601c8..0c8d061f09 100644 --- a/spring-data-mongodb-benchmarks/pom.xml +++ b/spring-data-mongodb-benchmarks/pom.xml @@ -7,7 +7,7 @@ org.springframework.data spring-data-mongodb-parent - 3.2.0-SNAPSHOT + 3.2.0-GH-3571-SNAPSHOT ../pom.xml diff --git a/spring-data-mongodb-distribution/pom.xml b/spring-data-mongodb-distribution/pom.xml index 1a17321782..9a9674fb16 100644 --- a/spring-data-mongodb-distribution/pom.xml +++ b/spring-data-mongodb-distribution/pom.xml @@ -14,7 +14,7 @@ org.springframework.data spring-data-mongodb-parent - 3.2.0-SNAPSHOT + 3.2.0-GH-3571-SNAPSHOT ../pom.xml diff --git a/spring-data-mongodb/pom.xml b/spring-data-mongodb/pom.xml index 0248517caf..44f86c4b46 100644 --- a/spring-data-mongodb/pom.xml +++ b/spring-data-mongodb/pom.xml @@ -11,7 +11,7 @@ org.springframework.data spring-data-mongodb-parent - 3.2.0-SNAPSHOT + 3.2.0-GH-3571-SNAPSHOT ../pom.xml diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentAccessor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentAccessor.java index 53dd03f06f..ee29fea509 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentAccessor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentAccessor.java @@ -154,6 +154,7 @@ public Object get(MongoPersistentProperty property) { * @param entity must not be {@literal null}. * @return */ + @Nullable public Object getRawId(MongoPersistentEntity entity) { return entity.hasIdProperty() ? get(entity.getRequiredIdProperty()) : BsonUtils.asMap(document).get("_id"); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java index fd3a832e56..4dcb825606 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java @@ -70,6 +70,7 @@ import org.springframework.data.mongodb.core.mapping.event.AfterConvertEvent; import org.springframework.data.mongodb.core.mapping.event.AfterLoadEvent; import org.springframework.data.mongodb.core.mapping.event.MongoMappingEvent; +import org.springframework.data.mongodb.util.BsonUtils; import org.springframework.data.util.ClassTypeInformation; import org.springframework.data.util.TypeInformation; import org.springframework.lang.Nullable; @@ -102,6 +103,8 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App private static final String INCOMPATIBLE_TYPES = "Cannot convert %1$s of type %2$s into an instance of %3$s! Implement a custom Converter<%2$s, %3$s> and register it with the CustomConversions. 
Parent object was: %4$s"; private static final String INVALID_TYPE_TO_READ = "Expected to read Document %s into type %s but didn't find a PersistentEntity for the latter!"; + public static final ClassTypeInformation BSON = ClassTypeInformation.from(Bson.class); + protected static final Logger LOGGER = LoggerFactory.getLogger(MappingMongoConverter.class); protected final MappingContext, MongoPersistentProperty> mappingContext; @@ -137,9 +140,28 @@ public MappingMongoConverter(DbRefResolver dbRefResolver, this::getWriteTarget); this.idMapper = new QueryMapper(this); + this.spELContext = new SpELContext(DocumentPropertyAccessor.INSTANCE); this.dbRefProxyHandler = new DefaultDbRefProxyHandler(spELContext, mappingContext, - MappingMongoConverter.this::getValueInternal); + (prop, bson, evaluator, path) -> { + + ConversionContext context = getConversionContext(path); + return MappingMongoConverter.this.getValueInternal(context, prop, bson, evaluator); + }); + } + + /** + * Creates a new {@link ConversionContext} given {@link ObjectPath}. + * + * @param path the current {@link ObjectPath}, must not be {@literal null}. + * @return the {@link ConversionContext}. + */ + protected ConversionContext getConversionContext(ObjectPath path) { + + Assert.notNull(path, "ObjectPath must not be null"); + + return new ConversionContext(path, this::readDocument, this::readCollectionOrArray, this::readMap, this::readDBRef, + this::getPotentiallyConvertedSimpleRead); } /** @@ -249,20 +271,30 @@ public S read(Class clazz, final Bson bson) { } protected S read(TypeInformation type, Bson bson) { - return read(type, bson, ObjectPath.ROOT); + return readDocument(getConversionContext(ObjectPath.ROOT), bson, type); } - @Nullable + /** + * Conversion method to materialize an object from a {@link Bson document}. Can be overridden by subclasses. + * + * @param context must not be {@literal null} + * @param bson must not be {@literal null} + * @param typeHint the {@link TypeInformation} to be used to unmarshall this {@link Document}. + * @return the converted object, will never be {@literal null}. + * @since 3.2 + */ @SuppressWarnings("unchecked") - private S read(TypeInformation type, Bson bson, ObjectPath path) { + protected S readDocument(ConversionContext context, Bson bson, + TypeInformation typeHint) { - Assert.notNull(bson, "Bson must not be null!"); + // TODO: Cleanup duplication - TypeInformation typeToUse = typeMapper.readType(bson, type); - Class rawType = typeToUse.getType(); + Document document = bson instanceof BasicDBObject ? 
new Document((BasicDBObject) bson) : (Document) bson; + TypeInformation typeToRead = typeMapper.readType(document, typeHint); + Class rawType = typeToRead.getType(); if (conversions.hasCustomReadTarget(bson.getClass(), rawType)) { - return conversionService.convert(bson, rawType); + return doConvert(bson, rawType); } if (Document.class.isAssignableFrom(rawType)) { @@ -282,26 +314,15 @@ private S read(TypeInformation type, Bson bson, ObjectPath return (S) bson; } - if (typeToUse.isCollectionLike() && bson instanceof List) { - return (S) readCollectionOrArray(typeToUse, (List) bson, path); - } - - if (typeToUse.isMap()) { - return (S) readMap(typeToUse, bson, path); + if (typeToRead.isMap()) { + return context.convert(bson, typeToRead); } - if (bson instanceof Collection) { - throw new MappingException(String.format(INCOMPATIBLE_TYPES, bson, BasicDBList.class, typeToUse.getType(), path)); - } - - if (typeToUse.equals(ClassTypeInformation.OBJECT)) { + if (BSON.isAssignableFrom(typeHint)) { return (S) bson; } - // Retrieve persistent entity info - - Document target = bson instanceof BasicDBObject ? new Document((BasicDBObject) bson) : (Document) bson; - MongoPersistentEntity entity = mappingContext.getPersistentEntity(typeToUse); + MongoPersistentEntity entity = mappingContext.getPersistentEntity(typeToRead); if (entity == null) { @@ -309,29 +330,29 @@ private S read(TypeInformation type, Bson bson, ObjectPath Optional> codec = codecRegistryProvider.getCodecFor(rawType); if (codec.isPresent()) { - return codec.get().decode(new JsonReader(target.toJson()), DecoderContext.builder().build()); + return codec.get().decode(new JsonReader(document.toJson()), DecoderContext.builder().build()); } } - throw new MappingException(String.format(INVALID_TYPE_TO_READ, target, typeToUse.getType())); + throw new MappingException(String.format(INVALID_TYPE_TO_READ, document, rawType)); } - return read((MongoPersistentEntity) entity, target, path); + return read(context, (MongoPersistentEntity) entity, document); } - private ParameterValueProvider getParameterProvider(MongoPersistentEntity entity, - DocumentAccessor source, SpELExpressionEvaluator evaluator, ObjectPath path) { + private ParameterValueProvider getParameterProvider(ConversionContext context, + MongoPersistentEntity entity, DocumentAccessor source, SpELExpressionEvaluator evaluator) { - AssociationAwareMongoDbPropertyValueProvider provider = new AssociationAwareMongoDbPropertyValueProvider(source, - evaluator, path); + AssociationAwareMongoDbPropertyValueProvider provider = new AssociationAwareMongoDbPropertyValueProvider(context, + source, evaluator); PersistentEntityParameterValueProvider parameterProvider = new PersistentEntityParameterValueProvider<>( - entity, provider, path.getCurrentObject()); + entity, provider, context.getPath().getCurrentObject()); - return new ConverterAwareSpELExpressionParameterValueProvider(evaluator, conversionService, parameterProvider, - path); + return new ConverterAwareSpELExpressionParameterValueProvider(context, evaluator, conversionService, + parameterProvider); } - private S read(final MongoPersistentEntity entity, final Document bson, final ObjectPath path) { + private S read(ConversionContext context, MongoPersistentEntity entity, Document bson) { SpELExpressionEvaluator evaluator = new DefaultSpELExpressionEvaluator(bson, spELContext); DocumentAccessor documentAccessor = new DocumentAccessor(bson); @@ -339,20 +360,21 @@ private S read(final MongoPersistentEntity entity, final D PreferredConstructor 
persistenceConstructor = entity.getPersistenceConstructor(); ParameterValueProvider provider = persistenceConstructor != null - && persistenceConstructor.hasParameters() ? getParameterProvider(entity, documentAccessor, evaluator, path) + && persistenceConstructor.hasParameters() ? getParameterProvider(context, entity, documentAccessor, evaluator) : NoOpParameterValueProvider.INSTANCE; EntityInstantiator instantiator = instantiators.getInstantiatorFor(entity); S instance = instantiator.createInstance(entity, provider); if (entity.requiresPropertyPopulation()) { - return populateProperties(entity, documentAccessor, path, evaluator, instance); + return populateProperties(context, entity, documentAccessor, evaluator, instance); } return instance; } - private S populateProperties(MongoPersistentEntity entity, DocumentAccessor documentAccessor, ObjectPath path, + private S populateProperties(ConversionContext context, MongoPersistentEntity entity, + DocumentAccessor documentAccessor, SpELExpressionEvaluator evaluator, S instance) { PersistentPropertyAccessor accessor = new ConvertingPropertyAccessor<>(entity.getPropertyAccessor(instance), @@ -360,13 +382,14 @@ private S populateProperties(MongoPersistentEntity entity, DocumentAccess // Make sure id property is set before all other properties - Object rawId = readAndPopulateIdentifier(accessor, documentAccessor, entity, path, evaluator); - ObjectPath currentPath = path.push(accessor.getBean(), entity, rawId); + Object rawId = readAndPopulateIdentifier(context, accessor, documentAccessor, entity, evaluator); + ObjectPath currentPath = context.getPath().push(accessor.getBean(), entity, rawId); + ConversionContext contextToUse = context.withPath(currentPath); - MongoDbPropertyValueProvider valueProvider = new MongoDbPropertyValueProvider(documentAccessor, evaluator, - currentPath); + MongoDbPropertyValueProvider valueProvider = new MongoDbPropertyValueProvider(contextToUse, documentAccessor, + evaluator); - readProperties(entity, accessor, documentAccessor, valueProvider, currentPath, evaluator); + readProperties(contextToUse, entity, accessor, documentAccessor, valueProvider, evaluator); return accessor.getBean(); } @@ -374,16 +397,10 @@ private S populateProperties(MongoPersistentEntity entity, DocumentAccess /** * Reads the identifier from either the bean backing the {@link PersistentPropertyAccessor} or the source document in * case the identifier has not be populated yet. In this case the identifier is set on the bean for further reference. - * - * @param accessor must not be {@literal null}. - * @param document must not be {@literal null}. - * @param entity must not be {@literal null}. 
- * @param path - * @param evaluator - * @return */ - private Object readAndPopulateIdentifier(PersistentPropertyAccessor accessor, DocumentAccessor document, - MongoPersistentEntity entity, ObjectPath path, SpELExpressionEvaluator evaluator) { + @Nullable + private Object readAndPopulateIdentifier(ConversionContext context, PersistentPropertyAccessor accessor, + DocumentAccessor document, MongoPersistentEntity entity, SpELExpressionEvaluator evaluator) { Object rawId = document.getRawId(entity); @@ -397,22 +414,25 @@ private Object readAndPopulateIdentifier(PersistentPropertyAccessor accessor, return rawId; } - accessor.setProperty(idProperty, readIdValue(path, evaluator, idProperty, rawId)); + accessor.setProperty(idProperty, readIdValue(context, evaluator, idProperty, rawId)); return rawId; } - private Object readIdValue(ObjectPath path, SpELExpressionEvaluator evaluator, MongoPersistentProperty idProperty, + @Nullable + private Object readIdValue(ConversionContext context, SpELExpressionEvaluator evaluator, + MongoPersistentProperty idProperty, Object rawId) { String expression = idProperty.getSpelExpression(); Object resolvedValue = expression != null ? evaluator.evaluate(expression) : rawId; - return resolvedValue != null ? readValue(resolvedValue, idProperty.getTypeInformation(), path) : null; + return resolvedValue != null ? readValue(context, resolvedValue, idProperty.getTypeInformation()) : null; } - private void readProperties(MongoPersistentEntity entity, PersistentPropertyAccessor accessor, - DocumentAccessor documentAccessor, MongoDbPropertyValueProvider valueProvider, ObjectPath currentPath, + private void readProperties(ConversionContext context, MongoPersistentEntity entity, + PersistentPropertyAccessor accessor, DocumentAccessor documentAccessor, + MongoDbPropertyValueProvider valueProvider, SpELExpressionEvaluator evaluator) { DbRefResolverCallback callback = null; @@ -422,7 +442,7 @@ private void readProperties(MongoPersistentEntity entity, PersistentPropertyA if (prop.isAssociation() && !entity.isConstructorArgument(prop)) { if (callback == null) { - callback = getDbRefResolverCallback(documentAccessor, currentPath, evaluator); + callback = getDbRefResolverCallback(context, documentAccessor, evaluator); } readAssociation(prop.getRequiredAssociation(), accessor, documentAccessor, dbRefProxyHandler, callback); @@ -432,7 +452,7 @@ private void readProperties(MongoPersistentEntity entity, PersistentPropertyA if (prop.isEmbedded()) { accessor.setProperty(prop, - readEmbedded(documentAccessor, currentPath, prop, mappingContext.getPersistentEntity(prop))); + readEmbedded(context, documentAccessor, prop, mappingContext.getRequiredPersistentEntity(prop))); continue; } @@ -449,7 +469,7 @@ private void readProperties(MongoPersistentEntity entity, PersistentPropertyA if (prop.isAssociation()) { if (callback == null) { - callback = getDbRefResolverCallback(documentAccessor, currentPath, evaluator); + callback = getDbRefResolverCallback(context, documentAccessor, evaluator); } readAssociation(prop.getRequiredAssociation(), accessor, documentAccessor, dbRefProxyHandler, callback); @@ -460,11 +480,11 @@ private void readProperties(MongoPersistentEntity entity, PersistentPropertyA } } - private DbRefResolverCallback getDbRefResolverCallback(DocumentAccessor documentAccessor, ObjectPath currentPath, + private DbRefResolverCallback getDbRefResolverCallback(ConversionContext context, DocumentAccessor documentAccessor, SpELExpressionEvaluator evaluator) { - return new 
DefaultDbRefResolverCallback(documentAccessor.getDocument(), currentPath, evaluator, - MappingMongoConverter.this::getValueInternal); + return new DefaultDbRefResolverCallback(documentAccessor.getDocument(), context.getPath(), evaluator, + (prop, bson, e, path) -> MappingMongoConverter.this.getValueInternal(context, prop, bson, e)); } private void readAssociation(Association association, PersistentPropertyAccessor accessor, @@ -482,16 +502,17 @@ private void readAssociation(Association association, P } @Nullable - private Object readEmbedded(DocumentAccessor documentAccessor, ObjectPath currentPath, MongoPersistentProperty prop, + private Object readEmbedded(ConversionContext context, DocumentAccessor documentAccessor, + MongoPersistentProperty prop, MongoPersistentEntity embeddedEntity) { if (prop.findAnnotation(Embedded.class).onEmpty().equals(OnEmpty.USE_EMPTY)) { - return read(embeddedEntity, (Document) documentAccessor.getDocument(), currentPath); + return read(context, embeddedEntity, (Document) documentAccessor.getDocument()); } for (MongoPersistentProperty persistentProperty : embeddedEntity) { if (documentAccessor.hasValue(persistentProperty)) { - return read(embeddedEntity, (Document) documentAccessor.getDocument(), currentPath); + return read(context, embeddedEntity, (Document) documentAccessor.getDocument()); } } return null; @@ -536,9 +557,7 @@ public void write(Object obj, Bson bson) { Object target = obj instanceof LazyLoadingProxy ? ((LazyLoadingProxy) obj).getTarget() : obj; writeInternal(target, bson, type); - if (asMap(bson).containsKey("_id") && asMap(bson).get("_id") == null) { - removeFromMap(bson, "_id"); - } + BsonUtils.removeNullId(bson); if (requiresTypeHint(entityType)) { typeMapper.writeType(type, bson); @@ -559,10 +578,6 @@ private boolean requiresTypeHint(Class type) { /** * Internal write conversion method which should be used for nested invocations. - * - * @param obj - * @param bson - * @param typeHint */ @SuppressWarnings("unchecked") protected void writeInternal(@Nullable Object obj, Bson bson, @Nullable TypeInformation typeHint) { @@ -575,8 +590,8 @@ protected void writeInternal(@Nullable Object obj, Bson bson, @Nullable TypeInfo Optional> customTarget = conversions.getCustomWriteTarget(entityType, Document.class); if (customTarget.isPresent()) { - Document result = conversionService.convert(obj, Document.class); - addAllToMap(bson, result); + Document result = doConvert(obj, Document.class); + BsonUtils.addAllToMap(bson, result); return; } @@ -677,12 +692,14 @@ protected void writePropertyInternal(@Nullable Object obj, DocumentAccessor acce } if (valueType.isCollectionLike()) { - List collectionInternal = createCollection(asCollection(obj), prop); + + List collectionInternal = createCollection(BsonUtils.asCollection(obj), prop); accessor.put(prop, collectionInternal); return; } if (valueType.isMap()) { + Bson mapDbObj = createMap((Map) obj, prop); accessor.put(prop, mapDbObj); return; @@ -702,10 +719,8 @@ protected void writePropertyInternal(@Nullable Object obj, DocumentAccessor acce dbRefObj = dbRefObj != null ? 
dbRefObj : createDBRef(obj, prop); - if (null != dbRefObj) { - accessor.put(prop, dbRefObj); - return; - } + accessor.put(prop, dbRefObj); + return; } /* @@ -720,7 +735,7 @@ protected void writePropertyInternal(@Nullable Object obj, DocumentAccessor acce if (basicTargetType.isPresent()) { - accessor.put(prop, conversionService.convert(obj, basicTargetType.get())); + accessor.put(prop, doConvert(obj, basicTargetType.get())); return; } @@ -736,36 +751,18 @@ protected void writePropertyInternal(@Nullable Object obj, DocumentAccessor acce accessor.put(prop, document); } - /** - * Returns given object as {@link Collection}. Will return the {@link Collection} as is if the source is a - * {@link Collection} already, will convert an array into a {@link Collection} or simply create a single element - * collection for everything else. - * - * @param source - * @return - */ - private static Collection asCollection(Object source) { - - if (source instanceof Collection) { - return (Collection) source; - } - - return source.getClass().isArray() ? CollectionUtils.arrayToList(source) : Collections.singleton(source); - } - /** * Writes the given {@link Collection} using the given {@link MongoPersistentProperty} information. * * @param collection must not be {@literal null}. * @param property must not be {@literal null}. - * @return */ protected List createCollection(Collection collection, MongoPersistentProperty property) { if (!property.isDbReference()) { if (property.hasExplicitWriteTarget()) { - return writeCollectionInternal(collection, new TypeInformationWrapper<>(property), new ArrayList<>()); + return writeCollectionInternal(collection, new FieldTypeInformation<>(property), new ArrayList<>()); } return writeCollectionInternal(collection, property.getTypeInformation(), new BasicDBList()); } @@ -790,7 +787,6 @@ protected List createCollection(Collection collection, MongoPersisten * * @param map must not {@literal null}. * @param property must not be {@literal null}. - * @return */ protected Bson createMap(Map map, MongoPersistentProperty property) { @@ -827,7 +823,6 @@ protected Bson createMap(Map map, MongoPersistentProperty proper * @param source the collection to create a {@link Collection} for, must not be {@literal null}. * @param type the {@link TypeInformation} to consider or {@literal null} if unknown. * @param sink the {@link Collection} to write to. - * @return */ @SuppressWarnings("unchecked") private List writeCollectionInternal(Collection source, @Nullable TypeInformation type, @@ -849,7 +844,7 @@ private List writeCollectionInternal(Collection source, @Nullable Typ collection.add(getPotentiallyConvertedSimpleWrite(element, componentType != null ? componentType.getType() : Object.class)); } else if (element instanceof Collection || elementType.isArray()) { - collection.add(writeCollectionInternal(asCollection(element), componentType, new BasicDBList())); + collection.add(writeCollectionInternal(BsonUtils.asCollection(element), componentType, new BasicDBList())); } else { Document document = new Document(); writeInternal(element, document, componentType); @@ -866,7 +861,6 @@ private List writeCollectionInternal(Collection source, @Nullable Typ * @param obj must not be {@literal null}. * @param bson must not be {@literal null}. * @param propertyType must not be {@literal null}. 
- * @return */ protected Bson writeMapInternal(Map obj, Bson bson, TypeInformation propertyType) { @@ -881,14 +875,14 @@ protected Bson writeMapInternal(Map obj, Bson bson, TypeInformat if (val == null || conversions.isSimpleType(val.getClass())) { writeSimpleInternal(val, bson, simpleKey); } else if (val instanceof Collection || val.getClass().isArray()) { - addToMap(bson, simpleKey, - writeCollectionInternal(asCollection(val), propertyType.getMapValueType(), new BasicDBList())); + BsonUtils.addToMap(bson, simpleKey, + writeCollectionInternal(BsonUtils.asCollection(val), propertyType.getMapValueType(), new BasicDBList())); } else { Document document = new Document(); TypeInformation valueTypeInfo = propertyType.isMap() ? propertyType.getMapValueType() : ClassTypeInformation.OBJECT; writeInternal(val, document, valueTypeInfo); - addToMap(bson, simpleKey, document); + BsonUtils.addToMap(bson, simpleKey, document); } } else { throw new MappingException("Cannot use a complex object as a key value."); @@ -903,7 +897,6 @@ protected Bson writeMapInternal(Map obj, Bson bson, TypeInformat * conversions and escape dots from the result as they're not supported as {@link Map} key in MongoDB. * * @param key must not be {@literal null}. - * @return */ private String prepareMapKey(Object key) { @@ -918,8 +911,7 @@ private String prepareMapKey(Object key) { * conversion if none is configured. * * @see #setMapKeyDotReplacement(String) - * @param source - * @return + * @param source must not be {@literal null}. */ protected String potentiallyEscapeMapKey(String source) { @@ -941,7 +933,6 @@ protected String potentiallyEscapeMapKey(String source) { * Returns a {@link String} representation of the given {@link Map} key * * @param key - * @return */ private String potentiallyConvertMapKey(Object key) { @@ -958,8 +949,7 @@ private String potentiallyConvertMapKey(Object key) { * Translates the map key replacements in the given key just read with a dot in case a map key replacement has been * configured. * - * @param source - * @return + * @param source must not be {@literal null}. */ protected String potentiallyUnescapeMapKey(String source) { return mapKeyDotReplacement == null ? source : source.replaceAll(mapKeyDotReplacement, "\\."); @@ -969,13 +959,13 @@ protected String potentiallyUnescapeMapKey(String source) { * Adds custom type information to the given {@link Document} if necessary. That is if the value is not the same as * the one given. This is usually the case if you store a subtype of the actual declared type of the property. * - * @param type + * @param type can be {@literal null}. * @param value must not be {@literal null}. * @param bson must not be {@literal null}. */ protected void addCustomTypeKeyIfNecessary(@Nullable TypeInformation type, Object value, Bson bson) { - Class reference = type != null ? type.getActualType().getType() : Object.class; + Class reference = type != null ? type.getRequiredActualType().getType() : Object.class; Class valueType = ClassUtils.getUserClass(value.getClass()); boolean notTheSameClass = !valueType.equals(reference); @@ -987,15 +977,15 @@ protected void addCustomTypeKeyIfNecessary(@Nullable TypeInformation type, Ob /** * Writes the given simple value to the given {@link Document}. Will store enum names for enum values. * - * @param value + * @param value can be {@literal null}. * @param bson must not be {@literal null}. * @param key must not be {@literal null}. 
*/ - private void writeSimpleInternal(Object value, Bson bson, String key) { - addToMap(bson, key, getPotentiallyConvertedSimpleWrite(value, Object.class)); + private void writeSimpleInternal(@Nullable Object value, Bson bson, String key) { + BsonUtils.addToMap(bson, key, getPotentiallyConvertedSimpleWrite(value, Object.class)); } - private void writeSimpleInternal(Object value, Bson bson, MongoPersistentProperty property) { + private void writeSimpleInternal(@Nullable Object value, Bson bson, MongoPersistentProperty property) { DocumentAccessor accessor = new DocumentAccessor(bson); accessor.put(property, getPotentiallyConvertedSimpleWrite(value, property.hasExplicitWriteTarget() ? property.getFieldType() : Object.class)); @@ -1004,9 +994,6 @@ private void writeSimpleInternal(Object value, Bson bson, MongoPersistentPropert /** * Checks whether we have a custom conversion registered for the given value into an arbitrary simple Mongo type. * Returns the converted value if so. If not, we perform special enum handling or simply return the value as is. - * - * @param value - * @return */ @Nullable private Object getPotentiallyConvertedSimpleWrite(@Nullable Object value, @Nullable Class typeHint) { @@ -1018,14 +1005,14 @@ private Object getPotentiallyConvertedSimpleWrite(@Nullable Object value, @Nulla if (typeHint != null && Object.class != typeHint) { if (conversionService.canConvert(value.getClass(), typeHint)) { - value = conversionService.convert(value, typeHint); + value = doConvert(value, typeHint); } } Optional> customTarget = conversions.getCustomWriteTarget(value.getClass()); if (customTarget.isPresent()) { - return conversionService.convert(value, customTarget.get()); + return doConvert(value, customTarget.get()); } if (ObjectUtils.isArray(value)) { @@ -1033,7 +1020,7 @@ private Object getPotentiallyConvertedSimpleWrite(@Nullable Object value, @Nulla if (value instanceof byte[]) { return value; } - return asCollection(value); + return BsonUtils.asCollection(value); } return Enum.class.isAssignableFrom(value.getClass()) ? ((Enum) value).name() : value; @@ -1041,32 +1028,37 @@ private Object getPotentiallyConvertedSimpleWrite(@Nullable Object value, @Nulla /** * Checks whether we have a custom conversion for the given simple object. Converts the given value if so, applies - * {@link Enum} handling or returns the value as is. + * {@link Enum} handling or returns the value as is. Can be overridden by subclasses. * - * @param value - * @param target must not be {@literal null}. - * @return + * @since 3.2 + */ + protected Object getPotentiallyConvertedSimpleRead(Object value, TypeInformation target) { + return getPotentiallyConvertedSimpleRead(value, target.getType()); + } + + /** + * Checks whether we have a custom conversion for the given simple object. Converts the given value if so, applies + * {@link Enum} handling or returns the value as is. 
*/ - @Nullable @SuppressWarnings({ "rawtypes", "unchecked" }) - private Object getPotentiallyConvertedSimpleRead(@Nullable Object value, @Nullable Class target) { + private Object getPotentiallyConvertedSimpleRead(Object value, @Nullable Class target) { - if (value == null || target == null || ClassUtils.isAssignableValue(target, value)) { + if (target == null || ClassUtils.isAssignableValue(target, value)) { return value; } if (conversions.hasCustomReadTarget(value.getClass(), target)) { - return conversionService.convert(value, target); + return doConvert(value, target); } if (Enum.class.isAssignableFrom(target)) { return Enum.valueOf((Class) target, value.toString()); } - return conversionService.convert(value, target); + return doConvert(value, target); } - protected DBRef createDBRef(Object target, MongoPersistentProperty property) { + protected DBRef createDBRef(Object target, @Nullable MongoPersistentProperty property) { Assert.notNull(target, "Target object must not be null!"); @@ -1102,24 +1094,26 @@ protected DBRef createDBRef(Object target, MongoPersistentProperty property) { } @Nullable - private Object getValueInternal(MongoPersistentProperty prop, Bson bson, SpELExpressionEvaluator evaluator, - ObjectPath path) { - return new MongoDbPropertyValueProvider(bson, evaluator, path).getPropertyValue(prop); + private Object getValueInternal(ConversionContext context, MongoPersistentProperty prop, Bson bson, + SpELExpressionEvaluator evaluator) { + return new MongoDbPropertyValueProvider(context, bson, evaluator).getPropertyValue(prop); } /** - * Reads the given {@link BasicDBList} into a collection of the given {@link TypeInformation}. + * Reads the given {@link Collection} into a collection of the given {@link TypeInformation}. Can be overridden by + * subclasses. * - * @param targetType must not be {@literal null}. - * @param source must not be {@literal null}. - * @param path must not be {@literal null}. + * @param context must not be {@literal null} + * @param source must not be {@literal null} + * @param targetType the {@link Map} {@link TypeInformation} to be used to unmarshall this {@link Document}. + * @since 3.2 * @return the converted {@link Collection} or array, will never be {@literal null}. */ @SuppressWarnings("unchecked") - private Object readCollectionOrArray(TypeInformation targetType, Collection source, ObjectPath path) { + protected Object readCollectionOrArray(ConversionContext context, Collection source, + TypeInformation targetType) { Assert.notNull(targetType, "Target type must not be null!"); - Assert.notNull(path, "Object path must not be null!"); Class collectionType = targetType.isSubTypeOf(Collection.class) // ? targetType.getType() // @@ -1140,33 +1134,12 @@ private Object readCollectionOrArray(TypeInformation targetType, Collection objects = bulkReadAndConvertDBRefs((List) source, componentType, path, rawComponentType); + List objects = bulkReadAndConvertDBRefs(context, (List) source, componentType); return getPotentiallyConvertedSimpleRead(objects, targetType.getType()); } for (Object element : source) { - - if (element instanceof DBRef) { - items.add(DBRef.class.equals(rawComponentType) ? 
element - : readAndConvertDBRef((DBRef) element, componentType, path, rawComponentType)); - } else if (element instanceof Document) { - items.add(read(componentType, (Document) element, path)); - } else if (element instanceof BasicDBObject) { - items.add(read(componentType, (BasicDBObject) element, path)); - } else { - - if (!Object.class.equals(rawComponentType) && element instanceof Collection) { - if (!rawComponentType.isArray() && !ClassUtils.isAssignable(Iterable.class, rawComponentType)) { - throw new MappingException( - String.format(INCOMPATIBLE_TYPES, element, element.getClass(), rawComponentType, path)); - } - } - if (element instanceof List) { - items.add(readCollectionOrArray(componentType, (Collection) element, path)); - } else { - items.add(getPotentiallyConvertedSimpleRead(element, rawComponentType)); - } - } + items.add(context.convert(element, componentType)); } return getPotentiallyConvertedSimpleRead(items, targetType.getType()); @@ -1179,26 +1152,42 @@ private Object readCollectionOrArray(TypeInformation targetType, Collection readMap(TypeInformation type, Bson bson, ObjectPath path) { + return readMap(getConversionContext(path), bson, type); + } + + /** + * Reads the given {@link Document} into a {@link Map}. will recursively resolve nested {@link Map}s as well. Can be + * overridden by subclasses. + * + * @param context must not be {@literal null} + * @param bson must not be {@literal null} + * @param targetType the {@link Map} {@link TypeInformation} to be used to unmarshall this {@link Document}. + * @return the converted {@link Map}, will never be {@literal null}. + * @since 3.2 + */ + protected Map readMap(ConversionContext context, Bson bson, TypeInformation targetType) { Assert.notNull(bson, "Document must not be null!"); - Assert.notNull(path, "Object path must not be null!"); + Assert.notNull(targetType, "TypeInformation must not be null!"); - Class mapType = typeMapper.readType(bson, type).getType(); + Class mapType = typeMapper.readType(bson, targetType).getType(); - TypeInformation keyType = type.getComponentType(); - TypeInformation valueType = type.getMapValueType(); + TypeInformation keyType = targetType.getComponentType(); + TypeInformation valueType = targetType.getMapValueType() == null ? ClassTypeInformation.OBJECT + : targetType.getRequiredMapValueType(); - Class rawKeyType = keyType != null ? keyType.getType() : null; - Class rawValueType = valueType != null ? valueType.getType() : null; + Class rawKeyType = keyType != null ? keyType.getType() : Object.class; + Class rawValueType = valueType.getType(); - Map sourceMap = asMap(bson); + Map sourceMap = BsonUtils.asMap(bson); Map map = CollectionFactory.createMap(mapType, rawKeyType, sourceMap.keySet().size()); if (!DBRef.class.equals(rawValueType) && isCollectionOfDbRefWhereBulkFetchIsPossible(sourceMap.values())) { - bulkReadAndConvertDBRefMapIntoTarget(valueType, rawValueType, sourceMap, map); + bulkReadAndConvertDBRefMapIntoTarget(context, valueType, sourceMap, map); return map; } @@ -1210,92 +1199,17 @@ protected Map readMap(TypeInformation type, Bson bson, Object Object key = potentiallyUnescapeMapKey(entry.getKey()); - if (rawKeyType != null && !rawKeyType.isAssignableFrom(key.getClass())) { - key = conversionService.convert(key, rawKeyType); + if (!rawKeyType.isAssignableFrom(key.getClass())) { + key = doConvert(key, rawKeyType); } Object value = entry.getValue(); - TypeInformation defaultedValueType = valueType != null ? 
valueType : ClassTypeInformation.OBJECT; - - if (value instanceof Document) { - map.put(key, read(defaultedValueType, (Document) value, path)); - } else if (value instanceof BasicDBObject) { - map.put(key, read(defaultedValueType, (BasicDBObject) value, path)); - } else if (value instanceof DBRef) { - map.put(key, DBRef.class.equals(rawValueType) ? value - : readAndConvertDBRef((DBRef) value, defaultedValueType, ObjectPath.ROOT, rawValueType)); - } else if (value instanceof List) { - map.put(key, readCollectionOrArray(valueType != null ? valueType : ClassTypeInformation.LIST, - (List) value, path)); - } else { - map.put(key, getPotentiallyConvertedSimpleRead(value, rawValueType)); - } + map.put(key, context.convert(value, valueType)); } return map; } - @SuppressWarnings("unchecked") - private static Map asMap(Bson bson) { - - if (bson instanceof Document) { - return (Document) bson; - } - - if (bson instanceof DBObject) { - return ((DBObject) bson).toMap(); - } - - throw new IllegalArgumentException( - String.format("Cannot read %s. as map. Given Bson must be a Document or DBObject!", bson.getClass())); - } - - private static void addToMap(Bson bson, String key, @Nullable Object value) { - - if (bson instanceof Document) { - ((Document) bson).put(key, value); - return; - } - if (bson instanceof DBObject) { - ((DBObject) bson).put(key, value); - return; - } - throw new IllegalArgumentException(String.format( - "Cannot add key/value pair to %s. as map. Given Bson must be a Document or DBObject!", bson.getClass())); - } - - private static void addAllToMap(Bson bson, Map value) { - - if (bson instanceof Document) { - ((Document) bson).putAll(value); - return; - } - - if (bson instanceof DBObject) { - ((DBObject) bson).putAll(value); - return; - } - - throw new IllegalArgumentException( - String.format("Cannot add all to %s. Given Bson must be a Document or DBObject.", bson.getClass())); - } - - private static void removeFromMap(Bson bson, String key) { - - if (bson instanceof Document) { - ((Document) bson).remove(key); - return; - } - - if (bson instanceof DBObject) { - ((DBObject) bson).removeField(key); - return; - } - - throw new IllegalArgumentException( - String.format("Cannot remove from %s. Given Bson must be a Document or DBObject.", bson.getClass())); - } - /* * (non-Javadoc) * @see org.springframework.data.mongodb.core.convert.MongoWriter#convertToMongoType(java.lang.Object, org.springframework.data.util.TypeInformation) @@ -1303,7 +1217,7 @@ private static void removeFromMap(Bson bson, String key) { @Nullable @SuppressWarnings("unchecked") @Override - public Object convertToMongoType(@Nullable Object obj, TypeInformation typeInformation) { + public Object convertToMongoType(@Nullable Object obj, @Nullable TypeInformation typeInformation) { if (obj == null) { return null; @@ -1311,7 +1225,7 @@ public Object convertToMongoType(@Nullable Object obj, TypeInformation typeIn Optional> target = conversions.getCustomWriteTarget(obj.getClass()); if (target.isPresent()) { - return conversionService.convert(obj, target.get()); + return doConvert(obj, target.get()); } if (conversions.isSimpleType(obj.getClass())) { @@ -1386,7 +1300,6 @@ public Object convertToMongoType(@Nullable Object obj, TypeInformation typeIn return !obj.getClass().equals(typeInformation.getType()) ? 
newDocument : removeTypeInfo(newDocument, true); } - @Nullable @Override public Object convertToMongoType(@Nullable Object obj, MongoPersistentEntity entity) { Document newDocument = new Document(); @@ -1394,7 +1307,8 @@ public Object convertToMongoType(@Nullable Object obj, MongoPersistentEntity ent return newDocument; } - public List maybeConvertList(Iterable source, TypeInformation typeInformation) { + // TODO: hide + public List maybeConvertList(Iterable source, @Nullable TypeInformation typeInformation) { List newDbl = new ArrayList<>(); @@ -1458,203 +1372,52 @@ private Object removeTypeInfo(Object object, boolean recursively) { return document; } - /** - * {@link PropertyValueProvider} to evaluate a SpEL expression if present on the property or simply accesses the field - * of the configured source {@link Document}. - * - * @author Oliver Gierke - * @author Mark Paluch - * @author Christoph Strobl - */ - class MongoDbPropertyValueProvider implements PropertyValueProvider { - - final DocumentAccessor accessor; - final SpELExpressionEvaluator evaluator; - final ObjectPath path; - - /** - * Creates a new {@link MongoDbPropertyValueProvider} for the given source, {@link SpELExpressionEvaluator} and - * {@link ObjectPath}. - * - * @param source must not be {@literal null}. - * @param evaluator must not be {@literal null}. - * @param path must not be {@literal null}. - */ - MongoDbPropertyValueProvider(Bson source, SpELExpressionEvaluator evaluator, ObjectPath path) { - this(new DocumentAccessor(source), evaluator, path); - } - - /** - * Creates a new {@link MongoDbPropertyValueProvider} for the given source, {@link SpELExpressionEvaluator} and - * {@link ObjectPath}. - * - * @param accessor must not be {@literal null}. - * @param evaluator must not be {@literal null}. - * @param path must not be {@literal null}. - */ - MongoDbPropertyValueProvider(DocumentAccessor accessor, SpELExpressionEvaluator evaluator, ObjectPath path) { - - Assert.notNull(accessor, "DocumentAccessor must no be null!"); - Assert.notNull(evaluator, "SpELExpressionEvaluator must not be null!"); - Assert.notNull(path, "ObjectPath must not be null!"); - - this.accessor = accessor; - this.evaluator = evaluator; - this.path = path; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.convert.PropertyValueProvider#getPropertyValue(org.springframework.data.mapping.PersistentProperty) - */ - @Nullable - public T getPropertyValue(MongoPersistentProperty property) { - - String expression = property.getSpelExpression(); - Object value = expression != null ? evaluator.evaluate(expression) : accessor.get(property); - - if (value == null) { - return null; - } - - return readValue(value, property.getTypeInformation(), path); - } - } - - /** - * {@link PropertyValueProvider} that is aware of {@link MongoPersistentProperty#isAssociation()} and that delegates - * resolution to {@link DbRefResolver}. - * - * @author Mark Paluch - * @author Christoph Strobl - * @since 2.1 - */ - class AssociationAwareMongoDbPropertyValueProvider extends MongoDbPropertyValueProvider { - - /** - * Creates a new {@link AssociationAwareMongoDbPropertyValueProvider} for the given source, - * {@link SpELExpressionEvaluator} and {@link ObjectPath}. - * - * @param source must not be {@literal null}. - * @param evaluator must not be {@literal null}. - * @param path must not be {@literal null}. 
- */ - AssociationAwareMongoDbPropertyValueProvider(DocumentAccessor source, SpELExpressionEvaluator evaluator, - ObjectPath path) { - super(source, evaluator, path); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.convert.PropertyValueProvider#getPropertyValue(org.springframework.data.mapping.PersistentProperty) - */ - @Nullable - @SuppressWarnings("unchecked") - public T getPropertyValue(MongoPersistentProperty property) { - - if (property.isDbReference() && property.getDBRef().lazy()) { - - Object rawRefValue = accessor.get(property); - if (rawRefValue == null) { - return null; - } - - DbRefResolverCallback callback = new DefaultDbRefResolverCallback(accessor.getDocument(), path, evaluator, - MappingMongoConverter.this::getValueInternal); - - DBRef dbref = rawRefValue instanceof DBRef ? (DBRef) rawRefValue : null; - return (T) dbRefResolver.resolveDbRef(property, dbref, callback, dbRefProxyHandler); - } - - return super.getPropertyValue(property); - } - } - - /** - * Extension of {@link SpELExpressionParameterValueProvider} to recursively trigger value conversion on the raw - * resolved SpEL value. - * - * @author Oliver Gierke - */ - private class ConverterAwareSpELExpressionParameterValueProvider - extends SpELExpressionParameterValueProvider { - - private final ObjectPath path; - - /** - * Creates a new {@link ConverterAwareSpELExpressionParameterValueProvider}. - * - * @param evaluator must not be {@literal null}. - * @param conversionService must not be {@literal null}. - * @param delegate must not be {@literal null}. - */ - public ConverterAwareSpELExpressionParameterValueProvider(SpELExpressionEvaluator evaluator, - ConversionService conversionService, ParameterValueProvider delegate, - ObjectPath path) { - - super(evaluator, conversionService, delegate); - this.path = path; - } + @Nullable + @SuppressWarnings("unchecked") + T readValue(ConversionContext context, @Nullable Object value, TypeInformation type) { - /* - * (non-Javadoc) - * @see org.springframework.data.mapping.model.SpELExpressionParameterValueProvider#potentiallyConvertSpelValue(java.lang.Object, org.springframework.data.mapping.PreferredConstructor.Parameter) - */ - @Override - protected T potentiallyConvertSpelValue(Object object, Parameter parameter) { - return readValue(object, parameter.getType(), path); + if (value == null) { + return null; } - } - @Nullable - @SuppressWarnings("unchecked") - T readValue(Object value, TypeInformation type, ObjectPath path) { + Assert.notNull(type, "TypeInformation must not be null"); Class rawType = type.getType(); if (conversions.hasCustomReadTarget(value.getClass(), rawType)) { - return (T) conversionService.convert(value, rawType); + return (T) doConvert(value, rawType); } else if (value instanceof DBRef) { - return potentiallyReadOrResolveDbRef((DBRef) value, type, path, rawType); - } else if (value instanceof List) { - return (T) readCollectionOrArray(type, (List) value, path); - } else if (value instanceof Document) { - return (T) read(type, (Document) value, path); - } else if (value instanceof DBObject) { - return (T) read(type, (BasicDBObject) value, path); - } else { - return (T) getPotentiallyConvertedSimpleRead(value, rawType); + return (T) readDBRef(context, (DBRef) value, type); } + + return (T) context.convert(value, type); } @Nullable - @SuppressWarnings("unchecked") - private T potentiallyReadOrResolveDbRef(@Nullable DBRef dbref, TypeInformation type, ObjectPath path, - Class rawType) { + private Object readDBRef(ConversionContext context, 
@Nullable DBRef dbref, TypeInformation type) { - if (rawType.equals(DBRef.class)) { - return (T) dbref; + if (type.getType().equals(DBRef.class)) { + return dbref; } - T object = dbref == null ? null : path.getPathItem(dbref.getId(), dbref.getCollectionName(), (Class) rawType); - return object != null ? object : readAndConvertDBRef(dbref, type, path, rawType); - } + ObjectPath path = context.getPath(); - @Nullable - private T readAndConvertDBRef(@Nullable DBRef dbref, TypeInformation type, ObjectPath path, - @Nullable Class rawType) { + Object object = dbref == null ? null : path.getPathItem(dbref.getId(), dbref.getCollectionName(), type.getType()); + if (object != null) { + return object; + } - List result = bulkReadAndConvertDBRefs(Collections.singletonList(dbref), type, path, rawType); + List result = bulkReadAndConvertDBRefs(context, Collections.singletonList(dbref), type); return CollectionUtils.isEmpty(result) ? null : result.iterator().next(); } @SuppressWarnings({ "unchecked", "rawtypes" }) - private void bulkReadAndConvertDBRefMapIntoTarget(TypeInformation valueType, Class rawValueType, + private void bulkReadAndConvertDBRefMapIntoTarget(ConversionContext context, TypeInformation valueType, Map sourceMap, Map targetMap) { LinkedHashMap referenceMap = new LinkedHashMap<>(sourceMap); - List convertedObjects = bulkReadAndConvertDBRefs((List) new ArrayList(referenceMap.values()), - valueType, ObjectPath.ROOT, rawValueType); + List convertedObjects = bulkReadAndConvertDBRefs(context.withPath(ObjectPath.ROOT), + (List) new ArrayList(referenceMap.values()), valueType); int index = 0; for (String key : referenceMap.keySet()) { @@ -1664,8 +1427,7 @@ private void bulkReadAndConvertDBRefMapIntoTarget(TypeInformation valueType, } @SuppressWarnings("unchecked") - private List bulkReadAndConvertDBRefs(List dbrefs, TypeInformation type, ObjectPath path, - @Nullable Class rawType) { + private List bulkReadAndConvertDBRefs(ConversionContext context, List dbrefs, TypeInformation type) { if (CollectionUtils.isEmpty(dbrefs)) { return Collections.emptyList(); @@ -1684,8 +1446,8 @@ private List bulkReadAndConvertDBRefs(List dbrefs, TypeInformation if (document != null) { maybeEmitEvent( - new AfterLoadEvent<>(document, (Class) (rawType != null ? rawType : Object.class), collectionName)); - target = (T) read(type, document, path); + new AfterLoadEvent<>(document, (Class) type.getType(), collectionName)); + target = (T) readDocument(context, document, type); } if (target != null) { @@ -1772,6 +1534,11 @@ public MappingMongoConverter with(MongoDatabaseFactory dbFactory) { return target; } + @SuppressWarnings("ConstantConditions") + private T doConvert(Object value, Class target) { + return conversionService.convert(value, target); + } + /** * Returns whether the given {@link Iterable} contains {@link DBRef} instances all pointing to the same collection. * @@ -1800,6 +1567,160 @@ private static boolean isCollectionOfDbRefWhereBulkFetchIsPossible(Iterable s return true; } + /** + * {@link PropertyValueProvider} to evaluate a SpEL expression if present on the property or simply accesses the field + * of the configured source {@link Document}. 
+ * + * @author Oliver Gierke + * @author Mark Paluch + * @author Christoph Strobl + */ + static class MongoDbPropertyValueProvider implements PropertyValueProvider { + + final ConversionContext context; + final DocumentAccessor accessor; + final SpELExpressionEvaluator evaluator; + + /** + * Creates a new {@link MongoDbPropertyValueProvider} for the given source, {@link SpELExpressionEvaluator} and + * {@link ObjectPath}. + * + * @param context must not be {@literal null}. + * @param source must not be {@literal null}. + * @param evaluator must not be {@literal null}. + */ + MongoDbPropertyValueProvider(ConversionContext context, Bson source, SpELExpressionEvaluator evaluator) { + this(context, new DocumentAccessor(source), evaluator); + } + + /** + * Creates a new {@link MongoDbPropertyValueProvider} for the given source, {@link SpELExpressionEvaluator} and + * {@link ObjectPath}. + * + * @param context must not be {@literal null}. + * @param accessor must not be {@literal null}. + * @param evaluator must not be {@literal null}. + */ + MongoDbPropertyValueProvider(ConversionContext context, DocumentAccessor accessor, + SpELExpressionEvaluator evaluator) { + + Assert.notNull(context, "ConversionContext must no be null!"); + Assert.notNull(accessor, "DocumentAccessor must no be null!"); + Assert.notNull(evaluator, "SpELExpressionEvaluator must not be null!"); + + this.context = context; + this.accessor = accessor; + this.evaluator = evaluator; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.convert.PropertyValueProvider#getPropertyValue(org.springframework.data.mapping.PersistentProperty) + */ + @Nullable + @SuppressWarnings("unchecked") + public T getPropertyValue(MongoPersistentProperty property) { + + String expression = property.getSpelExpression(); + Object value = expression != null ? evaluator.evaluate(expression) : accessor.get(property); + + if (value == null) { + return null; + } + + return (T) context.convert(value, property.getTypeInformation()); + } + } + + /** + * {@link PropertyValueProvider} that is aware of {@link MongoPersistentProperty#isAssociation()} and that delegates + * resolution to {@link DbRefResolver}. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.1 + */ + class AssociationAwareMongoDbPropertyValueProvider extends MongoDbPropertyValueProvider { + + /** + * Creates a new {@link AssociationAwareMongoDbPropertyValueProvider} for the given source, + * {@link SpELExpressionEvaluator} and {@link ObjectPath}. + * + * @param source must not be {@literal null}. + * @param evaluator must not be {@literal null}. + */ + AssociationAwareMongoDbPropertyValueProvider(ConversionContext context, DocumentAccessor source, + SpELExpressionEvaluator evaluator) { + super(context, source, evaluator); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.convert.PropertyValueProvider#getPropertyValue(org.springframework.data.mapping.PersistentProperty) + */ + @Nullable + @SuppressWarnings("unchecked") + public T getPropertyValue(MongoPersistentProperty property) { + + if (property.isDbReference() && property.getDBRef().lazy()) { + + Object rawRefValue = accessor.get(property); + if (rawRefValue == null) { + return null; + } + + DbRefResolverCallback callback = new DefaultDbRefResolverCallback(accessor.getDocument(), context.getPath(), + evaluator, (prop, bson, evaluator, path) -> MappingMongoConverter.this.getValueInternal(context, prop, bson, + evaluator)); + + DBRef dbref = rawRefValue instanceof DBRef ? 
(DBRef) rawRefValue : null; + return (T) dbRefResolver.resolveDbRef(property, dbref, callback, dbRefProxyHandler); + } + + return super.getPropertyValue(property); + } + } + + /** + * Extension of {@link SpELExpressionParameterValueProvider} to recursively trigger value conversion on the raw + * resolved SpEL value. + * + * @author Oliver Gierke + */ + private static class ConverterAwareSpELExpressionParameterValueProvider + extends SpELExpressionParameterValueProvider { + + private final ConversionContext context; + + /** + * Creates a new {@link ConverterAwareSpELExpressionParameterValueProvider}. + * + * @param context must not be {@literal null}. + * @param evaluator must not be {@literal null}. + * @param conversionService must not be {@literal null}. + * @param delegate must not be {@literal null}. + */ + public ConverterAwareSpELExpressionParameterValueProvider(ConversionContext context, + SpELExpressionEvaluator evaluator, ConversionService conversionService, + ParameterValueProvider delegate) { + + super(evaluator, conversionService, delegate); + + Assert.notNull(context, "ConversionContext must no be null!"); + + this.context = context; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mapping.model.SpELExpressionParameterValueProvider#potentiallyConvertSpelValue(java.lang.Object, org.springframework.data.mapping.PreferredConstructor.Parameter) + */ + @Override + protected T potentiallyConvertSpelValue(Object object, Parameter parameter) { + return context.convert(object, parameter.getType()); + } + } + /** * Marker class used to indicate we have a non root document object here that might be used within an update - so we * need to preserve type hints for potential nested elements but need to remove it on top level. @@ -1821,15 +1742,21 @@ public T getParameterValue(Parameter parameter) } } - private static class TypeInformationWrapper implements TypeInformation { + /** + * {@link TypeInformation} considering {@link MongoPersistentProperty#getFieldType()} as type source. + * + * @param + */ + private static class FieldTypeInformation implements TypeInformation { - private MongoPersistentProperty persistentProperty; - private TypeInformation delegate; + private final MongoPersistentProperty persistentProperty; + private final TypeInformation delegate; - public TypeInformationWrapper(MongoPersistentProperty property) { + @SuppressWarnings("unchecked") + public FieldTypeInformation(MongoPersistentProperty property) { this.persistentProperty = property; - this.delegate = property.getTypeInformation(); + this.delegate = (TypeInformation) property.getTypeInformation(); } @Override @@ -1863,7 +1790,7 @@ public org.springframework.data.util.TypeInformation getMapValueType() { } @Override - public Class getType() { + public Class getType() { return delegate.getType(); } @@ -1903,8 +1830,125 @@ public List> getTypeArguments() } @Override - public org.springframework.data.util.TypeInformation specialize(ClassTypeInformation type) { + public org.springframework.data.util.TypeInformation specialize(ClassTypeInformation type) { return delegate.specialize(type); } } + + /** + * Conversion context holding references to simple {@link ValueConverter} and {@link ContainerValueConverter}. + * Entrypoint for recursive conversion of {@link Document} and other types. 
+ * + * @since 3.2 + */ + protected static class ConversionContext { + + private final ObjectPath path; + private final ContainerValueConverter documentConverter; + private final ContainerValueConverter> collectionConverter; + private final ContainerValueConverter mapConverter; + private final ContainerValueConverter dbRefConverter; + private final ValueConverter elementConverter; + + ConversionContext(ObjectPath path, ContainerValueConverter documentConverter, + ContainerValueConverter> collectionConverter, ContainerValueConverter mapConverter, + ContainerValueConverter dbRefConverter, ValueConverter elementConverter) { + + this.path = path; + this.documentConverter = documentConverter; + this.collectionConverter = collectionConverter; + this.mapConverter = mapConverter; + this.dbRefConverter = dbRefConverter; + this.elementConverter = elementConverter; + } + + /** + * Converts a source object into {@link TypeInformation target}. + * + * @param source must not be {@literal null}. + * @param typeHint must not be {@literal null}. + * @return the converted object. + */ + @SuppressWarnings("unchecked") + public S convert(Object source, TypeInformation typeHint) { + + Assert.notNull(typeHint, "TypeInformation must not be null"); + + if (source instanceof Collection) { + + Class rawType = typeHint.getType(); + if (!Object.class.equals(rawType)) { + if (!rawType.isArray() && !ClassUtils.isAssignable(Iterable.class, rawType)) { + throw new MappingException( + String.format(INCOMPATIBLE_TYPES, source, source.getClass(), rawType, getPath())); + } + } + + if (typeHint.isCollectionLike() || typeHint.getType().isAssignableFrom(Collection.class)) { + return (S) collectionConverter.convert(this, (Collection) source, typeHint); + } + } + + if (typeHint.isMap()) { + return (S) mapConverter.convert(this, (Bson) source, typeHint); + } + + if (source instanceof DBRef) { + return (S) dbRefConverter.convert(this, (DBRef) source, typeHint); + } + + if (source instanceof Collection) { + throw new MappingException( + String.format(INCOMPATIBLE_TYPES, source, BasicDBList.class, typeHint.getType(), getPath())); + } + + if (source instanceof Bson) { + return (S) documentConverter.convert(this, (Bson) source, typeHint); + } + + return (S) elementConverter.convert(source, typeHint); + } + + /** + * Create a new {@link ConversionContext} with {@link ObjectPath currentPath} applied. + * + * @param currentPath must not be {@literal null}. + * @return a new {@link ConversionContext} with {@link ObjectPath currentPath} applied. + */ + public ConversionContext withPath(ObjectPath currentPath) { + + Assert.notNull(currentPath, "ObjectPath must not be null"); + + return new ConversionContext(currentPath, documentConverter, collectionConverter, mapConverter, dbRefConverter, + elementConverter); + } + + public ObjectPath getPath() { + return path; + } + + /** + * Converts a simple {@code source} value into {@link TypeInformation the target type}. + * + * @param + */ + interface ValueConverter { + + Object convert(T source, TypeInformation typeHint); + + } + + /** + * Converts a container {@code source} value into {@link TypeInformation the target type}. Containers may + * recursively apply conversions for entities, collections, maps, etc. 
+ * + * @param + */ + interface ContainerValueConverter { + + Object convert(ConversionContext context, T source, TypeInformation typeHint); + + } + + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/BsonUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/BsonUtils.java index 5b036072bf..d3255437dc 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/BsonUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/BsonUtils.java @@ -17,12 +17,14 @@ import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.Date; import java.util.Map; import java.util.StringJoiner; import java.util.function.Function; import java.util.stream.StreamSupport; +import org.bson.BSONObject; import org.bson.BsonBinary; import org.bson.BsonBoolean; import org.bson.BsonDouble; @@ -36,11 +38,11 @@ import org.bson.conversions.Bson; import org.bson.json.JsonParseException; import org.bson.types.ObjectId; - import org.springframework.core.convert.converter.Converter; import org.springframework.data.mongodb.CodecRegistryProvider; import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.CollectionUtils; import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; @@ -50,6 +52,8 @@ import com.mongodb.MongoClientSettings; /** + * Internal API for operations on {@link Bson} elements that can be either {@link Document} or {@link DBObject}. + * * @author Christoph Strobl * @author Mark Paluch * @since 2.0 @@ -62,6 +66,47 @@ public static T get(Bson bson, String key) { return (T) asMap(bson).get(key); } + /** + * Remove {@code _id : null} from the given {@link Bson} if present. + * + * @param bson must not be {@literal null}. + * @since 2.5 + */ + public static void removeNullId(Bson bson) { + + if (!contains(bson, "_id", null)) { + return; + } + + removeFrom(bson, "_id"); + } + + /** + * Check if a given entry (key/value pair) is present in the given {@link Bson}. + * + * @param bson must not be {@literal null}. + * @param key must not be {@literal null}. + * @param value can be {@literal null}. + * @return {@literal true} if (key/value pair) is present. 
+ * @since 2.5 + */ + public static boolean contains(Bson bson, String key, @Nullable Object value) { + + if (bson instanceof Document) { + + Document doc = (Document) bson; + return doc.containsKey(key) && ObjectUtils.nullSafeEquals(doc.get(key), value); + } + if (bson instanceof BSONObject) { + + BSONObject bsonObject = (BSONObject) bson; + return bsonObject.containsField(key) && ObjectUtils.nullSafeEquals(bsonObject.get(key), value); + } + + Map map = asMap(bson); + return map.containsKey(key) && ObjectUtils.nullSafeEquals(map.get(key), value); + } + public static Map asMap(Bson bson) { if (bson instanceof Document) { @@ -70,6 +115,9 @@ public static Map asMap(Bson bson) { if (bson instanceof BasicDBObject) { return ((BasicDBObject) bson); } + if (bson instanceof DBObject) { + return ((DBObject) bson).toMap(); + } return (Map) bson.toBsonDocument(Document.class, MongoClientSettings.getDefaultCodecRegistry()); } @@ -77,14 +125,68 @@ public static Map asMap(Bson bson) { public static void addToMap(Bson bson, String key, @Nullable Object value) { if (bson instanceof Document) { + ((Document) bson).put(key, value); return; } - if (bson instanceof DBObject) { - ((DBObject) bson).put(key, value); + if (bson instanceof BSONObject) { + + ((BSONObject) bson).put(key, value); return; } - throw new IllegalArgumentException("o_O what's that? Cannot add value to " + bson.getClass()); + + throw new IllegalArgumentException(String.format( + "Cannot add key/value pair to %s as map. Given Bson must be a Document or BSONObject!", bson.getClass())); + } + + /** + * Add all entries from the given {@literal source} {@link Map} to the {@literal target}. + * + * @param target must not be {@literal null}. + * @param source must not be {@literal null}. + * @since 2.5 + */ + public static void addAllToMap(Bson target, Map source) { + + if (target instanceof Document) { + + ((Document) target).putAll(source); + return; + } + + if (target instanceof BSONObject) { + + ((BSONObject) target).putAll(source); + return; + } + + throw new IllegalArgumentException( + String.format("Cannot add all to %s. Given Bson must be a Document or BSONObject.", target.getClass())); + } + + /** + * Remove the given {@literal key} from the {@link Bson} value. + * + * @param bson must not be {@literal null}. + * @param key must not be {@literal null}. + * @since 2.5 + */ + static void removeFrom(Bson bson, String key) { + + if (bson instanceof Document) { + + ((Document) bson).remove(key); + return; + } + + if (bson instanceof BSONObject) { + + ((BSONObject) bson).removeField(key); + return; + } + + throw new IllegalArgumentException( + String.format("Cannot remove from %s. Given Bson must be a Document or BSONObject.", bson.getClass())); } /** @@ -282,6 +384,23 @@ public static Document parse(String json, @Nullable CodecRegistryProvider codecR .orElseGet(() -> new DocumentCodec(codecRegistryProvider.getCodecRegistry()))); } + /** + * Returns the given object as {@link Collection}. Will return the {@link Collection} as is if the source is a + * {@link Collection} already, will convert an array into a {@link Collection} or simply create a single element + * collection for everything else. + * + * @param source must not be {@literal null}. + * @return the given source as {@link Collection}; never {@literal null}. + */ + public static Collection asCollection(Object source) { + + if (source instanceof Collection) { + return (Collection) source; + } + + return source.getClass().isArray() ?
CollectionUtils.arrayToList(source) : Collections.singleton(source); + } + @Nullable private static String toJson(@Nullable Object value) { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java index a4c1ab788d..2c0f8649e2 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java @@ -62,7 +62,7 @@ import com.mongodb.client.MongoDatabase; /** - * Unit tests for {@link DbRefMappingMongoConverter}. + * Unit tests for {@link MappingMongoConverter}. * * @author Oliver Gierke * @author Thomas Darimont diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java index abdd84a4a1..ffad28b231 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java @@ -2177,9 +2177,10 @@ public void readAndConvertDBRefNestedByMapCorrectly() { MappingMongoConverter spyConverter = spy(converter); Mockito.doReturn(cluster).when(spyConverter).readRef(dbRef); - Map result = spyConverter.readMap(ClassTypeInformation.MAP, data, ObjectPath.ROOT); + Map result = spyConverter.readMap(spyConverter.getConversionContext(ObjectPath.ROOT), data, + ClassTypeInformation.MAP); - assertThat(((LinkedHashMap) result.get("cluster")).get("_id")).isEqualTo(100L); + assertThat(((Map) result.get("cluster")).get("_id")).isEqualTo(100L); } @Test // GH-3546