diff --git a/pom.xml b/pom.xml index d00702b731..8eb8e02b68 100644 --- a/pom.xml +++ b/pom.xml @@ -5,7 +5,7 @@ org.springframework.data spring-data-mongodb-parent - 2.2.0.BUILD-SNAPSHOT + 2.2.0.DATAMONGO-1849-SNAPSHOT pom Spring Data MongoDB diff --git a/spring-data-mongodb-benchmarks/pom.xml b/spring-data-mongodb-benchmarks/pom.xml index bb7d9f03cc..e477b31bfd 100644 --- a/spring-data-mongodb-benchmarks/pom.xml +++ b/spring-data-mongodb-benchmarks/pom.xml @@ -7,7 +7,7 @@ org.springframework.data spring-data-mongodb-parent - 2.2.0.BUILD-SNAPSHOT + 2.2.0.DATAMONGO-1849-SNAPSHOT ../pom.xml diff --git a/spring-data-mongodb-cross-store/pom.xml b/spring-data-mongodb-cross-store/pom.xml index d3b7593d37..442f076df6 100644 --- a/spring-data-mongodb-cross-store/pom.xml +++ b/spring-data-mongodb-cross-store/pom.xml @@ -6,7 +6,7 @@ org.springframework.data spring-data-mongodb-parent - 2.2.0.BUILD-SNAPSHOT + 2.2.0.DATAMONGO-1849-SNAPSHOT ../pom.xml @@ -50,7 +50,7 @@ org.springframework.data spring-data-mongodb - 2.2.0.BUILD-SNAPSHOT + 2.2.0.DATAMONGO-1849-SNAPSHOT diff --git a/spring-data-mongodb-distribution/pom.xml b/spring-data-mongodb-distribution/pom.xml index b32dcba387..3ae97a7f89 100644 --- a/spring-data-mongodb-distribution/pom.xml +++ b/spring-data-mongodb-distribution/pom.xml @@ -14,7 +14,7 @@ org.springframework.data spring-data-mongodb-parent - 2.2.0.BUILD-SNAPSHOT + 2.2.0.DATAMONGO-1849-SNAPSHOT ../pom.xml diff --git a/spring-data-mongodb/pom.xml b/spring-data-mongodb/pom.xml index b611cf01a8..44baa80cff 100644 --- a/spring-data-mongodb/pom.xml +++ b/spring-data-mongodb/pom.xml @@ -11,7 +11,7 @@ org.springframework.data spring-data-mongodb-parent - 2.2.0.BUILD-SNAPSHOT + 2.2.0.DATAMONGO-1849-SNAPSHOT ../pom.xml diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreator.java new file mode 100644 index 0000000000..815a68994d --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreator.java @@ -0,0 +1,210 @@ +/* + * Copyright 2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.EnumSet; +import java.util.List; + +import org.springframework.data.mapping.PersistentProperty; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.ObjectJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.JsonSchemaObject; +import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type; +import org.springframework.data.mongodb.core.schema.JsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema.MongoJsonSchemaBuilder; +import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; +import org.springframework.util.CollectionUtils; +import org.springframework.util.ObjectUtils; + +/** + * {@link MongoJsonSchemaCreator} implementation using both {@link MongoConverter} and {@link MappingContext} to obtain + * domain type meta information which considers {@link org.springframework.data.mongodb.core.mapping.Field field names} + * and {@link org.springframework.data.mongodb.core.convert.MongoCustomConversions custom conversions}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.2 + */ +class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator { + + private final MongoConverter converter; + private final MappingContext, MongoPersistentProperty> mappingContext; + + /** + * Create a new instance of {@link MappingMongoJsonSchemaCreator}. + * + * @param converter must not be {@literal null}. 
+ */ + @SuppressWarnings("unchecked") + MappingMongoJsonSchemaCreator(MongoConverter converter) { + + Assert.notNull(converter, "Converter must not be null!"); + this.converter = converter; + this.mappingContext = (MappingContext, MongoPersistentProperty>) converter + .getMappingContext(); + } + + /* + * (non-Javadoc) + * org.springframework.data.mongodb.core.MongoJsonSchemaCreator#createSchemaFor(java.lang.Class) + */ + @Override + public MongoJsonSchema createSchemaFor(Class type) { + + MongoPersistentEntity entity = mappingContext.getRequiredPersistentEntity(type); + MongoJsonSchemaBuilder schemaBuilder = MongoJsonSchema.builder(); + + List schemaProperties = computePropertiesForEntity(Collections.emptyList(), entity); + schemaBuilder.properties(schemaProperties.toArray(new JsonSchemaProperty[0])); + + return schemaBuilder.build(); + + } + + private List computePropertiesForEntity(List path, + MongoPersistentEntity entity) { + + List schemaProperties = new ArrayList<>(); + + for (MongoPersistentProperty nested : entity) { + + List currentPath = new ArrayList<>(path); + + if (path.contains(nested)) { // cycle guard + schemaProperties.add(createSchemaProperty(computePropertyFieldName(CollectionUtils.lastElement(currentPath)), + Object.class, false)); + break; + } + + currentPath.add(nested); + schemaProperties.add(computeSchemaForProperty(currentPath)); + } + + return schemaProperties; + } + + private JsonSchemaProperty computeSchemaForProperty(List path) { + + MongoPersistentProperty property = CollectionUtils.lastElement(path); + + boolean required = isRequiredProperty(property); + Class rawTargetType = computeTargetType(property); // target type before conversion + Class targetType = converter.getTypeMapper().getWriteTargetTypeFor(rawTargetType); // conversion target type + + if (property.isEntity() && ObjectUtils.nullSafeEquals(rawTargetType, targetType)) { + return createObjectSchemaPropertyForEntity(path, property, required); + } + + String fieldName = computePropertyFieldName(property); + + if (property.isCollectionLike()) { + return createSchemaProperty(fieldName, targetType, required); + } else if (property.isMap()) { + return createSchemaProperty(fieldName, Type.objectType(), required); + } else if (ClassUtils.isAssignable(Enum.class, targetType)) { + return createEnumSchemaProperty(fieldName, targetType, required); + } + + return createSchemaProperty(fieldName, targetType, required); + } + + private JsonSchemaProperty createObjectSchemaPropertyForEntity(List path, + MongoPersistentProperty property, boolean required) { + + ObjectJsonSchemaProperty target = JsonSchemaProperty.object(property.getName()); + List nestedProperties = computePropertiesForEntity(path, + mappingContext.getRequiredPersistentEntity(property)); + + return createPotentiallyRequiredSchemaProperty( + target.properties(nestedProperties.toArray(new JsonSchemaProperty[0])), required); + } + + private JsonSchemaProperty createEnumSchemaProperty(String fieldName, Class targetType, boolean required) { + + List possibleValues = new ArrayList<>(); + + for (Object enumValue : EnumSet.allOf((Class) targetType)) { + possibleValues.add(converter.convertToMongoType(enumValue)); + } + + targetType = possibleValues.isEmpty() ? 
targetType : possibleValues.iterator().next().getClass(); + return createSchemaProperty(fieldName, targetType, required, possibleValues); + } + + JsonSchemaProperty createSchemaProperty(String fieldName, Object type, boolean required) { + return createSchemaProperty(fieldName, type, required, Collections.emptyList()); + } + + JsonSchemaProperty createSchemaProperty(String fieldName, Object type, boolean required, + Collection possibleValues) { + + TypedJsonSchemaObject schemaObject = type instanceof Type ? JsonSchemaObject.of(Type.class.cast(type)) + : JsonSchemaObject.of(Class.class.cast(type)); + + if (!CollectionUtils.isEmpty(possibleValues)) { + schemaObject = schemaObject.possibleValues(possibleValues); + } + + return createPotentiallyRequiredSchemaProperty(JsonSchemaProperty.named(fieldName).with(schemaObject), required); + } + + private String computePropertyFieldName(PersistentProperty property) { + + return property instanceof MongoPersistentProperty ? ((MongoPersistentProperty) property).getFieldName() + : property.getName(); + } + + private boolean isRequiredProperty(PersistentProperty property) { + return property.getType().isPrimitive(); + } + + private Class computeTargetType(PersistentProperty property) { + + if (!(property instanceof MongoPersistentProperty)) { + return property.getType(); + } + + MongoPersistentProperty mongoProperty = (MongoPersistentProperty) property; + if (!mongoProperty.isIdProperty()) { + return mongoProperty.getFieldType(); + } + + if (mongoProperty.hasExplicitWriteTarget()) { + return mongoProperty.getRequiredAnnotation(Field.class).targetType().getJavaClass(); + } + + return mongoProperty.getFieldType() != mongoProperty.getActualType() ? Object.class : mongoProperty.getFieldType(); + } + + static JsonSchemaProperty createPotentiallyRequiredSchemaProperty(JsonSchemaProperty property, boolean required) { + + if (!required) { + return property; + } + + return JsonSchemaProperty.required(property); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoJsonSchemaCreator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoJsonSchemaCreator.java new file mode 100644 index 0000000000..ea42d99415 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoJsonSchemaCreator.java @@ -0,0 +1,78 @@ +/* + * Copyright 2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.lang.NonNull; +import org.springframework.util.Assert; + +/** + * {@link MongoJsonSchemaCreator} extracts the {@link MongoJsonSchema} for a given {@link Class} by applying the + * following mapping rules. + *

+ * <p>
+ * <strong>Required Properties</strong>
+ * <ul>
+ *     <li>All constructor arguments annotated with {@link NonNull}</li>
+ *     <li>{@link NonNull} properties</li>
+ *     <li>Properties of primitive type</li>
+ * </ul>
+ * <strong>Ignored Properties</strong>
+ * <ul>
+ *     <li>All properties annotated with {@link org.springframework.data.annotation.Transient}</li>
+ * </ul>
+ * <strong>Property Type Mapping</strong>
+ * <ul>
+ *     <li>{@link java.lang.Object} -> {@code type : 'object'}</li>
+ *     <li>{@link java.util.Arrays} -> {@code type : 'array'}</li>
+ *     <li>{@link java.util.Collection} -> {@code type : 'array'}</li>
+ *     <li>{@link java.util.Map} -> {@code type : 'object'}</li>
+ *     <li>{@link java.lang.Enum} -> {@code type : 'string', enum : [the enum values]}</li>
+ *     <li>Simple Types -> {@code type : 'the corresponding bson type' }</li>
+ *     <li>Domain Types -> {@code type : 'object', properties : {the types properties} }</li>
+ * </ul>
+ * <p>
+ * {@link org.springframework.data.annotation.Id _id} properties using types that can be converted into
+ * {@link org.bson.types.ObjectId} like {@link String} will be mapped to {@code type : 'object'} unless there is more
+ * specific information available via the {@link org.springframework.data.mongodb.core.mapping.MongoId} annotation.
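To make the mapping rules above concrete, a minimal usage sketch follows. The setup mirrors the unit tests added further down in this patch; the `Person` type and its field names are invented for illustration only.

```java
import org.bson.Document;

import org.springframework.data.mongodb.core.MongoJsonSchemaCreator;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver;
import org.springframework.data.mongodb.core.mapping.Field;
import org.springframework.data.mongodb.core.mapping.FieldType;
import org.springframework.data.mongodb.core.mapping.MongoId;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.core.schema.MongoJsonSchema;

public class SchemaCreatorSketch {

	// Invented domain type, used only to illustrate the mapping rules above.
	static class Person {

		@MongoId(FieldType.OBJECT_ID) String id; // -> '_id' : { 'bsonType' : 'objectId' }
		@Field("first_name") String firstName;   // -> 'first_name' : { 'type' : 'string' }
		int age;                                 // primitive -> listed in 'required', 'bsonType' : 'int'
	}

	public static void main(String[] args) {

		MongoMappingContext mappingContext = new MongoMappingContext();
		MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext);
		converter.afterPropertiesSet();

		MongoJsonSchema schema = MongoJsonSchemaCreator.create(converter).createSchemaFor(Person.class);

		// { 'type' : 'object', 'required' : ['age'], 'properties' : { '_id' : ..., 'first_name' : ..., 'age' : ... } }
		Document jsonSchema = schema.toDocument().get("$jsonSchema", Document.class);
		System.out.println(jsonSchema.toJson());
	}
}
```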

+ * + * @author Christoph Strobl + * @since 2.2 + */ +public interface MongoJsonSchemaCreator { + + /** + * Create the {@link MongoJsonSchema} for the given {@link Class type}. + * + * @param type must not be {@literal null}. + * @return never {@literal null}. + */ + MongoJsonSchema createSchemaFor(Class type); + + /** + * Creates a new {@link MongoJsonSchemaCreator} that is aware of conversions applied by the given + * {@link MongoConverter}. + * + * @param mongoConverter must not be {@literal null}. + * @return new instance of {@link MongoJsonSchemaCreator}. + */ + static MongoJsonSchemaCreator create(MongoConverter mongoConverter) { + + Assert.notNull(mongoConverter, "MongoConverter must not be null!"); + return new MappingMongoJsonSchemaCreator(mongoConverter); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultMongoTypeMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultMongoTypeMapper.java index a5071ad33f..b166815615 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultMongoTypeMapper.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultMongoTypeMapper.java @@ -15,13 +15,16 @@ */ package org.springframework.data.mongodb.core.convert; -import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.UnaryOperator; import org.bson.Document; import org.bson.conversions.Bson; + +import org.springframework.data.convert.CustomConversions; import org.springframework.data.convert.DefaultTypeMapper; import org.springframework.data.convert.SimpleTypeInformationMapper; import org.springframework.data.convert.TypeAliasAccessor; @@ -58,21 +61,58 @@ public class DefaultMongoTypeMapper extends DefaultTypeMapper implements M private final TypeAliasAccessor accessor; private final @Nullable String typeKey; + private UnaryOperator> writeTarget = UnaryOperator.identity(); + /** + * Create a new {@link MongoTypeMapper} with fully-qualified type hints using {@code _class}. + */ public DefaultMongoTypeMapper() { this(DEFAULT_TYPE_KEY); } + /** + * Create a new {@link MongoTypeMapper} with fully-qualified type hints using {@code typeKey}. + * + * @param typeKey name of the field to read and write type hints. Can be {@literal null} to disable type hints. + */ public DefaultMongoTypeMapper(@Nullable String typeKey) { - this(typeKey, Arrays.asList(new SimpleTypeInformationMapper())); + this(typeKey, Collections.singletonList(new SimpleTypeInformationMapper())); } + /** + * Create a new {@link MongoTypeMapper} with fully-qualified type hints using {@code typeKey}. + * + * @param typeKey name of the field to read and write type hints. Can be {@literal null} to disable type hints. + * @param mappingContext the mapping context. + */ public DefaultMongoTypeMapper(@Nullable String typeKey, MappingContext, ?> mappingContext) { this(typeKey, new DocumentTypeAliasAccessor(typeKey), mappingContext, - Arrays.asList(new SimpleTypeInformationMapper())); + Collections.singletonList(new SimpleTypeInformationMapper())); } + /** + * Create a new {@link MongoTypeMapper} with fully-qualified type hints using {@code typeKey}. Uses + * {@link UnaryOperator} to apply {@link CustomConversions}. + * + * @param typeKey name of the field to read and write type hints. Can be {@literal null} to disable type hints. 
+ * @param mappingContext the mapping context to look up types using type hints. + * @see MappingMongoConverter#getWriteTarget(Class) + */ + public DefaultMongoTypeMapper(@Nullable String typeKey, + MappingContext, ?> mappingContext, UnaryOperator> writeTarget) { + this(typeKey, new DocumentTypeAliasAccessor(typeKey), mappingContext, + Collections.singletonList(new SimpleTypeInformationMapper())); + this.writeTarget = writeTarget; + } + + /** + * Create a new {@link MongoTypeMapper} with fully-qualified type hints using {@code typeKey}. Uses + * {@link TypeInformationMapper} to map type hints. + * + * @param typeKey name of the field to read and write type hints. Can be {@literal null} to disable type hints. + * @param mappers + */ public DefaultMongoTypeMapper(@Nullable String typeKey, List mappers) { this(typeKey, new DocumentTypeAliasAccessor(typeKey), null, mappers); } @@ -120,6 +160,15 @@ public void writeTypeRestrictions(Document result, @Nullable Set> restr accessor.writeTypeTo(result, new Document("$in", restrictedMappedTypes)); } + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.convert.MongoTypeMapper#getWriteTargetTypeFor(java.lang.Class) + */ + @Override + public Class getWriteTargetTypeFor(Class source) { + return writeTarget.apply(source); + } + /* * (non-Javadoc) * @see org.springframework.data.convert.DefaultTypeMapper#getFallbackTypeFor(java.lang.Object) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java index d62baf31e9..ad821e69a3 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java @@ -15,23 +15,33 @@ */ package org.springframework.data.mongodb.core.convert; -import java.util.*; +import java.lang.reflect.Constructor; +import java.lang.reflect.Method; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; import java.util.Map.Entry; +import java.util.Optional; +import java.util.Set; import org.bson.Document; import org.bson.conversions.Bson; import org.bson.types.ObjectId; import org.slf4j.Logger; import org.slf4j.LoggerFactory; + import org.springframework.beans.BeansException; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContextAware; import org.springframework.core.CollectionFactory; import org.springframework.core.convert.ConversionService; import org.springframework.core.convert.support.DefaultConversionService; -import org.springframework.data.convert.CustomConversions; import org.springframework.data.convert.EntityInstantiator; -import org.springframework.data.convert.EntityInstantiators; import org.springframework.data.convert.TypeMapper; import org.springframework.data.mapping.Association; import org.springframework.data.mapping.MappingException; @@ -112,7 +122,8 @@ public MappingMongoConverter(DbRefResolver dbRefResolver, this.dbRefResolver = dbRefResolver; this.mappingContext = mappingContext; - this.typeMapper = new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, mappingContext); + this.typeMapper = new 
DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, mappingContext, + this::getWriteTarget); this.idMapper = new QueryMapper(this); this.spELContext = new SpELContext(DocumentPropertyAccessor.INSTANCE); @@ -660,6 +671,10 @@ private static Collection asCollection(Object source) { protected List createCollection(Collection collection, MongoPersistentProperty property) { if (!property.isDbReference()) { + + if (property.hasExplicitWriteTarget()) { + return writeCollectionInternal(collection, new HijackedTypeInformation<>(property), new ArrayList<>()); + } return writeCollectionInternal(collection, property.getTypeInformation(), new BasicDBList()); } @@ -739,7 +754,8 @@ private List writeCollectionInternal(Collection source, @Nullable Typ Class elementType = element == null ? null : element.getClass(); if (elementType == null || conversions.isSimpleType(elementType)) { - collection.add(getPotentiallyConvertedSimpleWrite(element)); + collection.add(getPotentiallyConvertedSimpleWrite(element, + componentType != null ? componentType.getType() : Object.class)); } else if (element instanceof Collection || elementType.isArray()) { collection.add(writeCollectionInternal(asCollection(element), componentType, new BasicDBList())); } else { @@ -842,7 +858,7 @@ private String potentiallyConvertMapKey(Object key) { } return conversions.hasCustomWriteTarget(key.getClass(), String.class) - ? (String) getPotentiallyConvertedSimpleWrite(key) + ? (String) getPotentiallyConvertedSimpleWrite(key, Object.class) : key.toString(); } @@ -884,12 +900,13 @@ protected void addCustomTypeKeyIfNecessary(@Nullable TypeInformation type, Ob * @param key must not be {@literal null}. */ private void writeSimpleInternal(Object value, Bson bson, String key) { - addToMap(bson, key, getPotentiallyConvertedSimpleWrite(value)); + addToMap(bson, key, getPotentiallyConvertedSimpleWrite(value, Object.class)); } private void writeSimpleInternal(Object value, Bson bson, MongoPersistentProperty property) { DocumentAccessor accessor = new DocumentAccessor(bson); - accessor.put(property, getPotentiallyConvertedSimpleWrite(value)); + accessor.put(property, getPotentiallyConvertedSimpleWrite(value, + property.hasExplicitWriteTarget() ? property.getFieldType() : Object.class)); } /** @@ -900,12 +917,19 @@ private void writeSimpleInternal(Object value, Bson bson, MongoPersistentPropert * @return */ @Nullable - private Object getPotentiallyConvertedSimpleWrite(@Nullable Object value) { + private Object getPotentiallyConvertedSimpleWrite(@Nullable Object value, @Nullable Class typeHint) { if (value == null) { return null; } + if (typeHint != null && Object.class != typeHint) { + + if (conversionService.canConvert(value.getClass(), typeHint)) { + value = conversionService.convert(value, typeHint); + } + } + Optional> customTarget = conversions.getCustomWriteTarget(value.getClass()); if (customTarget.isPresent()) { @@ -1204,7 +1228,8 @@ public Object convertToMongoType(@Nullable Object obj, TypeInformation typeIn if (conversions.isSimpleType(obj.getClass())) { // Doesn't need conversion - return getPotentiallyConvertedSimpleWrite(obj); + return getPotentiallyConvertedSimpleWrite(obj, + typeInformation != null ? 
typeInformation.getType() : Object.class); } if (obj instanceof List) { @@ -1584,6 +1609,7 @@ private boolean canPublishEvent() { * @param ref * @return */ + @Nullable Document readRef(DBRef ref) { return dbRefResolver.fetch(ref); } @@ -1599,6 +1625,16 @@ List bulkReadRefs(List references) { return dbRefResolver.bulkFetch(references); } + /** + * Get the conversion target type if defined or return the {@literal source}. + * + * @param source must not be {@literal null}. + * @return + * @since 2.2 + */ + public Class getWriteTarget(Class source) { + return conversions.getCustomWriteTarget(source).orElse(source); + } /** * Create a new {@link MappingMongoConverter} using the given {@link MongoDbFactory} when loading {@link DBRef}. @@ -1667,4 +1703,91 @@ public T getParameterValue(Parameter parameter) return null; } } + + private static class HijackedTypeInformation implements TypeInformation { + + private MongoPersistentProperty persistentProperty; + private TypeInformation delegate; + + public HijackedTypeInformation(MongoPersistentProperty property) { + + this.persistentProperty = property; + this.delegate = property.getTypeInformation(); + } + + @Override + public List> getParameterTypes(Constructor constructor) { + return persistentProperty.getTypeInformation().getParameterTypes(constructor); + } + + @Override + public org.springframework.data.util.TypeInformation getProperty(String property) { + return delegate.getProperty(property); + } + + @Override + public boolean isCollectionLike() { + return delegate.isCollectionLike(); + } + + @Override + public org.springframework.data.util.TypeInformation getComponentType() { + return ClassTypeInformation.from(persistentProperty.getFieldType()); + } + + @Override + public boolean isMap() { + return delegate.isMap(); + } + + @Override + public org.springframework.data.util.TypeInformation getMapValueType() { + return ClassTypeInformation.from(persistentProperty.getFieldType()); + } + + @Override + public Class getType() { + return delegate.getType(); + } + + @Override + public ClassTypeInformation getRawTypeInformation() { + return delegate.getRawTypeInformation(); + } + + @Override + public org.springframework.data.util.TypeInformation getActualType() { + return delegate.getActualType(); + } + + @Override + public org.springframework.data.util.TypeInformation getReturnType(Method method) { + return delegate.getReturnType(method); + } + + @Override + public List> getParameterTypes(Method method) { + return delegate.getParameterTypes(method); + } + + @Override + public org.springframework.data.util.TypeInformation getSuperTypeInformation(Class superType) { + return delegate.getSuperTypeInformation(superType); + } + + @Override + public boolean isAssignableFrom(org.springframework.data.util.TypeInformation target) { + return delegate.isAssignableFrom(target); + } + + @Override + public List> getTypeArguments() { + return delegate.getTypeArguments(); + } + + @Override + public org.springframework.data.util.TypeInformation specialize(ClassTypeInformation type) { + return delegate.specialize(type); + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverters.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverters.java index d2dac0b906..f1cef3ed3e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverters.java +++ 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverters.java @@ -34,6 +34,7 @@ import org.bson.Document; import org.bson.types.Binary; import org.bson.types.Code; +import org.bson.types.Decimal128; import org.bson.types.ObjectId; import org.springframework.core.convert.ConversionFailedException; import org.springframework.core.convert.TypeDescriptor; @@ -75,7 +76,9 @@ static Collection getConvertersToRegister() { List converters = new ArrayList<>(); converters.add(BigDecimalToStringConverter.INSTANCE); + converters.add(BigDecimalToDecimal128Converter.INSTANCE); converters.add(StringToBigDecimalConverter.INSTANCE); + converters.add(Decimal128ToBigDecimalConverter.INSTANCE); converters.add(BigIntegerToStringConverter.INSTANCE); converters.add(StringToBigIntegerConverter.INSTANCE); converters.add(URLToStringConverter.INSTANCE); @@ -158,6 +161,17 @@ public String convert(BigDecimal source) { } } + /** + * @since 2.2 + */ + enum BigDecimalToDecimal128Converter implements Converter { + INSTANCE; + + public Decimal128 convert(BigDecimal source) { + return source == null ? null : new Decimal128(source); + } + } + enum StringToBigDecimalConverter implements Converter { INSTANCE; @@ -166,6 +180,17 @@ public BigDecimal convert(String source) { } } + /** + * @since 2.2 + */ + enum Decimal128ToBigDecimalConverter implements Converter { + INSTANCE; + + public BigDecimal convert(Decimal128 source) { + return source.bigDecimalValue(); + } + } + enum BigIntegerToStringConverter implements Converter { INSTANCE; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoTypeMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoTypeMapper.java index d7fe0c5f39..5394c45c30 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoTypeMapper.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoTypeMapper.java @@ -46,4 +46,15 @@ public interface MongoTypeMapper extends TypeMapper { * @param restrictedTypes must not be {@literal null} */ void writeTypeRestrictions(Document result, Set> restrictedTypes); + + /** + * Compute the target type for a given source considering {@link org.springframework.data.convert.CustomConversions}. + * + * @param source the source type. + * @return never {@literal null}. 
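The `getWriteTargetTypeFor` hook above, together with the `Decimal128` converters registered in `MongoConverters`, lets the schema creator see a property "through" registered custom conversions. A sketch of the intended behavior, assuming an invented `Email` value type and converter; only the Spring Data calls shown are taken from this patch.

```java
import java.util.Collections;

import org.springframework.core.convert.converter.Converter;
import org.springframework.data.convert.WritingConverter;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;

public class WriteTargetSketch {

	// Invented value type and converter, for illustration only.
	static class Email {
		final String value;
		Email(String value) { this.value = value; }
	}

	@WritingConverter
	enum EmailToStringConverter implements Converter<Email, String> {
		INSTANCE;

		@Override
		public String convert(Email source) {
			return source.value;
		}
	}

	public static void main(String[] args) {

		MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE,
				new MongoMappingContext());
		converter.setCustomConversions(new MongoCustomConversions(Collections.singletonList(EmailToStringConverter.INSTANCE)));
		converter.afterPropertiesSet();

		// A registered conversion is present: the write target is the conversion target type.
		Class<?> emailTarget = converter.getTypeMapper().getWriteTargetTypeFor(Email.class); // -> String.class

		// No conversion registered: the source type is returned as-is (the default of the new interface method).
		Class<?> longTarget = converter.getTypeMapper().getWriteTargetTypeFor(Long.class);   // -> Long.class

		System.out.println(emailTarget + " / " + longTarget);
	}
}
```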
+ * @since 2.2 + */ + default Class getWriteTargetTypeFor(Class source) { + return source; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java index 8f57729d05..7da33a9879 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java @@ -334,6 +334,12 @@ protected Document getMappedKeyword(Field property, Keyword keyword) { @SuppressWarnings("unchecked") protected Object getMappedValue(Field documentField, Object value) { + if(documentField.getProperty() != null && documentField.getProperty().hasExplicitWriteTarget()) { + if(conversionService.canConvert(value.getClass(), documentField.getProperty().getFieldType())) { + value = conversionService.convert(value, documentField.getProperty().getFieldType()); + } + } + if (documentField.isIdField() && !documentField.isAssociation()) { if (isDBObject(value)) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java index 0641429493..2acc48cf7a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java @@ -150,18 +150,22 @@ public String getFieldName() { @Override public Class getFieldType() { + Field fieldAnnotation = findAnnotation(Field.class); + if (!isIdProperty()) { - return getType(); - } - MongoId idAnnotation = findAnnotation(MongoId.class); + if (fieldAnnotation == null || fieldAnnotation.targetType() == FieldType.IMPLICIT) { + return getType(); + } - if (idAnnotation == null) { - return FieldType.OBJECT_ID.getJavaClass(); + return fieldAnnotation.targetType().getJavaClass(); } - FieldType fieldType = idAnnotation.targetType(); + if (fieldAnnotation == null) { + return FieldType.OBJECT_ID.getJavaClass(); + } + FieldType fieldType = fieldAnnotation.targetType(); if (fieldType == FieldType.IMPLICIT) { return getType(); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Field.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Field.java index f59136f7e8..c91e58bb2b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Field.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Field.java @@ -21,10 +21,13 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; +import org.springframework.core.annotation.AliasFor; + /** * Annotation to define custom metadata for document fields. * * @author Oliver Gierke + * @author Christoph Strobl */ @Documented @Retention(RetentionPolicy.RUNTIME) @@ -32,16 +35,34 @@ public @interface Field { /** - * The key to be used to store the field inside the document. + * The key to be used to store the field inside the document. Alias for {@link #name()}. * - * @return + * @return an empty {@link String} by default. */ + @AliasFor("name") String value() default ""; + /** + * The key to be used to store the field inside the document. Alias for {@link #value()}. 
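The `QueryMapper` change above converts criteria values to a property's explicit write target before they reach the driver. A sketch mirroring the `QueryMapper` unit test added at the end of this patch; the `WithScript` type is invented for illustration.

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.bson.Document;
import org.bson.types.Code;

import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.mapping.Field;
import org.springframework.data.mongodb.core.mapping.FieldType;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.core.query.Query;

public class QueryTargetTypeSketch {

	// Invented domain type; the annotation usage mirrors the QueryMapper unit test added in this patch.
	static class WithScript {
		@Field(targetType = FieldType.SCRIPT) String script;
	}

	public static void main(String[] args) {

		MongoMappingContext context = new MongoMappingContext();
		MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, context);
		converter.afterPropertiesSet();
		QueryMapper mapper = new QueryMapper(converter);

		Query query = new Query(where("script").is("if (a > b) a else b"));
		Document mapped = mapper.getMappedObject(query.getQueryObject(),
				context.getPersistentEntity(WithScript.class));

		// The String criterion value is converted into the explicit write target:
		// { "script" : Code("if (a > b) a else b") }
		System.out.println(mapped.get("script") instanceof Code); // true
	}
}
```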
+ * + * @return an empty {@link String} by default. + * @since 2.2 + */ + @AliasFor("value") + String name() default ""; + /** * The order in which various fields shall be stored. Has to be a positive integer. * * @return the order the field shall have in the document or -1 if undefined. */ int order() default Integer.MAX_VALUE; + + /** + * The actual desired target type the field should be stored as. + * + * @return {@link FieldType#IMPLICIT} by default. + * @since 2.2 + */ + FieldType targetType() default FieldType.IMPLICIT; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/FieldType.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/FieldType.java index e5942612b5..544d497c2e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/FieldType.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/FieldType.java @@ -15,6 +15,13 @@ */ package org.springframework.data.mongodb.core.mapping; +import java.util.Date; +import java.util.regex.Pattern; + +import org.bson.types.BSONTimestamp; +import org.bson.types.Binary; +import org.bson.types.Code; +import org.bson.types.Decimal128; import org.bson.types.ObjectId; /** @@ -34,7 +41,20 @@ public enum FieldType { /** * Implicit type that is derived from the property value. */ - IMPLICIT(-1, Object.class), STRING(2, String.class), OBJECT_ID(7, ObjectId.class); + IMPLICIT(-1, Object.class), // + DOUBLE(1, Double.class), // + STRING(2, String.class), // + ARRAY(4, Object[].class), // + BINARY(5, Binary.class), // + OBJECT_ID(7, ObjectId.class), // + BOOLEAN(8, Boolean.class), // + DATE_TIME(9, Date.class), // + PATTERN(11, Pattern.class), // + SCRIPT(13, Code.class), // + INT32(15, Integer.class), // + TIMESTAMP(16, BSONTimestamp.class), // + INT64(17, Long.class), // + DECIMAL128(18, Decimal128.class); private final int bsonType; private final Class javaClass; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoId.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoId.java index 8dc58cf075..6b52b0d98f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoId.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoId.java @@ -37,6 +37,7 @@ * @since 2.2 */ @Id +@Field @Retention(RetentionPolicy.RUNTIME) @Target({ ElementType.FIELD, ElementType.METHOD, ElementType.ANNOTATION_TYPE }) public @interface MongoId { @@ -45,7 +46,7 @@ * @return the preferred id type. * @see #targetType() */ - @AliasFor("targetType") + @AliasFor(annotation = Field.class, attribute="targetType") FieldType value() default FieldType.IMPLICIT; /** @@ -55,7 +56,7 @@ * * @return the preferred {@literal id} type. {@link FieldType#IMPLICIT} by default. 
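Putting the new `name`/`targetType` attributes and the extended `FieldType` enum together, a domain-type sketch of how values are written; it mirrors the `MappingMongoConverter` unit tests added below, and the `Invoice` type is invented for illustration.

```java
import java.math.BigDecimal;

import org.bson.types.Code;
import org.bson.types.Decimal128;
import org.bson.types.ObjectId;

import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver;
import org.springframework.data.mongodb.core.mapping.Field;
import org.springframework.data.mongodb.core.mapping.FieldType;
import org.springframework.data.mongodb.core.mapping.MongoId;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;

public class TargetTypeSketch {

	// Invented document type demonstrating the new targetType attribute.
	static class Invoice {

		@MongoId(targetType = FieldType.OBJECT_ID) String id;                       // hex String id stored as ObjectId
		@Field(name = "total", targetType = FieldType.DECIMAL128) BigDecimal total; // stored as Decimal128
		@Field(targetType = FieldType.SCRIPT) String validationRule;                // stored as Code
	}

	public static void main(String[] args) {

		MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE,
				new MongoMappingContext());
		converter.afterPropertiesSet();

		Invoice invoice = new Invoice();
		invoice.id = new ObjectId().toHexString();
		invoice.total = BigDecimal.valueOf(19.99);
		invoice.validationRule = "if (a > b) a else b";

		org.bson.Document sink = new org.bson.Document();
		converter.write(invoice, sink);

		System.out.println(sink.get("_id") instanceof ObjectId);        // true (hex String converted via @MongoId)
		System.out.println(sink.get("total") instanceof Decimal128);    // true
		System.out.println(sink.get("validationRule") instanceof Code); // true
	}
}
```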
*/ - @AliasFor("value") + @AliasFor(annotation = Field.class, attribute="targetType") FieldType targetType() default FieldType.IMPLICIT; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentProperty.java index 45f6fb0fac..706e0277ca 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentProperty.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentProperty.java @@ -105,12 +105,30 @@ public interface MongoPersistentProperty extends PersistentProperty { + enum PropertyToFieldNameConverter implements Converter { INSTANCE; @@ -122,12 +140,4 @@ public String convert(MongoPersistentProperty source) { return source.getFieldName(); } } - - /** - * Returns whether property access shall be used for reading the property value. This means it will use the getter - * instead of field access. - * - * @return - */ - boolean usePropertyAccess(); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoSimpleTypes.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoSimpleTypes.java index b22be62809..95c75735c9 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoSimpleTypes.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoSimpleTypes.java @@ -24,10 +24,12 @@ import org.bson.*; import org.bson.types.Binary; +import org.bson.types.Code; import org.bson.types.CodeWScope; import org.bson.types.CodeWithScope; import org.bson.types.Decimal128; import org.bson.types.ObjectId; +import org.bson.types.Symbol; import org.springframework.data.mapping.model.SimpleTypeHolder; import com.mongodb.DBRef; @@ -50,16 +52,18 @@ public abstract class MongoSimpleTypes { AUTOGENERATED_ID_TYPES = Collections.unmodifiableSet(classes); Set> simpleTypes = new HashSet>(); - simpleTypes.add(DBRef.class); - simpleTypes.add(ObjectId.class); + simpleTypes.add(Binary.class); simpleTypes.add(BsonObjectId.class); + simpleTypes.add(DBRef.class); + simpleTypes.add(Decimal128.class); + simpleTypes.add(org.bson.Document.class); + simpleTypes.add(Code.class); simpleTypes.add(CodeWScope.class); simpleTypes.add(CodeWithScope.class); - simpleTypes.add(org.bson.Document.class); + simpleTypes.add(ObjectId.class); simpleTypes.add(Pattern.class); - simpleTypes.add(Binary.class); + simpleTypes.add(Symbol.class); simpleTypes.add(UUID.class); - simpleTypes.add(Decimal128.class); simpleTypes.add(BsonBinary.class); simpleTypes.add(BsonBoolean.class); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/IdentifiableJsonSchemaProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/IdentifiableJsonSchemaProperty.java index ffc263b467..bf2e801865 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/IdentifiableJsonSchemaProperty.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/IdentifiableJsonSchemaProperty.java @@ -953,7 +953,7 @@ public DateJsonSchemaProperty description(String description) { /** * @return new instance of {@link DateJsonSchemaProperty}. 
- * @see DateJsonSchemaProperty#generateDescription() + * @see DateJsonSchemaProperty#generatedDescription() */ public DateJsonSchemaProperty generatedDescription() { return new DateJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.generatedDescription()); @@ -983,10 +983,65 @@ public TimestampJsonSchemaProperty description(String description) { /** * @return new instance of {@link TimestampJsonSchemaProperty}. - * @see TimestampJsonSchemaProperty#generateDescription() + * @see TimestampJsonSchemaProperty#generatedDescription() */ public TimestampJsonSchemaProperty generatedDescription() { return new TimestampJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.generatedDescription()); } } + + /** + * Delegating {@link JsonSchemaProperty} implementation having a {@literal required} flag for evaluation during schema + * creation process. + * + * @author Christoph Strobl + * @since 2.2 + */ + public static class RequiredJsonSchemaProperty implements JsonSchemaProperty { + + private final JsonSchemaProperty delegate; + private final boolean required; + + RequiredJsonSchemaProperty(JsonSchemaProperty delegate, boolean required) { + + this.delegate = delegate; + this.required = required; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.schema.JsonSchemaProperty#getIdentifier() + */ + @Override + public String getIdentifier() { + return delegate.getIdentifier(); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.schema.JsonSchemaObject#getTypes() + */ + @Override + public Set getTypes() { + return delegate.getTypes(); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.schema.JsonSchemaObject#toDocument() + */ + @Override + public Document toDocument() { + return delegate.toDocument(); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.schema.JsonSchemaProperty#isRequired() + */ + @Override + public boolean isRequired() { + return required; + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/JsonSchemaObject.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/JsonSchemaObject.java index bd18195683..375845ada8 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/JsonSchemaObject.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/JsonSchemaObject.java @@ -19,13 +19,19 @@ import java.math.BigDecimal; import java.util.Arrays; +import java.util.Collection; import java.util.Date; import java.util.HashSet; +import java.util.Map; import java.util.Set; import java.util.regex.Pattern; import org.bson.BsonTimestamp; import org.bson.Document; +import org.bson.types.BSONTimestamp; +import org.bson.types.Binary; +import org.bson.types.Code; +import org.bson.types.Decimal128; import org.bson.types.ObjectId; import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.ArrayJsonSchemaObject; import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.BooleanJsonSchemaObject; @@ -177,7 +183,7 @@ static TypedJsonSchemaObject of(@Nullable Class type) { return of(Type.nullType()); } - if (type.isArray()) { + if (type.isArray() || ClassUtils.isAssignable(Collection.class, type)) { if (type.equals(byte[].class)) { return of(Type.binaryType()); @@ -186,6 +192,10 @@ static TypedJsonSchemaObject of(@Nullable Class type) { return of(Type.arrayType()); } + if (type.equals(Document.class) || ClassUtils.isAssignable(Map.class, 
type)) { + return of(Type.objectType()); + } + if (type.equals(Object.class)) { return of(Type.objectType()); } @@ -202,7 +212,19 @@ static TypedJsonSchemaObject of(@Nullable Class type) { return of(Type.dateType()); } - if (ClassUtils.isAssignable(BsonTimestamp.class, type)) { + if (ClassUtils.isAssignable(Binary.class, type)) { + return of(Type.binaryType()); + } + + if (ClassUtils.isAssignable(Code.class, type)) { + return of(Type.javascriptType()); + } + + if (ClassUtils.isAssignable(Decimal128.class, type)) { + return of(Type.bigDecimalType()); + } + + if (ClassUtils.isAssignable(BsonTimestamp.class, type) || ClassUtils.isAssignable(BSONTimestamp.class, type)) { return of(Type.timestampType()); } @@ -210,29 +232,34 @@ static TypedJsonSchemaObject of(@Nullable Class type) { return of(Type.regexType()); } - if (ClassUtils.isAssignable(Boolean.class, type)) { + if (ClassUtils.isAssignable(Enum.class, type)) { + return of(Type.stringType()); + } + + Class resolved = ClassUtils.resolvePrimitiveIfNecessary(type); + if (ClassUtils.isAssignable(Boolean.class, resolved)) { return of(Type.booleanType()); } - if (ClassUtils.isAssignable(Number.class, type)) { + if (ClassUtils.isAssignable(Number.class, resolved)) { - if (type.equals(Long.class)) { + if (resolved.equals(Long.class)) { return of(Type.longType()); } - if (type.equals(Float.class)) { + if (resolved.equals(Float.class)) { return of(Type.doubleType()); } - if (type.equals(Double.class)) { + if (resolved.equals(Double.class)) { return of(Type.doubleType()); } - if (type.equals(Integer.class)) { + if (resolved.equals(Integer.class)) { return of(Type.intType()); } - if (type.equals(BigDecimal.class)) { + if (resolved.equals(BigDecimal.class)) { return of(Type.bigDecimalType()); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/JsonSchemaProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/JsonSchemaProperty.java index 1472091eff..c1fd88a601 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/JsonSchemaProperty.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/JsonSchemaProperty.java @@ -24,11 +24,13 @@ import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.NullJsonSchemaProperty; import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.NumericJsonSchemaProperty; import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.ObjectJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.RequiredJsonSchemaProperty; import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.StringJsonSchemaProperty; import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.TimestampJsonSchemaProperty; import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.UntypedJsonSchemaProperty; import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.NumericJsonSchemaObject; import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.ObjectJsonSchemaObject; +import org.springframework.lang.Nullable; /** * A {@literal property} or {@literal patternProperty} within a {@link JsonSchemaObject} of {@code type : 'object'}. @@ -47,6 +49,14 @@ public interface JsonSchemaProperty extends JsonSchemaObject { */ String getIdentifier(); + /** + * @return {@literal false} by default. 
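The `isRequired()` flag below feeds into the `ObjectJsonSchemaObject#toDocument()` change, which merges flagged properties into the schema's `required` array. A small sketch of how the pieces compose; the property names are invented.

```java
import org.bson.Document;

import org.springframework.data.mongodb.core.schema.JsonSchemaProperty;
import org.springframework.data.mongodb.core.schema.MongoJsonSchema;

public class RequiredPropertySketch {

	public static void main(String[] args) {

		MongoJsonSchema schema = MongoJsonSchema.builder() //
				.properties( //
						JsonSchemaProperty.required(JsonSchemaProperty.string("lastname")), //
						JsonSchemaProperty.int32("age")) //
				.build();

		// Expected shape (see the requiredProperties() merge in ObjectJsonSchemaObject#toDocument()):
		// { '$jsonSchema' : { 'type' : 'object', 'required' : ['lastname'],
		//     'properties' : { 'lastname' : { 'type' : 'string' }, 'age' : { 'bsonType' : 'int' } } } }
		Document document = schema.toDocument();
		System.out.println(document.toJson());
	}
}
```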
+ * @since 2.2 + */ + default boolean isRequired() { + return false; + } + /** * Creates a new {@link UntypedJsonSchemaProperty} with given {@literal identifier} without {@code type}. * @@ -201,6 +211,17 @@ static JsonSchemaPropertyBuilder named(String identifier) { return new JsonSchemaPropertyBuilder(identifier); } + /** + * Turns the given {@link JsonSchemaProperty} into a required on. + * + * @param property must not be {@literal null}. + * @return new instance of {@link JsonSchemaProperty}. + * @since 2.2 + */ + static JsonSchemaProperty required(JsonSchemaProperty property) { + return new RequiredJsonSchemaProperty(property, true); + } + /** * Builder for {@link IdentifiableJsonSchemaProperty}. */ @@ -219,6 +240,17 @@ public IdentifiableJsonSchemaProperty ofType(Type type) { return new IdentifiableJsonSchemaProperty<>(identifier, TypedJsonSchemaObject.of(type)); } + /** + * Configure the {@link Type} for the property by deriving it from the given {@link Class type}. + * + * @param type must not be {@literal null}. + * @return new instance of {@link IdentifiableJsonSchemaProperty}. + * @since 2.2 + */ + public IdentifiableJsonSchemaProperty ofType(@Nullable Class type) { + return new IdentifiableJsonSchemaProperty<>(identifier, JsonSchemaObject.of(type)); + } + /** * Configure a {@link TypedJsonSchemaObject} for the property. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/TypedJsonSchemaObject.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/TypedJsonSchemaObject.java index 1f09846562..af492337cf 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/TypedJsonSchemaObject.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/TypedJsonSchemaObject.java @@ -445,8 +445,9 @@ public ObjectJsonSchemaObject generatedDescription() { public Document toDocument() { Document doc = new Document(super.toDocument()); - if (!CollectionUtils.isEmpty(requiredProperties)) { - doc.append("required", requiredProperties); + Collection allRequiredProperties = requiredProperties(); + if (!CollectionUtils.isEmpty(allRequiredProperties)) { + doc.append("required", new ArrayList<>(allRequiredProperties)); } if (propertiesCount != null) { @@ -465,12 +466,21 @@ public Document toDocument() { if (additionalProperties != null) { - doc.append("additionalProperties", additionalProperties instanceof JsonSchemaObject - ? ((JsonSchemaObject) additionalProperties).toDocument() : additionalProperties); + doc.append("additionalProperties", + additionalProperties instanceof JsonSchemaObject ? 
((JsonSchemaObject) additionalProperties).toDocument() + : additionalProperties); } return doc; } + private Collection requiredProperties() { + + Set target = new LinkedHashSet<>(); + target.addAll(requiredProperties); + properties.stream().filter(JsonSchemaProperty::isRequired).forEach(it -> target.add(it.getIdentifier())); + return target; + } + private ObjectJsonSchemaObject newInstance(@Nullable String description, boolean generateDescription, Restrictions restrictions) { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreatorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreatorUnitTests.java new file mode 100644 index 0000000000..6743343a99 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreatorUnitTests.java @@ -0,0 +1,252 @@ +/* + * Copyright 2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import java.util.Collections; +import java.util.Date; +import java.util.List; +import java.util.Map; + +import org.bson.Document; +import org.junit.Before; +import org.junit.Test; + +import org.springframework.data.annotation.Transient; +import org.springframework.data.convert.WritingConverter; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.FieldType; +import org.springframework.data.mongodb.core.mapping.MongoId; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema; + +/** + * Unit tests for {@link MappingMongoJsonSchemaCreator}. 
+ * + * @author Christoph Strobl + * @author Mark PAluch + */ +public class MappingMongoJsonSchemaCreatorUnitTests { + + MappingMongoConverter converter; + MongoMappingContext mappingContext; + MappingMongoJsonSchemaCreator schemaCreator; + + @Before + public void setUp() { + + mappingContext = new MongoMappingContext(); + converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); + schemaCreator = new MappingMongoJsonSchemaCreator(converter); + } + + @Test // DATAMONGO-1849 + public void simpleTypes() { + + MongoJsonSchema schema = schemaCreator.createSchemaFor(VariousFieldTypes.class); + + assertThat(schema.toDocument().get("$jsonSchema", Document.class)).isEqualTo(Document.parse(VARIOUS_FIELD_TYPES)); + } + + @Test // DATAMONGO-1849 + public void withRemappedIdType() { + + MongoJsonSchema schema = schemaCreator.createSchemaFor(WithExplicitMongoIdTypeMapping.class); + assertThat(schema.toDocument().get("$jsonSchema", Document.class)).isEqualTo(WITH_EXPLICIT_MONGO_ID_TYPE_MAPPING); + } + + @Test // DATAMONGO-1849 + public void cyclic() { + + MongoJsonSchema schema = schemaCreator.createSchemaFor(Cyclic.class); + assertThat(schema.toDocument().get("$jsonSchema", Document.class)).isEqualTo(CYCLIC); + } + + @Test // DATAMONGO-1849 + public void converterRegistered() { + + MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); + MongoCustomConversions mcc = new MongoCustomConversions( + Collections.singletonList(SimpleToDocumentConverter.INSTANCE)); + converter.setCustomConversions(mcc); + converter.afterPropertiesSet(); + + schemaCreator = new MappingMongoJsonSchemaCreator(converter); + + MongoJsonSchema schema = schemaCreator.createSchemaFor(WithNestedDomainType.class); + assertThat(schema.toDocument().get("$jsonSchema", Document.class)).isEqualTo( + "{ 'type' : 'object', 'properties' : { '_id' : { 'type' : 'object' }, 'nested' : { 'type' : 'object' } } }"); + } + + // --> TYPES AND JSON + + // --> ENUM + + static final String JUST_SOME_ENUM = "{ 'type' : 'string', 'enum' : ['ONE', 'TWO'] }"; + + enum JustSomeEnum { + ONE, TWO + } + + // --> VARIOUS FIELD TYPES + + static final String VARIOUS_FIELD_TYPES = "" + // + "{" + // + " 'type' : 'object'," + // + " 'required' : ['primitiveInt']," + // + " 'properties' : {" + // + " 'id' : { 'type' : 'string' }," + // + " 're-named-property' : { 'type' : 'string' }," + // + " 'retypedProperty' : { 'bsonType' : 'javascript' }," + // + " 'primitiveInt' : { 'bsonType' : 'int' }," + // + " 'booleanProperty' : { 'type' : 'boolean' }," + // + " 'longProperty' : { 'bsonType' : 'long' }," + // + " 'intProperty' : { 'bsonType' : 'int' }," + // + " 'dateProperty' : { 'bsonType' : 'date' }," + // + " 'arrayProperty' : { 'type' : 'array' }," + // + " 'binaryDataProperty' : { 'bsonType' : 'binData' }," + // + " 'collectionProperty' : { 'type' : 'array' }," + // + " 'mapProperty' : { 'type' : 'object' }," + // + " 'objectProperty' : { 'type' : 'object' }," + // + " 'enumProperty' : " + JUST_SOME_ENUM + // + " }" + // + "}"; + + static class VariousFieldTypes { + + @Field("id") String id; + @Field("re-named-property") String renamedProperty; + @Field(targetType = FieldType.SCRIPT) String retypedProperty; + @Transient String transientProperty; + int primitiveInt; + Boolean booleanProperty; + Long longProperty; + Integer intProperty; + Date dateProperty; + Object[] arrayProperty; + byte[] binaryDataProperty; + List collectionProperty; + Map mapProperty; + Object objectProperty; + JustSomeEnum 
enumProperty; + } + + // --> NESTED DOMAIN TYPE + + static final String WITH_NESTED_DOMAIN_TYPE = "" + // + "{" + // + " 'type' : 'object'," + // + " 'properties' : {" + // + " '_id' : { 'type' : 'object' }," + // + " 'nested' : " + VARIOUS_FIELD_TYPES + // + " }" + // + "}"; + + static class WithNestedDomainType { + + String id; + VariousFieldTypes nested; + } + + // --> EXPLICIT MONGO_ID MAPPING + + final String WITH_EXPLICIT_MONGO_ID_TYPE_MAPPING = "" + // + "{" + // + " 'type' : 'object'," + // + " 'properties' : {" + // + " '_id' : { 'bsonType' : 'objectId' }," + // + " 'nested' : " + VARIOUS_FIELD_TYPES + // + " }" + // + "}"; + + static class WithExplicitMongoIdTypeMapping { + + @MongoId(targetType = FieldType.OBJECT_ID) String id; + VariousFieldTypes nested; + } + + // --> OH NO - A CYCLIC PROPERTY RELATIONSHIP 😱 + + static final String CYCLIC_FIN = "" + // + "{" + // + " 'type' : 'object'," + // + " 'properties' : {" + // + " 'root' : { 'type' : 'string' }" + // + " 'cyclic' : { 'type' : 'object' }" + // + " }" + // + "}"; + + static final String CYCLIC_2 = "" + // + "{" + // + " 'type' : 'object'," + // + " 'properties' : {" + // + " 'nested2' : { 'type' : 'string' }," + // + " 'cyclic' : " + CYCLIC_FIN + // + " }" + // + "}"; + + class Cyclic2 { + + String nested2; + Cyclic cyclic; + } + + static final String CYCLIC_1 = "" + // + "{" + // + " 'type' : 'object'," + // + " 'properties' : {" + // + " 'nested1' : { 'type' : 'string' }," + // + " 'cyclic2' : " + CYCLIC_2 + // + " }" + // + "}"; + + class Cyclic1 { + + String nested1; + Cyclic2 cyclic2; + } + + static final String CYCLIC = "" + // + "{" + // + " 'type' : 'object'," + // + " 'properties' : {" + // + " 'root' : { 'type' : 'string' }," + // + " 'cyclic1' : " + CYCLIC_1 + // + " }" + // + "}"; + + class Cyclic { + + String root; + Cyclic1 cyclic1; + } + + @WritingConverter + enum SimpleToDocumentConverter + implements org.springframework.core.convert.converter.Converter { + INSTANCE; + + @Override + public org.bson.Document convert(VariousFieldTypes source) { + return null; + } + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java index 2e7e99d1cb..29c9bbce16 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java @@ -35,6 +35,8 @@ import java.time.LocalDateTime; import java.util.*; +import org.bson.types.Code; +import org.bson.types.Decimal128; import org.bson.types.ObjectId; import org.hamcrest.Matcher; import org.hamcrest.Matchers; @@ -76,6 +78,7 @@ import org.springframework.data.mongodb.core.geo.Sphere; import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.FieldType; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.mongodb.core.mapping.PersonPojoStringId; @@ -1194,13 +1197,12 @@ public Person convert(org.bson.Document source) { @Test // DATAMONGO-743, DATAMONGO-2198 public void readsIntoStringsOutOfTheBox() { - String target = converter.read(String.class, new 
org.bson.Document("firstname", "Dave")); assertThat(target, Matchers.startsWith("{")); assertThat(target, Matchers.endsWith("}")); - assertThat(target, Matchers.containsString( "\"firstname\"")); - assertThat(target, Matchers.containsString( "\"Dave\"")); + assertThat(target, Matchers.containsString("\"firstname\"")); + assertThat(target, Matchers.containsString("\"Dave\"")); } @Test // DATAMONGO-766 @@ -1919,21 +1921,21 @@ public void omitsTypeHintWhenWritingSimpleTypes() { assertThat(target).doesNotContainKeys("_class"); } - @Test // DATAMONGO-1798 + @Test // DATAMONGO-1798 public void convertStringIdThatIsAnObjectIdHexToObjectIdIfTargetIsObjectId() { ObjectId source = new ObjectId(); assertThat(converter.convertId(source.toHexString(), ObjectId.class)).isEqualTo(source); } - @Test // DATAMONGO-1798 + @Test // DATAMONGO-1798 public void donNotConvertStringIdThatIsAnObjectIdHexToObjectIdIfTargetIsString() { ObjectId source = new ObjectId(); assertThat(converter.convertId(source.toHexString(), String.class)).isEqualTo(source.toHexString()); } - @Test // DATAMONGO-1798 + @Test // DATAMONGO-1798 public void donNotConvertStringIdThatIsAnObjectIdHexToObjectIdIfTargetIsObject() { ObjectId source = new ObjectId(); @@ -1952,6 +1954,43 @@ public void addsEqualObjectsToCollection() { assertThat(order.items).hasSize(3); } + @Test // DATAMONGO-1849 + public void mapsValueToExplicitTargetType() { + + WithExplicitTargetTypes source = new WithExplicitTargetTypes(); + source.script = "if (a > b) a else b"; + + org.bson.Document target = new org.bson.Document(); + converter.write(source, target); + + assertThat(target.get("script")).isEqualTo(new Code(source.script)); + } + + @Test // DATAMONGO-1849 + public void mapsCollectionValueToExplicitTargetType() { + + String script = "if (a > b) a else b"; + WithExplicitTargetTypes source = new WithExplicitTargetTypes(); + source.scripts = Collections.singletonList(script); + + org.bson.Document target = new org.bson.Document(); + converter.write(source, target); + + assertThat(target.get("scripts", List.class)).containsExactly(new Code(script)); + } + + @Test // DATAMONGO-1849 + public void mapsBigDecimalToDecimal128WhenAnnotatedWithFieldTargetType() { + + WithExplicitTargetTypes source = new WithExplicitTargetTypes(); + source.bigDecimal = BigDecimal.valueOf(3.14159D); + + org.bson.Document target = new org.bson.Document(); + converter.write(source, target); + + assertThat(target.get("bigDecimal")).isEqualTo(new Decimal128(source.bigDecimal)); + } + static class GenericType { T content; } @@ -2391,4 +2430,17 @@ static class SomeItem { static class Order { Collection items = new ArrayList<>(); } + + static class WithExplicitTargetTypes { + + @Field(targetType = FieldType.SCRIPT) // + String script; + + @Field(targetType = FieldType.SCRIPT) // + List scripts; + + @Field(targetType = FieldType.DECIMAL128) + BigDecimal bigDecimal; + } + } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java index 49f8a42080..473003db79 100755 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java @@ -28,13 +28,13 @@ import java.util.Map; import java.util.Optional; +import org.bson.types.Code; import org.bson.types.ObjectId; import 
org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; - import org.springframework.data.annotation.Id; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; @@ -48,6 +48,7 @@ import org.springframework.data.mongodb.core.mapping.DBRef; import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.FieldType; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.TextScore; @@ -856,6 +857,30 @@ public void shouldNotConvertHexStringToObjectIdForUnresolvablePath() { assertThat(document).isEqualTo(new org.bson.Document("nested.unresolvablePath.id", idHex)); } + @Test // DATAMONGO-1849 + public void shouldConvertPropertyWithExplicitTargetType() { + + String script = "if (a > b) a else b"; + Query query = new Query(where("script").is(script)); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WithExplicitTargetTypes.class)); + + assertThat(document).isEqualTo(new org.bson.Document("script", new Code(script))); + } + + @Test // DATAMONGO-1849 + public void shouldConvertCollectionPropertyWithExplicitTargetType() { + + String script = "if (a > b) a else b"; + Query query = new Query(where("scripts").is(script)); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WithExplicitTargetTypes.class)); + + assertThat(document).isEqualTo(new org.bson.Document("scripts", new Code(script))); + } + @Document public class Foo { @Id private ObjectId id; @@ -979,4 +1004,13 @@ static class EntityWithComplexValueTypeMap { static class EntityWithComplexValueTypeList { List list; } + + static class WithExplicitTargetTypes { + + @Field(targetType = FieldType.SCRIPT) // + String script; + + @Field(targetType = FieldType.SCRIPT) // + List scripts; + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/JsonSchemaObjectUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/JsonSchemaObjectUnitTests.java index ebdb104c91..1b57d2dff5 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/JsonSchemaObjectUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/JsonSchemaObjectUnitTests.java @@ -22,6 +22,12 @@ import static org.springframework.data.mongodb.test.util.Assertions.*; import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Date; +import java.util.List; +import java.util.Map; +import java.util.Set; import org.bson.Document; import org.junit.Test; @@ -36,6 +42,48 @@ */ public class JsonSchemaObjectUnitTests { + // ----------------- + // type from class + // ----------------- + + @Test // DATAMONGO-1849 + public void primitiveType() { + + assertThat(JsonSchemaObject.of(boolean.class).getTypes()).containsExactly(Type.booleanType()); + assertThat(JsonSchemaObject.of(int.class).getTypes()).containsExactly(Type.intType()); + assertThat(JsonSchemaObject.of(long.class).getTypes()).containsExactly(Type.longType()); + assertThat(JsonSchemaObject.of(float.class).getTypes()).containsExactly(Type.doubleType()); + 
assertThat(JsonSchemaObject.of(double.class).getTypes()).containsExactly(Type.doubleType()); + assertThat(JsonSchemaObject.of(short.class).getTypes()).containsExactly(Type.numberType()); + } + + @Test // DATAMONGO-1849 + public void objectType() { + + assertThat(JsonSchemaObject.of(Object.class).getTypes()).containsExactly(Type.objectType()); + assertThat(JsonSchemaObject.of(Map.class).getTypes()).containsExactly(Type.objectType()); + assertThat(JsonSchemaObject.of(Document.class).getTypes()).containsExactly(Type.objectType()); + } + + @Test // DATAMONGO-1849 + public void binaryData() { + assertThat(JsonSchemaObject.of(byte[].class).getTypes()).containsExactly(Type.binaryType()); + } + + @Test // DATAMONGO-1849 + public void collectionType() { + + assertThat(JsonSchemaObject.of(Object[].class).getTypes()).containsExactly(Type.arrayType()); + assertThat(JsonSchemaObject.of(Collection.class).getTypes()).containsExactly(Type.arrayType()); + assertThat(JsonSchemaObject.of(List.class).getTypes()).containsExactly(Type.arrayType()); + assertThat(JsonSchemaObject.of(Set.class).getTypes()).containsExactly(Type.arrayType()); + } + + @Test // DATAMONGO-1849 + public void dateType() { + assertThat(JsonSchemaObject.of(Date.class).getTypes()).containsExactly(Type.dateType()); + } + // ----------------- // type : 'object' // ----------------- @@ -58,10 +106,10 @@ public void objectObjectShouldRenderNrPropertiesCorrectly() { @Test // DATAMONGO-1835 public void objectObjectShouldRenderRequiredPropertiesCorrectly() { - assertThat(object().required("spring", "data", "mongodb").generatedDescription().toDocument()).isEqualTo( - new Document("type", "object") - .append("description", "Must be an object where spring, data, mongodb are mandatory.").append("required", - Arrays.asList("spring", "data", "mongodb"))); + assertThat(object().required("spring", "data", "mongodb").generatedDescription().toDocument()) + .isEqualTo(new Document("type", "object") + .append("description", "Must be an object where spring, data, mongodb are mandatory.") + .append("required", Arrays.asList("spring", "data", "mongodb"))); } @Test // DATAMONGO-1835 @@ -101,7 +149,6 @@ public void objectObjectShouldRenderNestedObjectPropertiesCorrectly() { .append("properties", new Document("city", new Document("type", "string") .append("description", "Must be a string with length [3-unbounded.").append("minLength", 3))))); - assertThat(object() .properties(JsonSchemaProperty.object("address") .properties(JsonSchemaProperty.string("city").minLength(3).generatedDescription()).generatedDescription()) @@ -120,6 +167,17 @@ public void objectObjectShouldRenderPatternPropertiesCorrectly() { .generatedDescription().toDocument()).isEqualTo(expected); } + @Test // DATAMONGO-1849 + public void objectShouldIncludeRequiredNestedCorrectly() { + + assertThat(object() // + .properties( // + JsonSchemaProperty.required(JsonSchemaProperty.string("lastname")) // + ).toDocument()) + .isEqualTo(new Document("type", "object").append("required", Collections.singletonList("lastname")) + .append("properties", new Document("lastname", new Document("type", "string")))); + } + // ----------------- // type : 'string' // ----------------- diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/MongoJsonSchemaUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/MongoJsonSchemaUnitTests.java index bf0581c6a0..edc2c6f4fd 100644 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/MongoJsonSchemaUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/MongoJsonSchemaUnitTests.java @@ -57,6 +57,20 @@ public void rendersDocumentBasedSchemaCorrectly() { new Document("type", "object").append("required", Arrays.asList("firstname", "lastname")))); } + @Test // DATAMONGO-1849 + public void rendersRequiredPropertiesCorrectly() { + + MongoJsonSchema schema = MongoJsonSchema.builder() // + .required("firstname") // + .properties( // + JsonSchemaProperty.required(JsonSchemaProperty.string("lastname")) // + ).build(); + + assertThat(schema.toDocument()).isEqualTo(new Document("$jsonSchema", + new Document("type", "object").append("required", Arrays.asList("firstname", "lastname")).append("properties", + new Document("lastname", new Document("type", "string"))))); + } + @Test(expected = IllegalArgumentException.class) // DATAMONGO-1835 public void throwsExceptionOnNullRoot() { MongoJsonSchema.of((JsonSchemaObject) null); diff --git a/src/main/asciidoc/new-features.adoc b/src/main/asciidoc/new-features.adoc index 96a1fafddb..bae65336c4 100644 --- a/src/main/asciidoc/new-features.adoc +++ b/src/main/asciidoc/new-features.adoc @@ -12,6 +12,7 @@ * Support `Range` in repository between queries. * Kotlin extension methods accepting `KClass` are deprecated now in favor of `reified` methods. * Support of array filters in `Update` operations. +* <> from domain types. [[new-features.2-1-0]] == What's New in Spring Data MongoDB 2.1 diff --git a/src/main/asciidoc/reference/mapping.adoc b/src/main/asciidoc/reference/mapping.adoc index 1e96d2a615..719f4a01a9 100644 --- a/src/main/asciidoc/reference/mapping.adoc +++ b/src/main/asciidoc/reference/mapping.adoc @@ -418,7 +418,7 @@ The MappingMongoConverter can use metadata to drive the mapping of objects to do * `@Transient`: By default all private fields are mapped to the document, this annotation excludes the field where it is applied from being stored in the database * `@PersistenceConstructor`: Marks a given constructor - even a package protected one - to use when instantiating the object from the database. Constructor arguments are mapped by name to the key values in the retrieved Document. * `@Value`: This annotation is part of the Spring Framework . Within the mapping framework it can be applied to constructor arguments. This lets you use a Spring Expression Language statement to transform a key's value retrieved in the database before it is used to construct a domain object. In order to reference a property of a given document one has to use expressions like: `@Value("#root.myProperty")` where `root` refers to the root of the given document. -* `@Field`: Applied at the field level and described the name of the field as it will be represented in the MongoDB BSON document thus allowing the name to be different than the fieldname of the class. +* `@Field`: Applied at the field level it allows to describe the name and type of the field as it will be represented in the MongoDB BSON document thus allowing the name and type to be different than the fieldname of the class as well as the property type. * `@Version`: Applied at field level is used for optimistic locking and checked for modification on save operations. The initial value is `zero` which is bumped automatically on every update. The mapping metadata infrastructure is defined in a separate spring-data-commons project that is technology agnostic. 
Specific subclasses are used in the MongoDB support to support annotation-based metadata. Other strategies are also possible to put in place if there is demand.
@@ -455,7 +455,6 @@ public class Person {
 
   private T address;
 
-
   public Person(Integer ssn) {
     this.ssn = ssn;
   }
@@ -482,6 +481,43 @@ public class Person {
   // other getters/setters omitted
 ----
 
+[TIP]
+====
+`@Field(targetType=...)` can come in handy when the native MongoDB type inferred by the mapping infrastructure does not
+match the expected one. This is the case for `BigDecimal`, which is represented as a `String` instead of `Decimal128` simply
+because earlier versions of MongoDB Server did not support it.
+[source,java]
+----
+public class Balance {
+
+  @Field(targetType = DECIMAL128)
+  private BigDecimal value;
+
+  // ...
+}
+----
+
+You may even consider defining your own custom annotation.
+
+[source,java]
+----
+@Target(ElementType.FIELD)
+@Retention(RetentionPolicy.RUNTIME)
+@Field(targetType = FieldType.DECIMAL128)
+public @interface Decimal128 { }
+
+// ...
+
+public class Balance {
+
+  @Decimal128
+  private BigDecimal value;
+
+  // ...
+}
+----
+====
+
 [[mapping-custom-object-construction]]
 === Customized Object Construction
diff --git a/src/main/asciidoc/reference/mongo-custom-conversions.adoc b/src/main/asciidoc/reference/mongo-custom-conversions.adoc
new file mode 100644
index 0000000000..90d6be1aaf
--- /dev/null
+++ b/src/main/asciidoc/reference/mongo-custom-conversions.adoc
@@ -0,0 +1,134 @@
+[[mongo.custom-converters]]
+== Custom Conversions - Overriding Default Mapping
+
+The most trivial way of influencing the mapping result is by specifying the desired native MongoDB target type via the
+`@Field` annotation. This allows you to work with non-MongoDB types like `BigDecimal` in the domain model while persisting
+values in native `org.bson.types.Decimal128` format.
+
+.Explicit target type mapping
+====
+[source,java]
+----
+public class Payment {
+
+  @Id String id; <1>
+
+  @Field(targetType = FieldType.DECIMAL128) <2>
+  BigDecimal value;
+
+  Date date; <3>
+
+}
+----
+[source,json]
+----
+{
+  "_id" : ObjectId("5ca4a34fa264a01503b36af8"), <1>
+  "value" : NumberDecimal(2.099), <2>
+  "date" : ISODate("2019-04-03T12:11:01.870Z") <3>
+}
+----
+<1> String _id_ values that represent a valid `ObjectId` are converted automatically. See <>
+for details.
+<2> The desired target type is explicitly defined as `Decimal128`, which translates to `NumberDecimal`. Otherwise the
+`BigDecimal` value would have been turned into a `String`.
+<3> `Date` values are handled by the MongoDB driver itself and are stored as `ISODate`.
+====
+
+The snippet above is handy for providing simple type hints. To gain more fine-grained control over the mapping process,
+you can register Spring converters with the `MongoConverter` implementations, such as the `MappingMongoConverter`.
+
+The `MappingMongoConverter` checks to see if any Spring converters can handle a specific class before attempting to map the object itself. To 'hijack' the normal mapping strategies of the `MappingMongoConverter`, perhaps for increased performance or other custom mapping needs, you first need to create an implementation of the Spring `Converter` interface and then register it with the `MappingConverter`.
+
+NOTE: For more information on the Spring type conversion service, see the reference docs https://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/core.html#validation[here].
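+
+If you use Java-based configuration, converter registration typically happens through `MongoCustomConversions`. The
+following sketch is a minimal, hypothetical example: the configuration class and database name are placeholders,
+`AbstractMongoConfiguration` is just one possible base class, and the `PersonWriteConverter` / `PersonReadConverter`
+implementations are shown in the following sections. XML-based registration is covered later in this chapter.
+
+[source,java]
+----
+import java.util.Arrays;
+
+import org.springframework.context.annotation.Configuration;
+import org.springframework.data.mongodb.config.AbstractMongoConfiguration;
+import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
+
+import com.mongodb.MongoClient;
+
+@Configuration
+class CustomConversionsConfiguration extends AbstractMongoConfiguration {
+
+  @Override
+  protected String getDatabaseName() {
+    return "database"; // placeholder database name
+  }
+
+  @Override
+  public MongoClient mongoClient() {
+    return new MongoClient();
+  }
+
+  @Override
+  public MongoCustomConversions customConversions() {
+
+    // both converters are picked up and registered with the MappingMongoConverter
+    return new MongoCustomConversions(
+        Arrays.asList(new PersonWriteConverter(), new PersonReadConverter()));
+  }
+}
+----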
+
+[[mongo.custom-converters.writer]]
+=== Saving by Using a Registered Spring Converter
+
+The following example shows an implementation of the `Converter` that converts from a `Person` object to an `org.bson.Document`:
+
+[source,java]
+----
+import org.springframework.core.convert.converter.Converter;
+
+import org.bson.Document;
+
+public class PersonWriteConverter implements Converter<Person, Document> {
+
+  public Document convert(Person source) {
+    Document document = new Document();
+    document.put("_id", source.getId());
+    document.put("name", source.getFirstName());
+    document.put("age", source.getAge());
+    return document;
+  }
+}
+----
+
+[[mongo.custom-converters.reader]]
+=== Reading by Using a Spring Converter
+
+The following example shows an implementation of a `Converter` that converts from a `Document` to a `Person` object:
+
+[source,java]
+----
+public class PersonReadConverter implements Converter<Document, Person> {
+
+  public Person convert(Document source) {
+    Person p = new Person((ObjectId) source.get("_id"), (String) source.get("name"));
+    p.setAge((Integer) source.get("age"));
+    return p;
+  }
+}
+----
+
+[[mongo.custom-converters.xml]]
+=== Registering Spring Converters with the `MongoConverter`
+
+The Mongo Spring namespace provides a convenient way to register Spring `Converter` instances with the `MappingMongoConverter`. The following configuration snippet shows how to manually register converter beans as well as configure the wrapping `MappingMongoConverter` into a `MongoTemplate`:
+
+[source,xml]
+----
+<mongo:db-factory dbname="database"/>
+
+<mongo:mapping-converter>
+  <mongo:custom-converters>
+    <mongo:converter ref="readConverter"/>
+    <mongo:converter>
+      <bean class="org.springframework.data.mongodb.test.PersonWriteConverter"/>
+    </mongo:converter>
+  </mongo:custom-converters>
+</mongo:mapping-converter>
+
+<bean id="readConverter" class="org.springframework.data.mongodb.test.PersonReadConverter"/>
+
+<bean id="mongoTemplate" class="org.springframework.data.mongodb.core.MongoTemplate">
+  <constructor-arg ref="mongoDbFactory"/>
+  <constructor-arg ref="mappingConverter"/>
+</bean>
+----
+
+You can also use the `base-package` attribute of the `custom-converters` element to enable classpath scanning for all `Converter` and `GenericConverter` implementations below the given package, as the following example shows:
+
+[source,xml]
+----
+<mongo:mapping-converter>
+  <mongo:custom-converters base-package="com.bigbank.converters"/>
+</mongo:mapping-converter>
+----
+
+[[mongo.converter-disambiguation]]
+=== Converter Disambiguation
+
+Generally, we inspect the `Converter` implementations for the source and target types they convert from and to. Depending on whether one of those is a type MongoDB can handle natively, we register the converter instance as a reading or a writing converter. The following examples show a writing converter and a reading converter (note that the difference is in the order of the generic type parameters on `Converter`):
+
+[source,java]
+----
+// Write converter as only the target type is one Mongo can handle natively
+class MyConverter implements Converter<Person, String> { … }
+
+// Read converter as only the source type is one Mongo can handle natively
+class MyConverter implements Converter<String, Person> { … }
+----
+
+If you write a `Converter` whose source and target type are native Mongo types, we cannot determine whether we should consider it as a reading or a writing converter. Registering the converter instance as both might lead to unwanted results. For example, a `Converter<String, Long>` is ambiguous, although it probably does not make sense to try to convert all `String` instances into `Long` instances when writing. To let you force the infrastructure to register a converter for only one way, we provide the `@ReadingConverter` and `@WritingConverter` annotations to be used in the converter implementation.
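+
+The following sketch (a hypothetical converter, not part of the examples above) shows how such an annotation resolves the
+ambiguity by pinning a converter between two natively handled types to the reading side:
+
+[source,java]
+----
+import org.springframework.core.convert.converter.Converter;
+import org.springframework.data.convert.ReadingConverter;
+
+// Both String and Long are types MongoDB handles natively, so without the annotation
+// the infrastructure could not tell whether to register this converter for reads or writes.
+@ReadingConverter
+public class StringToLongConverter implements Converter<String, Long> {
+
+  public Long convert(String source) {
+    return Long.valueOf(source);
+  }
+}
+----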
diff --git a/src/main/asciidoc/reference/mongo-json-schema.adoc b/src/main/asciidoc/reference/mongo-json-schema.adoc
new file mode 100644
index 0000000000..e53d63b3eb
--- /dev/null
+++ b/src/main/asciidoc/reference/mongo-json-schema.adoc
@@ -0,0 +1,292 @@
+[[mongo.jsonSchema]]
+=== JSON Schema
+
+As of version 3.6, MongoDB supports collections that validate documents against a provided https://docs.mongodb.com/manual/core/schema-validation/#json-schema[JSON Schema].
+The schema itself, as well as the validation action and level, can be defined when creating the collection, as the following example shows:
+
+.Sample JSON schema
+====
+[source,json]
+----
+{
+  "type": "object", <1>
+
+  "required": [ "firstname", "lastname" ], <2>
+
+  "properties": { <3>
+
+    "firstname": { <4>
+      "type": "string",
+      "enum": [ "luke", "han" ]
+    },
+    "address": { <5>
+      "type": "object",
+      "properties": {
+        "postCode": { "type": "string", "minLength": 4, "maxLength": 5 }
+      }
+    }
+  }
+}
+----
+<1> JSON schema documents always describe a whole document from its root. A schema is a schema object itself that can contain
+embedded schema objects that describe properties and subdocuments.
+<2> `required` is a property that describes which properties are required in a document. It can be specified optionally, along with other
+schema constraints. See MongoDB's documentation on https://docs.mongodb.com/manual/reference/operator/query/jsonSchema/#available-keywords[available keywords].
+<3> `properties` is related to a schema object that describes an `object` type. It contains property-specific schema constraints.
+<4> `firstname` specifies constraints for the `firstname` field inside the document. Here, it is a string-based `properties` element declaring
+possible field values.
+<5> `address` is a subdocument defining a schema for values in its `postCode` field.
+====
+
+You can provide a schema either by specifying a schema document (that is, by using the `Document` API to parse or build a document object) or by building it with Spring Data's JSON schema utilities in `org.springframework.data.mongodb.core.schema`. `MongoJsonSchema` is the entry point for all JSON schema-related operations. The following example shows how to use `MongoJsonSchema.builder()` to create a JSON schema:
+
+.Creating a JSON schema
+====
+[source,java]
+----
+MongoJsonSchema.builder() <1>
+    .required("lastname") <2>
+
+    .properties(
+        required(string("firstname").possibleValues("luke", "han")), <3>
+
+        object("address")
+            .properties(string("postCode").minLength(4).maxLength(5)))
+
+    .build(); <4>
+----
+<1> Obtain a schema builder to configure the schema with a fluent API.
+<2> Configure required properties either directly, as shown here, or with more details as in 3.
+<3> Configure the required String-typed `firstname` field, allowing only `luke` and `han` values. Properties can be typed or untyped. Use a static import of `JsonSchemaProperty` to make the syntax slightly more compact and to get entry points such as `string(…)`.
+<4> Build the schema object. Use the schema to create either a collection or <>.
+====
+
+There are already some predefined and strongly typed schema objects (`JsonSchemaObject` and `JsonSchemaProperty`) available
+through static methods on the gateway interfaces.
+However, you may need to build custom property validation rules, which can be created through the builder API, as the following example shows:
+
+[source,java]
+----
+// "birthdate" : { "bsonType": "date" }
+JsonSchemaProperty.named("birthdate").ofType(Type.dateType());
+
+// "birthdate" : { "bsonType": "date", "description": "Must be a date" }
+JsonSchemaProperty.named("birthdate").with(JsonSchemaObject.of(Type.dateType()).description("Must be a date"));
+----
+
+`CollectionOptions` provides the entry point to schema support for collections, as the following example shows:
+
+.Create collection with `$jsonSchema`
+====
+[source,java]
+----
+MongoJsonSchema schema = MongoJsonSchema.builder().required("firstname", "lastname").build();
+
+template.createCollection(Person.class, CollectionOptions.empty().schema(schema));
+----
+====
+
+[[mongo.jsonSchema.generated]]
+==== Generating the Schema
+
+Setting up a schema can be a time-consuming task, and we encourage everyone who decides to do so to really take that time,
+because schema changes can be hard. However, there may be times when you do not want to craft the schema by hand,
+and that is where the `MongoJsonSchemaCreator` comes into play.
+
+The `MongoJsonSchemaCreator` and its default implementation generate a `MongoJsonSchema` from the domain type's metadata provided
+by the mapping infrastructure. This means that <> as well as potential <>
+are considered.
+
+.Generate JSON Schema from a domain type
+====
+[source,java]
+----
+public class Person {
+
+  private final String firstname; <1>
+  private final @Nullable String lastname; <2>
+  private int age; <3>
+  private Species species; <4>
+  private Address address; <5>
+  private @Field(targetType = FieldType.SCRIPT) String theForce; <6>
+  private @Transient Boolean useTheForce; <7>
+
+  public Person(String firstname, @Nullable String lastname) { <1> <2>
+
+    this.firstname = firstname;
+    this.lastname = lastname;
+  }
+
+  // getter / setter omitted
+}
+
+MongoJsonSchema schema = MongoJsonSchemaCreator.create(mongoOperations.getConverter())
+    .createSchemaFor(Person.class);
+
+template.createCollection(Person.class, CollectionOptions.empty().schema(schema));
+----
+
+[source,json]
+----
+{
+  'type' : 'object',
+  'required' : ['firstname', 'age'], <1> <3>
+  'properties' : {
+    'firstname' : { 'type' : 'string' }, <1>
+    'lastname' : { 'type' : 'string' }, <2>
+    'age' : { 'bsonType' : 'int' }, <3>
+    'species' : { <4>
+      'type' : 'string',
+      'enum' : ['HUMAN', 'WOOKIE', 'UNKNOWN']
+    },
+    'address' : { <5>
+      'type' : 'object',
+      'properties' : {
+        'postCode' : { 'type': 'string' }
+      }
+    },
+    'theForce' : { 'bsonType' : 'javascript' } <6>
+  }
+}
+----
+<1> Required property because it is **not** `@Nullable` and is used in the constructor.
+<2> Optional property: although it is used in the constructor, it is still `@Nullable`.
+<3> Primitive types are considered required properties.
+<4> Enums are restricted to their possible values.
+<5> Object-type properties are inspected themselves.
+<6> `String`-type property that is turned into `Code` by the mapping.
+<7> `@Transient` properties are left out when generating the schema.
+====
+
+NOTE: `_id` properties using types that can be converted into `ObjectId`, such as `String`, are mapped to `{ type : 'object' }`
+unless there is more specific information available via the `@MongoId` annotation.
+
+[cols="3,1,6", options="header"]
+.Special Schema Generation Rules
+|===
+| Java
+| Schema Type
+| Notes
+
+| `Object`
+| type : object
+| with `properties` if metadata available.
+ +| `Collection` +| type : array +| - + +| `Map` +| type : object +| - + +| `Enum` +| type : string +| with `enum` property holding the possible enumeration values. + +| `array` +| type : array +| simple type array unless it's a `byte[]` + +| `byte[]` +| bsonType : binData +| - + +|=== + +[[mongo.jsonSchema.query]] +==== Query a collection for matching Json Schema + +You can use a schema to query any collection for documents that match a given structure defined by a JSON schema, as the following example shows: + +.Query for Documents matching a `$jsonSchema` +==== +[source,java] +---- +MongoJsonSchema schema = MongoJsonSchema.builder().required("firstname", "lastname").build(); + +template.find(query(matchingDocumentStructure(schema)), Person.class); +---- +==== + +[[mongo.jsonSchema.types]] +==== Json Schema Types + +The following table shows the supported JSON schema types: + +[cols="3,1,6", options="header"] +.Supported JSON schema types +|=== +| Schema Type +| Java Type +| Schema Properties + +| `untyped` +| - +| `description`, generated `description`, `enum`, `allOf`, `anyOf`, `oneOf`, `not` + +| `object` +| `Object` +| `required`, `additionalProperties`, `properties`, `minProperties`, `maxProperties`, `patternProperties` + +| `array` +| any array except `byte[]` +| `uniqueItems`, `additionalItems`, `items`, `minItems`, `maxItems` + +| `string` +| `String` +| `minLength`, `maxLentgth`, `pattern` + +| `int` +| `int`, `Integer` +| `multipleOf`, `minimum`, `exclusiveMinimum`, `maximum`, `exclusiveMaximum` + +| `long` +| `long`, `Long` +| `multipleOf`, `minimum`, `exclusiveMinimum`, `maximum`, `exclusiveMaximum` + +| `double` +| `float`, `Float`, `double`, `Double` +| `multipleOf`, `minimum`, `exclusiveMinimum`, `maximum`, `exclusiveMaximum` + +| `decimal` +| `BigDecimal` +| `multipleOf`, `minimum`, `exclusiveMinimum`, `maximum`, `exclusiveMaximum` + +| `number` +| `Number` +| `multipleOf`, `minimum`, `exclusiveMinimum`, `maximum`, `exclusiveMaximum` + +| `binData` +| `byte[]` +| (none) + +| `boolean` +| `boolean`, `Boolean` +| (none) + +| `null` +| `null` +| (none) + +| `objectId` +| `ObjectId` +| (none) + +| `date` +| `java.util.Date` +| (none) + +| `timestamp` +| `BsonTimestamp` +| (none) + +| `regex` +| `java.util.regex.Pattern` +| (none) + +|=== + +NOTE: `untyped` is a generic type that is inherited by all typed schema types. It provides all `untyped` schema properties to typed schema types. + +For more information, see https://docs.mongodb.com/manual/reference/operator/query/jsonSchema/#op._S_jsonSchema[$jsonSchema]. diff --git a/src/main/asciidoc/reference/mongodb.adoc b/src/main/asciidoc/reference/mongodb.adoc index ec4bd601b0..1a7913c8b0 100644 --- a/src/main/asciidoc/reference/mongodb.adoc +++ b/src/main/asciidoc/reference/mongodb.adoc @@ -1736,184 +1736,7 @@ AggregationResults results = template.aggregate(aggregation, "tags", T WARNING: Indexes are only used if the collation used for the operation matches the index collation. -[[mongo.jsonSchema]] -=== JSON Schema - -As of version 3.6, MongoDB supports collections that validate documents against a provided https://docs.mongodb.com/manual/core/schema-validation/#json-schema[JSON Schema]. 
-The schema itself and both validation action and level can be defined when creating the collection, as the following example shows: - -.Sample JSON schema -==== -[source,json] ----- -{ - "type": "object", <1> - - "required": [ "firstname", "lastname" ], <2> - - "properties": { <3> - - "firstname": { <4> - "type": "string", - "enum": [ "luke", "han" ] - }, - "address": { <5> - "type": "object", - "properties": { - "postCode": { "type": "string", "minLength": 4, "maxLength": 5 } - } - } - } -} ----- -<1> JSON schema documents always describe a whole document from its root. A schema is a schema object itself that can contain -embedded schema objects that describe properties and subdocuments. -<2> `required` is a property that describes which properties are required in a document. It can be specified optionally, along with other -schema constraints. See MongoDB's documentation on https://docs.mongodb.com/manual/reference/operator/query/jsonSchema/#available-keywords[available keywords]. -<3> `properties` is related to a schema object that describes an `object` type. It contains property-specific schema constraints. -<4> `firstname` specifies constraints for the `firsname` field inside the document. Here, it is a string-based `properties` element declaring - possible field values. -<5> `address` is a subdocument defining a schema for values in its `postCode` field. -==== - -You can provide a schema either by specifying a schema document (that is, by using the `Document` API to parse or build a document object) or by building it with Spring Data's JSON schema utilities in `org.springframework.data.mongodb.core.schema`. `MongoJsonSchema` is the entry point for all JSON schema-related operations. The following example shows how use `MongoJsonSchema.builder()` to create a JSON schema: - -.Creating a JSON schema -==== -[source,java] ----- -MongoJsonSchema.builder() <1> - .required("firstname", "lastname") <2> - - .properties( - string("firstname").possibleValues("luke", "han"), <3> - - object("address") - .properties(string("postCode").minLength(4).maxLength(5))) - - .build(); <4> ----- -<1> Obtain a schema builder to configure the schema with a fluent API. -<2> Configure required properties. -<3> Configure the String-typed `firstname` field, allowing only `luke` and `han` values. Properties can be typed or untyped. Use a static import of `JsonSchemaProperty` to make the syntax slightly more compact and to get entry points such as `string(…)`. -<4> Build the schema object. Use the schema to create either a collection or <>. -==== - -There are already some predefined and strongly typed schema objects (`JsonSchemaObject` and `JsonSchemaProperty`) available -through static methods on the gateway interfaces. 
-However, you may need to build custom property validation rules, which can be created through the builder API, as the following example shows: - -[source,java] ----- -// "birthdate" : { "bsonType": "date" } -JsonSchemaProperty.named("birthdate").ofType(Type.dateType()); - -// "birthdate" : { "bsonType": "date", "description", "Must be a date" } -JsonSchemaProperty.named("birthdate").with(JsonSchemaObject.of(Type.dateType()).description("Must be a date")); ----- - -`CollectionOptions` provides the entry point to schema support for collections, as the following example shows: - -.Create collection with `$jsonSchema` -==== -[source,java] ----- -MongoJsonSchema schema = MongoJsonSchema.builder().required("firstname", "lastname").build(); - -template.createCollection(Person.class, CollectionOptions.empty().schema(schema)); ----- -==== - -You can use a schema to query any collection for documents that match a given structure defined by a JSON schema, as the following example shows: - -.Query for Documents matching a `$jsonSchema` -==== -[source,java] ----- -MongoJsonSchema schema = MongoJsonSchema.builder().required("firstname", "lastname").build(); - -template.find(query(matchingDocumentStructure(schema)), Person.class); ----- -==== - -The following table shows the supported JSON schema types: - -[cols="3,1,6", options="header"] -.Supported JSON schema types -|=== -| Schema Type -| Java Type -| Schema Properties - -| `untyped` -| - -| `description`, generated `description`, `enum`, `allOf`, `anyOf`, `oneOf`, `not` - -| `object` -| `Object` -| `required`, `additionalProperties`, `properties`, `minProperties`, `maxProperties`, `patternProperties` - -| `array` -| any array except `byte[]` -| `uniqueItems`, `additionalItems`, `items`, `minItems`, `maxItems` - -| `string` -| `String` -| `minLength`, `maxLentgth`, `pattern` - -| `int` -| `int`, `Integer` -| `multipleOf`, `minimum`, `exclusiveMinimum`, `maximum`, `exclusiveMaximum` - -| `long` -| `long`, `Long` -| `multipleOf`, `minimum`, `exclusiveMinimum`, `maximum`, `exclusiveMaximum` - -| `double` -| `float`, `Float`, `double`, `Double` -| `multipleOf`, `minimum`, `exclusiveMinimum`, `maximum`, `exclusiveMaximum` - -| `decimal` -| `BigDecimal` -| `multipleOf`, `minimum`, `exclusiveMinimum`, `maximum`, `exclusiveMaximum` - -| `number` -| `Number` -| `multipleOf`, `minimum`, `exclusiveMinimum`, `maximum`, `exclusiveMaximum` - -| `binData` -| `byte[]` -| (none) - -| `boolean` -| `boolean`, `Boolean` -| (none) - -| `null` -| `null` -| (none) - -| `objectId` -| `ObjectId` -| (none) - -| `date` -| `java.util.Date` -| (none) - -| `timestamp` -| `BsonTimestamp` -| (none) - -| `regex` -| `java.util.regex.Pattern` -| (none) - -|=== - -NOTE: `untyped` is a generic type that is inherited by all typed schema types. It provides all `untyped` schema properties to typed schema types. - -For more information, see https://docs.mongodb.com/manual/reference/operator/query/jsonSchema/#op._S_jsonSchema[$jsonSchema]. +include::./mongo-json-schema.adoc[leveloffset=+1] [[mongo.query.fluent-template-api]] === Fluent Template API @@ -2930,105 +2753,7 @@ TypedAggregation agg = Aggregation.newAggregation(Book.class, <4> Otherwise, add the field value of `author.middle`. ==== -[[mongo.custom-converters]] -== Overriding Default Mapping with Custom Converters - -To have more fine-grained control over the mapping process, you can register Spring converters with the `MongoConverter` implementations, such as the `MappingMongoConverter`. 
- -The `MappingMongoConverter` checks to see if any Spring converters can handle a specific class before attempting to map the object itself. To 'hijack' the normal mapping strategies of the `MappingMongoConverter`, perhaps for increased performance or other custom mapping needs, you first need to create an implementation of the Spring `Converter` interface and then register it with the `MappingConverter`. - -NOTE: For more information on the Spring type conversion service, see the reference docs https://docs.spring.io/spring/docs/{springVersion}/spring-framework-reference/core.html#validation[here]. - -[[mongo.custom-converters.writer]] -=== Saving by Using a Registered Spring Converter - -The following example shows an implementation of the `Converter` that converts from a `Person` object to a `org.bson.Document`: - -[source,java] ----- -import org.springframework.core.convert.converter.Converter; - -import org.bson.Document; - -public class PersonWriteConverter implements Converter { - - public Document convert(Person source) { - Document document = new Document(); - document.put("_id", source.getId()); - document.put("name", source.getFirstName()); - document.put("age", source.getAge()); - return document; - } -} ----- - -[[mongo.custom-converters.reader]] -=== Reading by Using a Spring Converter - -The following example shows an implementation of a `Converter` that converts from a `Document` to a `Person` object: - -[source,java] ----- -public class PersonReadConverter implements Converter { - - public Person convert(Document source) { - Person p = new Person((ObjectId) source.get("_id"), (String) source.get("name")); - p.setAge((Integer) source.get("age")); - return p; - } -} ----- - -[[mongo.custom-converters.xml]] -=== Registering Spring Converters with the `MongoConverter` - -The Mongo Spring namespace provides a convenient way to register Spring `Converter` instances with the `MappingMongoConverter`. The following configuration snippet shows how to manually register converter beans as well as configure the wrapping `MappingMongoConverter` into a `MongoTemplate`: - -[source,xml] ----- - - - - - - - - - - - - - - - - - ----- - -You can also use the `base-package` attribute of the `custom-converters` element to enable classpath scanning for all `Converter` and `GenericConverter` implementations below the given package, as the following example shows: - -[source,xml] ----- - - - ----- - -[[mongo.converter-disambiguation]] -=== Converter Disambiguation - -Generally, we inspect the `Converter` implementations for the source and target types they convert from and to. Depending on whether one of those is a type MongoDB can handle natively, we register the converter instance as a reading or a writing converter. The following examples show a writer converter and a read converter (note the difference is in the order of the qualifiers on `Converter`): - -[source,java] ----- -// Write converter as only the target type is one Mongo can handle natively -class MyConverter implements Converter { … } - -// Read converter as only the source type is one Mongo can handle natively -class MyConverter implements Converter { … } ----- - -If you write a `Converter` whose source and target type are native Mongo types, we cannot determine whether we should consider it as a reading or a writing converter. Registering the converter instance as both might lead to unwanted results. 
For example, a `Converter` is ambiguous, although it probably does not make sense to try to convert all `String` instances into `Long` instances when writing. To let you force the infrastructure to register a converter for only one way, we provide `@ReadingConverter` and `@WritingConverter` annotations to be used in the converter implementation. +include::mongo-custom-conversions.adoc[leveloffset=+1] [[mongo-template.index-and-collections]] == Index and Collection Management