From cdd4193199ebe76b2e57158ef900b3c72660dc27 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Mon, 28 Jan 2019 16:15:45 +0100 Subject: [PATCH 1/3] Prepare issue branch --- pom.xml | 2 +- spring-data-mongodb-benchmarks/pom.xml | 2 +- spring-data-mongodb-cross-store/pom.xml | 4 ++-- spring-data-mongodb-distribution/pom.xml | 2 +- spring-data-mongodb/pom.xml | 2 +- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/pom.xml b/pom.xml index 657607148e..17e979e7af 100644 --- a/pom.xml +++ b/pom.xml @@ -5,7 +5,7 @@ org.springframework.data spring-data-mongodb-parent - 2.2.0.BUILD-SNAPSHOT + 2.2.0.DATAMONGO-REACTIVE-SKIP-TAKE-SNAPSHOT pom Spring Data MongoDB diff --git a/spring-data-mongodb-benchmarks/pom.xml b/spring-data-mongodb-benchmarks/pom.xml index c2ff37b35c..5aa53d1052 100644 --- a/spring-data-mongodb-benchmarks/pom.xml +++ b/spring-data-mongodb-benchmarks/pom.xml @@ -7,7 +7,7 @@ org.springframework.data spring-data-mongodb-parent - 2.2.0.BUILD-SNAPSHOT + 2.2.0.DATAMONGO-REACTIVE-SKIP-TAKE-SNAPSHOT ../pom.xml diff --git a/spring-data-mongodb-cross-store/pom.xml b/spring-data-mongodb-cross-store/pom.xml index fd36f227c0..22650f6fb1 100644 --- a/spring-data-mongodb-cross-store/pom.xml +++ b/spring-data-mongodb-cross-store/pom.xml @@ -6,7 +6,7 @@ org.springframework.data spring-data-mongodb-parent - 2.2.0.BUILD-SNAPSHOT + 2.2.0.DATAMONGO-REACTIVE-SKIP-TAKE-SNAPSHOT ../pom.xml @@ -50,7 +50,7 @@ org.springframework.data spring-data-mongodb - 2.2.0.BUILD-SNAPSHOT + 2.2.0.DATAMONGO-REACTIVE-SKIP-TAKE-SNAPSHOT diff --git a/spring-data-mongodb-distribution/pom.xml b/spring-data-mongodb-distribution/pom.xml index fc8d28a2b6..bbb8f15e89 100644 --- a/spring-data-mongodb-distribution/pom.xml +++ b/spring-data-mongodb-distribution/pom.xml @@ -14,7 +14,7 @@ org.springframework.data spring-data-mongodb-parent - 2.2.0.BUILD-SNAPSHOT + 2.2.0.DATAMONGO-REACTIVE-SKIP-TAKE-SNAPSHOT ../pom.xml diff --git a/spring-data-mongodb/pom.xml b/spring-data-mongodb/pom.xml index f3c85a046a..0310d12a51 100644 --- a/spring-data-mongodb/pom.xml +++ b/spring-data-mongodb/pom.xml @@ -11,7 +11,7 @@ org.springframework.data spring-data-mongodb-parent - 2.2.0.BUILD-SNAPSHOT + 2.2.0.DATAMONGO-REACTIVE-SKIP-TAKE-SNAPSHOT ../pom.xml From 5eafa86db20afc7d8c0a49770a990d4e6f75b7a3 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Mon, 28 Jan 2019 16:17:11 +0100 Subject: [PATCH 2/3] Hacking: Apply skip & limitRequest to source (eg. 
a db driver) --- .../mongodb/core/ReactiveMongoTemplate.java | 159 +++++++++++- .../data/mongodb/core/Fluxperiment.java | 240 ++++++++++++++++++ .../core/ReactiveMongoTemplateTests.java | 16 ++ 3 files changed, 406 insertions(+), 9 deletions(-) create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Fluxperiment.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java index 73b9ea6b55..ac036d68ec 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java @@ -20,11 +20,14 @@ import lombok.AccessLevel; import lombok.NonNull; import lombok.RequiredArgsConstructor; +import reactor.core.CoreSubscriber; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; +import reactor.util.context.Context; import reactor.util.function.Tuple2; import reactor.util.function.Tuples; +import java.lang.reflect.Field; import java.util.*; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; @@ -69,7 +72,16 @@ import org.springframework.data.mongodb.core.aggregation.PrefixingDelegatingAggregationOperationContext; import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext; import org.springframework.data.mongodb.core.aggregation.TypedAggregation; -import org.springframework.data.mongodb.core.convert.*; +import org.springframework.data.mongodb.core.convert.DbRefResolver; +import org.springframework.data.mongodb.core.convert.JsonSchemaMapper; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; +import org.springframework.data.mongodb.core.convert.MongoJsonSchemaMapper; +import org.springframework.data.mongodb.core.convert.MongoWriter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.convert.UpdateMapper; import org.springframework.data.mongodb.core.index.MongoMappingEventPublisher; import org.springframework.data.mongodb.core.index.ReactiveIndexOperations; import org.springframework.data.mongodb.core.index.ReactiveMongoPersistentEntityIndexCreator; @@ -100,6 +112,7 @@ import org.springframework.util.ClassUtils; import org.springframework.util.CollectionUtils; import org.springframework.util.ObjectUtils; +import org.springframework.util.ReflectionUtils; import org.springframework.util.ResourceUtils; import org.springframework.util.StringUtils; @@ -111,11 +124,29 @@ import com.mongodb.MongoException; import com.mongodb.ReadPreference; import com.mongodb.WriteConcern; -import com.mongodb.client.model.*; +import com.mongodb.client.model.CountOptions; +import com.mongodb.client.model.CreateCollectionOptions; +import com.mongodb.client.model.DeleteOptions; +import com.mongodb.client.model.FindOneAndDeleteOptions; +import com.mongodb.client.model.FindOneAndReplaceOptions; +import com.mongodb.client.model.FindOneAndUpdateOptions; +import com.mongodb.client.model.ReplaceOptions; +import com.mongodb.client.model.ReturnDocument; +import com.mongodb.client.model.UpdateOptions; +import 
com.mongodb.client.model.ValidationOptions; import com.mongodb.client.model.changestream.FullDocument; import com.mongodb.client.result.DeleteResult; import com.mongodb.client.result.UpdateResult; -import com.mongodb.reactivestreams.client.*; +import com.mongodb.reactivestreams.client.AggregatePublisher; +import com.mongodb.reactivestreams.client.ChangeStreamPublisher; +import com.mongodb.reactivestreams.client.ClientSession; +import com.mongodb.reactivestreams.client.DistinctPublisher; +import com.mongodb.reactivestreams.client.FindPublisher; +import com.mongodb.reactivestreams.client.MapReducePublisher; +import com.mongodb.reactivestreams.client.MongoClient; +import com.mongodb.reactivestreams.client.MongoCollection; +import com.mongodb.reactivestreams.client.MongoDatabase; +import com.mongodb.reactivestreams.client.Success; /** * Primary implementation of {@link ReactiveMongoOperations}. It simplifies the use of Reactive MongoDB usage and helps @@ -579,9 +610,98 @@ public Flux createFlux(String collectionName, ReactiveCollectionCallback< Mono> collectionPublisher = Mono .fromCallable(() -> getAndPrepareCollection(doGetDatabase(), collectionName)); - return collectionPublisher.flatMapMany(callback::doInCollection).onErrorMap(translateException()); + Flux source = collectionPublisher.flatMapMany(callback::doInCollection).onErrorMap(translateException()); + + + return new Flux() { + + @Override + public void subscribe(CoreSubscriber actual) { + + Long skip = extractSkip(actual); + Long take = extractLimit(actual); + + System.out.println(String.format("Setting offset %s and limit: %s", skip, take)); + + Context context = Context.empty(); + + // and here we use the original Flux and evaluate skip / take in the template + if (skip != null && skip > 0L) { + context = context.put("skip", skip); + } + if (take != null && take > 0L) { + context = context.put("take", take); + } + + + source.subscriberContext(context).subscribe(actual); + } + }; + } + + // --> HACKING + + @Nullable + static Long extractSkip(Subscriber subscriber) { + + if (subscriber == null || !ClassUtils.getShortName(subscriber.getClass()).endsWith("SkipSubscriber")) { + return null; + } + + java.lang.reflect.Field field = ReflectionUtils.findField(subscriber.getClass(), "remaining"); + if (field == null) { + return null; + } + + ReflectionUtils.makeAccessible(field); + Long skip = (Long) ReflectionUtils.getField(field, subscriber); + if (skip != null && skip > 0L) { + + // reset the field, otherwise we'd skip stuff in the code. 
+ ReflectionUtils.setField(field, subscriber, 0L); + } + + return skip; + } + + @Nullable + static Long extractLimit(Subscriber subscriber) { + + if (subscriber == null) { + return null; + } + + if (!ClassUtils.getShortName(subscriber.getClass()).endsWith("TakeSubscriber")) { + return extractLimit(extractPotentialTakeSubscriber(subscriber)); + } + + java.lang.reflect.Field field = ReflectionUtils.findField(subscriber.getClass(), "n"); + if (field == null) { + return null; + } + + ReflectionUtils.makeAccessible(field); + return (Long) ReflectionUtils.getField(field, subscriber); + } + + @Nullable + static Subscriber extractPotentialTakeSubscriber(Subscriber subscriber) { + + if (!ClassUtils.getShortName(subscriber.getClass()).endsWith("SkipSubscriber")) { + return null; + } + + Field field = ReflectionUtils.findField(subscriber.getClass(), "actual"); + if (field == null) { + return null; + } + + ReflectionUtils.makeAccessible(field); + return (Subscriber) ReflectionUtils.getField(field, subscriber); } + // <--- HACKING + /** * Create a reusable {@link Mono} for the {@code collectionName} and {@link ReactiveCollectionCallback}. * @@ -2533,12 +2653,33 @@ private Flux executeFindMultiInternal(ReactiveCollectionQueryCallback { - FindPublisher findPublisher = collectionCallback.doInCollection(collection); + return Mono.subscriberContext().flatMapMany(context -> { + + FindPublisher findPublisher = collectionCallback.doInCollection(collection); + + if (preparer != null) { + findPublisher = preparer.prepare(findPublisher); + } + + Long skip = context.getOrDefault("skip", null); + Long take = context.getOrDefault("take", null); + + System.out.println(String.format("Using offset: %s and limit: %s", skip, take)); + + if(skip != null && skip > 0L) { + findPublisher = findPublisher.skip(skip.intValue()); + } + + if(take != null && take > 0L) { + findPublisher = findPublisher.limit(take.intValue()); + } + + return Flux.from(findPublisher).doOnNext(System.out::println).map(objectCallback::doWith); + + }); + + - if (preparer != null) { - findPublisher = preparer.prepare(findPublisher); - } - return Flux.from(findPublisher).map(objectCallback::doWith); }); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Fluxperiment.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Fluxperiment.java new file mode 100644 index 0000000000..8544b81112 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/Fluxperiment.java @@ -0,0 +1,240 @@ +/* + * Copyright 2019 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; + +import reactor.core.CoreSubscriber; +import reactor.core.publisher.Flux; +import reactor.test.StepVerifier; +import reactor.util.context.Context; + +import java.lang.reflect.Field; +import java.util.stream.Stream; + +import org.junit.Test; +import org.reactivestreams.Subscriber; +import org.springframework.lang.Nullable; +import org.springframework.util.ClassUtils; +import org.springframework.util.ReflectionUtils; + +/** + * @author Christoph Strobl + */ +public class Fluxperiment { + + @Test + public void applySkipFromFlux() { + + hackedFlux().skip(3) // + .as(StepVerifier::create) // + .expectAccessibleContext().assertThat(ctx -> { + + assertThat(ctx.getOrEmpty("skip")).contains(3L); + assertThat(ctx.getOrEmpty("take")).isEmpty(); + }).then() // + .expectNext("4") // + .expectNext("5") // + .verifyComplete(); + } + + @Test + public void applyTakeFromFlux() { + + hackedFlux().limitRequest(3) // + .as(StepVerifier::create) // + .expectAccessibleContext().assertThat(ctx -> { + + assertThat(ctx.getOrEmpty("skip")).isEmpty(); + assertThat(ctx.getOrEmpty("take")).contains(3L); + }).then() // + .expectNext("1") // + .expectNext("2") // + .expectNext("3") // + .verifyComplete(); + } + + @Test + public void applySkipAndLimitFromFlux/* in that order */() { + + hackedFlux().skip(1) /* in DB */.limitRequest(2) /* in DB */ // + .as(StepVerifier::create) // + .expectAccessibleContext().assertThat(ctx -> { + + assertThat(ctx.getOrEmpty("skip")).contains(1L); + assertThat(ctx.getOrEmpty("take")).contains(2L); + }).then() // + .expectNext("2") // + .expectNext("3") // + .verifyComplete(); + } + + @Test + public void applyTakeButNotSkipFromFlux/* cause order matters */() { + + hackedFlux().limitRequest(3)/* in DB */.skip(1) /* in memory */ // + .as(StepVerifier::create) // + .expectAccessibleContext().assertThat(ctx -> { + + assertThat(ctx.getOrEmpty("skip")).isEmpty(); + assertThat(ctx.getOrEmpty("take")).contains(3L); + }).then() // + .expectNext("2") // + .expectNext("3") // + .verifyComplete(); + } + + @Test + public void justApplySkipButNotTakeIfTheyDoNotFollowOneAnother() { + + hackedFlux().skip(1)/* in DB */.map(v -> v).limitRequest(2) /* in memory */ // + .as(StepVerifier::create) // + .expectAccessibleContext().assertThat(ctx -> { + + assertThat(ctx.getOrEmpty("skip")).contains(1L); + assertThat(ctx.getOrEmpty("take")).isEmpty(); + }).then() // + .expectNext("2") // + .expectNext("3") // + .verifyComplete(); + } + + @Test + public void applyNeitherSkipNorTakeIfPrecededWithOtherOperator() { + + hackedFlux().map(v -> v).skip(1).limitRequest(2) // + .as(StepVerifier::create) // + .expectAccessibleContext().assertThat(ctx -> { + + assertThat(ctx.getOrEmpty("skip")).isEmpty(); + assertThat(ctx.getOrEmpty("take")).isEmpty(); + }).then() // + .expectNext("2") // + .expectNext("3") // + .verifyComplete(); + } + + @Test + public void applyOnlyFirstSkip/* toDatabase */() { + + hackedFlux().skip(3)/* in DB */.skip(1)/* in memory */ // + .as(StepVerifier::create) // + .expectAccessibleContext().assertThat(ctx -> { + + assertThat(ctx.getOrEmpty("skip")).contains(3L); + assertThat(ctx.getOrEmpty("take")).isEmpty(); + }).then() // + .expectNext("5") // + .verifyComplete(); + } + + Flux hackedFlux() { + + return new Flux() { + + @Override + public void subscribe(CoreSubscriber actual) { + + Long skip = extractSkip(actual); + Long take = extractLimit(actual); + + System.out.println(String.format("Using 
offset: %s and limit: %s", skip, take)); + + // and here we use the original Flux and evaluate skip / take in the template + Stream source = Stream.of("1", "2", "3", "4", "5"); + Context context = Context.empty(); + + // and here we use the original Flux and evaluate skip / take in the template + if (skip != null && skip > 0L) { + context = context.put("skip", skip); + source = source.skip(skip); + } + if (take != null && take > 0L) { + + context = context.put("take", take); + source = source.limit(take); + } + + Flux.fromStream(source).subscriberContext(context).subscribe(actual); + + } + }; + } + + @Nullable + static Long extractSkip(Subscriber subscriber) { + + if (subscriber == null || !ClassUtils.getShortName(subscriber.getClass()).endsWith("SkipSubscriber")) { + return null; + } + + java.lang.reflect.Field field = ReflectionUtils.findField(subscriber.getClass(), "remaining"); + if (field == null) { + return null; + } + + ReflectionUtils.makeAccessible(field); + Long skip = (Long) ReflectionUtils.getField(field, subscriber); + if (skip != null && skip > 0L) { + + // reset the field, otherwise we'd skip stuff in the code. + ReflectionUtils.setField(field, subscriber, 0L); + } + + return skip; + } + + @Nullable + static Long extractLimit(Subscriber subscriber) { + + if (subscriber == null) { + return null; + } + + if (!ClassUtils.getShortName(subscriber.getClass()).endsWith("TakeSubscriber") + && !ClassUtils.getShortName(subscriber.getClass()).endsWith("FluxLimitRequestSubscriber")) { + return extractLimit(extractPotentialTakeSubscriber(subscriber)); + } + + java.lang.reflect.Field field = ReflectionUtils.findField(subscriber.getClass(), "n"); // from TakeSubscriber + if (field == null) { + + field = ReflectionUtils.findField(subscriber.getClass(), "toProduce"); // from FluxLimitRequestSubscriber + if (field == null) { + return null; + } + } + + ReflectionUtils.makeAccessible(field); + return (Long) ReflectionUtils.getField(field, subscriber); + } + + @Nullable + static Subscriber extractPotentialTakeSubscriber(Subscriber subscriber) { + + if (!ClassUtils.getShortName(subscriber.getClass()).endsWith("SkipSubscriber")) { + return null; + } + + Field field = ReflectionUtils.findField(subscriber.getClass(), "actual"); + if (field == null) { + return null; + } + + ReflectionUtils.makeAccessible(field); + return (Subscriber) ReflectionUtils.getField(field, subscriber); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateTests.java index e1856704c7..b8855de418 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateTests.java @@ -1469,6 +1469,22 @@ public void resumesAtBsonTimestampCorrectly() throws InterruptedException { .verify(); } + @Test + public void fluxperiment() { + + List people = Arrays.asList(new Person("Dick", 22), new Person("Harry", 23), new Person("Tom", 21)); + + StepVerifier.create(template.insertAll(people)).expectNextCount(3).verifyComplete(); + + template.find(new Query().skip(2).limit(1), Person.class); + + template.findAll(Person.class).skip(2).take(1) // + .as(StepVerifier::create) // + .consumeNextWith(System.out::println) // + .verifyComplete(); + + } + private PersonWithAList createPersonWithAList(String firstname, int age) { PersonWithAList p 
= new PersonWithAList(); From 689cc7fec394cfb382d90ed579d2b76c7f809aa2 Mon Sep 17 00:00:00 2001 From: Spring Operator Date: Sat, 16 Mar 2019 11:27:20 -0500 Subject: [PATCH 3/3] URL Cleanup This commit updates URLs to prefer the https protocol. Redirects are not followed to avoid accidentally expanding intentionally shortened URLs (i.e. if using a URL shortener). # Fixed URLs ## Fixed Success These URLs were switched to an https URL with a 2xx status. While the status was successful, your review is still recommended. * http://maven.apache.org/xsd/maven-4.0.0.xsd with 3 occurrences migrated to: https://maven.apache.org/xsd/maven-4.0.0.xsd ([https](https://maven.apache.org/xsd/maven-4.0.0.xsd) result 200). * http://www.gopivotal.com (302) with 6 occurrences migrated to: https://pivotal.io ([https](https://www.gopivotal.com) result 200). * http://maven.apache.org/maven-v4_0_0.xsd with 2 occurrences migrated to: https://maven.apache.org/maven-v4_0_0.xsd ([https](https://maven.apache.org/maven-v4_0_0.xsd) result 301). * http://projects.spring.io/spring-data-mongodb with 1 occurrences migrated to: https://projects.spring.io/spring-data-mongodb ([https](https://projects.spring.io/spring-data-mongodb) result 301). * http://www.pivotal.io with 1 occurrences migrated to: https://www.pivotal.io ([https](https://www.pivotal.io) result 301). # Ignored These URLs were intentionally ignored. * http://maven.apache.org/POM/4.0.0 with 10 occurrences * http://www.w3.org/2001/XMLSchema-instance with 5 occurrences --- pom.xml | 18 +++++++++--------- spring-data-mongodb-benchmarks/pom.xml | 2 +- spring-data-mongodb-cross-store/pom.xml | 2 +- spring-data-mongodb-distribution/pom.xml | 2 +- spring-data-mongodb/pom.xml | 2 +- 5 files changed, 13 insertions(+), 13 deletions(-) diff --git a/pom.xml b/pom.xml index 17e979e7af..05c881cf3b 100644 --- a/pom.xml +++ b/pom.xml @@ -1,5 +1,5 @@ - + 4.0.0 @@ -10,7 +10,7 @@ Spring Data MongoDB MongoDB support for Spring Data - http://projects.spring.io/spring-data-mongodb + https://projects.spring.io/spring-data-mongodb org.springframework.data.build @@ -39,7 +39,7 @@ Oliver Gierke ogierke at gopivotal.com Pivotal - http://www.gopivotal.com + https://pivotal.io Project Lead @@ -50,7 +50,7 @@ Thomas Risberg trisberg at vmware.com Pivotal - http://www.gopivotal.com + https://pivotal.io Developer @@ -61,7 +61,7 @@ Mark Pollack mpollack at gopivotal.com Pivotal - http://www.gopivotal.com + https://pivotal.io Developer @@ -72,7 +72,7 @@ Jon Brisbin jbrisbin at gopivotal.com Pivotal - http://www.gopivotal.com + https://pivotal.io Developer @@ -83,7 +83,7 @@ Thomas Darimont tdarimont at gopivotal.com Pivotal - http://www.gopivotal.com + https://pivotal.io Developer @@ -94,7 +94,7 @@ Christoph Strobl cstrobl at gopivotal.com Pivotal - http://www.gopivotal.com + https://pivotal.io Developer @@ -105,7 +105,7 @@ Mark Paluch mpaluch at pivotal.io Pivotal - http://www.pivotal.io + https://www.pivotal.io Developer diff --git a/spring-data-mongodb-benchmarks/pom.xml b/spring-data-mongodb-benchmarks/pom.xml index 5aa53d1052..d914ce4935 100644 --- a/spring-data-mongodb-benchmarks/pom.xml +++ b/spring-data-mongodb-benchmarks/pom.xml @@ -1,6 +1,6 @@ + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/maven-v4_0_0.xsd"> 4.0.0 diff --git a/spring-data-mongodb-cross-store/pom.xml b/spring-data-mongodb-cross-store/pom.xml index 22650f6fb1..0fb1165515 100644 --- a/spring-data-mongodb-cross-store/pom.xml +++ b/spring-data-mongodb-cross-store/pom.xml @@ -1,5 +1,5 @@ - + 
4.0.0 diff --git a/spring-data-mongodb-distribution/pom.xml b/spring-data-mongodb-distribution/pom.xml index bbb8f15e89..d61030975c 100644 --- a/spring-data-mongodb-distribution/pom.xml +++ b/spring-data-mongodb-distribution/pom.xml @@ -1,6 +1,6 @@ + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"> 4.0.0 diff --git a/spring-data-mongodb/pom.xml b/spring-data-mongodb/pom.xml index 0310d12a51..34b6f4d591 100644 --- a/spring-data-mongodb/pom.xml +++ b/spring-data-mongodb/pom.xml @@ -1,5 +1,5 @@ - + 4.0.0
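
Note on the hand-off in PATCH 2/3: the hacked Flux in createFlux writes the extracted "skip"/"take" values into the subscriber Context, and executeFindMultiInternal reads them back via Mono.subscriberContext() before preparing the FindPublisher. Below is a minimal, standalone sketch of just that Context hand-off (not part of the patches above): the "skip"/"take" keys mirror the ones used in ReactiveMongoTemplate, the class name ContextHandOffSketch is made up for illustration, a plain Stream stands in for the driver's FindPublisher, and subscriberContext/Mono.subscriberContext are the Reactor 3.2-era APIs this branch builds against (replaced by contextWrite/deferContextual in later Reactor versions).

    import java.util.stream.Stream;

    import reactor.core.publisher.Flux;
    import reactor.core.publisher.Mono;
    import reactor.util.context.Context;

    class ContextHandOffSketch {

        public static void main(String[] args) {

            // Source that reads the offset/limit hints from the subscriber Context,
            // the same way executeFindMultiInternal does before preparing the FindPublisher.
            Flux<String> source = Mono.subscriberContext().flatMapMany(ctx -> {

                long skip = ctx.getOrDefault("skip", 0L);
                long take = ctx.getOrDefault("take", (long) Integer.MAX_VALUE);

                // stand-in for findPublisher.skip(...) / findPublisher.limit(...)
                return Flux.fromStream(Stream.of("1", "2", "3", "4", "5").skip(skip).limit(take));
            });

            // Downstream writes the hints, as the hacked createFlux does after
            // inspecting the downstream Skip/Take subscribers via reflection.
            source.subscriberContext(Context.of("skip", 1L, "take", 2L)) //
                    .subscribe(System.out::println); // prints 2 and 3
        }
    }

Run against reactor-core 3.2.x this prints 2 and 3; in the actual patch the values are applied to the driver's FindPublisher instead of a Stream, and the reflective extraction from SkipSubscriber/TakeSubscriber shown in Fluxperiment supplies them.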