Skip to content

DATAMONGO-2620 - Search by alike() criteria is broken when type alias… #884

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
wants to merge 1 commit into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,7 @@
* @author Thomas Darimont
* @author Christoph Strobl
* @author Mark Paluch
* @author Roman Puchkovskiy
*/
public class DefaultMongoTypeMapper extends DefaultTypeMapper<Bson> implements MongoTypeMapper {

Expand Down Expand Up @@ -154,7 +155,9 @@ public void writeTypeRestrictions(Document result, @Nullable Set<Class<?>> restr
}
}

accessor.writeTypeTo(result, new Document("$in", restrictedMappedTypes));
if (!restrictedMappedTypes.isEmpty()) {
accessor.writeTypeTo(result, new Document("$in", restrictedMappedTypes));
}
}

/*
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
*/
package org.springframework.data.mongodb.core;

import static java.util.Collections.*;
import static org.assertj.core.api.Assertions.*;
import static org.springframework.data.mongodb.core.query.Criteria.*;
import static org.springframework.data.mongodb.core.query.Query.*;
Expand All @@ -39,6 +40,7 @@
import java.util.stream.IntStream;

import org.bson.types.ObjectId;
import org.jetbrains.annotations.NotNull;
import org.joda.time.DateTime;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
Expand All @@ -57,19 +59,27 @@
import org.springframework.data.annotation.PersistenceConstructor;
import org.springframework.data.annotation.Version;
import org.springframework.data.auditing.IsNewAwareAuditingHandler;
import org.springframework.data.convert.MappingContextTypeInformationMapper;
import org.springframework.data.domain.Example;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Sort;
import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mapping.MappingException;
import org.springframework.data.mongodb.InvalidMongoDbApiUsageException;
import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.core.convert.DbRefResolver;
import org.springframework.data.mongodb.core.convert.DefaultMongoTypeMapper;
import org.springframework.data.mongodb.core.convert.LazyLoadingProxy;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.MongoConverter;
import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver;
import org.springframework.data.mongodb.core.geo.GeoJsonPoint;
import org.springframework.data.mongodb.core.index.Index;
import org.springframework.data.mongodb.core.index.IndexField;
import org.springframework.data.mongodb.core.index.IndexInfo;
import org.springframework.data.mongodb.core.mapping.Field;
import org.springframework.data.mongodb.core.mapping.MongoId;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener;
import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent;
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent;
Expand Down Expand Up @@ -117,6 +127,7 @@
* @author Mark Paluch
* @author Laszlo Csontos
* @author duozhilin
* @author Roman Puchkovskiy
*/
@ExtendWith(MongoClientExtension.class)
public class MongoTemplateTests {
Expand Down Expand Up @@ -2185,7 +2196,7 @@ public void findAndModifyShouldRetainTypeInformationWithinUpdatedTypeOnDocumentW
entry.put("key2", new ModelA("value2"));

Query query = query(where("id").is(doc.id));
Update update = Update.update("models", Collections.singletonList(entry));
Update update = Update.update("models", singletonList(entry));

assertThat(template.findOne(query, DocumentWithNestedCollection.class)).isNotNull();

Expand Down Expand Up @@ -3474,7 +3485,7 @@ public void onBeforeSave(BeforeSaveEvent<Document> event) {

Document document = new Document();

template.insertAll(Collections.singletonList(document));
template.insertAll(singletonList(document));

assertThat(document.id).isNotNull();
}
Expand All @@ -3486,7 +3497,7 @@ public void afterSaveEventContainsSavedObjectUsingInsertAll() {
AtomicReference<ImmutableVersioned> saved = createAfterSaveReference();
ImmutableVersioned source = new ImmutableVersioned();

template.insertAll(Collections.singletonList(source));
template.insertAll(singletonList(source));

assertThat(saved.get()).isNotNull();
assertThat(saved.get()).isNotSameAs(source);
Expand All @@ -3511,7 +3522,7 @@ public void afterSaveEventContainsSavedObjectUsingInsert() {
@Test // DATAMONGO-1509
public void findsByGenericNestedListElements() {

List<Model> modelList = Collections.singletonList(new ModelA("value"));
List<Model> modelList = singletonList(new ModelA("value"));
DocumentWithCollection dwc = new DocumentWithCollection(modelList);

template.insert(dwc);
Expand Down Expand Up @@ -3683,6 +3694,40 @@ public void sortOnIdFieldWithExplicitTypeShouldWork() {
assertThat(template.find(new BasicQuery("{}").with(Sort.by("id")), WithIdAndFieldAnnotation.class)).isNotEmpty();
}

@Test // DATAMONGO-2620
void alikeQueryShouldFindProperlyWhenNoClassAttributeIsSaved() {

	// Template whose converter writes no type information (no _class attribute),
	// so the Example probe document carries no type alias either.
	MongoTemplate template = createTemplateSavingNoTypeInfo();

	Person probe = new Person();
	probe.setFirstName("John");

	Person saved = template.save(probe);

	Query byExample = new Query(new Criteria().alike(Example.of(saved)));
	List<Person> result = template.find(byExample, Person.class);

	assertThat(result).hasSize(1);
	assertThat(result.get(0).getId()).isEqualTo(saved.getId());
}

/**
 * Creates a {@link MongoTemplate} whose converter does not write any type information
 * (i.e. no {@code _class} attribute) into saved documents: type aliases are resolved
 * solely through a {@link MappingContextTypeInformationMapper}, which yields no alias
 * for entities without an explicit {@code @TypeAlias}, such as {@link Person}.
 */
private MongoTemplate createTemplateSavingNoTypeInfo() {

	MongoMappingContext mappingContext = new MongoMappingContext();
	mappingContext.setInitialEntitySet(singleton(Person.class));
	mappingContext.setAutoIndexCreation(false); // index creation is irrelevant for this scenario
	mappingContext.afterPropertiesSet();

	// The mapping context knows no alias for Person, so nothing is written for the type key.
	MappingContextTypeInformationMapper typeInformationMapper = new MappingContextTypeInformationMapper(
			mappingContext);
	DefaultMongoTypeMapper typeMapper = new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY,
			singletonList(typeInformationMapper));

	// NOTE(review): converter.afterPropertiesSet() is not invoked here — confirm default
	// conversions are not needed for this test setup.
	MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext);
	converter.setTypeMapper(typeMapper);

	return new MongoTemplate(new SimpleMongoClientDatabaseFactory(client, DB_NAME), converter);
}

private AtomicReference<ImmutableVersioned> createAfterSaveReference() {

AtomicReference<ImmutableVersioned> saved = new AtomicReference<>();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,8 @@
*/
package org.springframework.data.mongodb.core;

import static java.util.Collections.*;
import static java.util.stream.Collectors.*;
import static org.assertj.core.api.Assertions.*;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
import static org.springframework.data.mongodb.core.query.Criteria.*;
Expand All @@ -24,6 +26,13 @@
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.Wither;
import org.jetbrains.annotations.NotNull;
import org.springframework.data.convert.MappingContextTypeInformationMapper;
import org.springframework.data.domain.Example;
import org.springframework.data.mongodb.core.convert.DefaultMongoTypeMapper;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import reactor.core.Disposable;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
Expand All @@ -40,7 +49,6 @@
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

import org.bson.BsonDocument;
Expand Down Expand Up @@ -93,6 +101,7 @@
*
* @author Mark Paluch
* @author Christoph Strobl
* @author Roman Puchkovskiy
*/
@ExtendWith({ MongoClientExtension.class, MongoServerCondition.class })
public class ReactiveMongoTemplateTests {
Expand Down Expand Up @@ -1393,7 +1402,7 @@ void changeStreamEventsShouldBeEmittedCorrectly() throws InterruptedException {
Thread.sleep(500); // just give it some time to link receive all events

try {
assertThat(documents.stream().map(ChangeStreamEvent::getBody).collect(Collectors.toList())).hasSize(3)
assertThat(documents.stream().map(ChangeStreamEvent::getBody).collect(toList())).hasSize(3)
.allMatch(val -> val instanceof Document);
} finally {
disposable.dispose();
Expand Down Expand Up @@ -1425,7 +1434,7 @@ void changeStreamEventsShouldBeConvertedCorrectly() throws InterruptedException
Thread.sleep(500); // just give it some time to link receive all events

try {
assertThat(documents.stream().map(ChangeStreamEvent::getBody).collect(Collectors.toList()))
assertThat(documents.stream().map(ChangeStreamEvent::getBody).collect(toList()))
.containsExactly(person1, person2, person3);
} finally {
disposable.dispose();
Expand Down Expand Up @@ -1458,7 +1467,7 @@ void changeStreamEventsShouldBeFilteredCorrectly() throws InterruptedException {
Thread.sleep(500); // just give it some time to link receive all events

try {
assertThat(documents.stream().map(ChangeStreamEvent::getBody).collect(Collectors.toList()))
assertThat(documents.stream().map(ChangeStreamEvent::getBody).collect(toList()))
.containsExactly(person1, person3);
} finally {
disposable.dispose();
Expand Down Expand Up @@ -1501,7 +1510,7 @@ void mapsReservedWordsCorrectly() throws InterruptedException {
Thread.sleep(500); // just give it some time to link receive all events

try {
assertThat(documents.stream().map(ChangeStreamEvent::getBody).collect(Collectors.toList()))
assertThat(documents.stream().map(ChangeStreamEvent::getBody).collect(toList()))
.containsExactly(replacement);
} finally {
disposable.dispose();
Expand Down Expand Up @@ -1543,7 +1552,7 @@ void changeStreamEventsShouldBeResumedCorrectly() throws InterruptedException {
Thread.sleep(500); // just give it some time to link receive all events

try {
assertThat(resumeDocuments.stream().map(ChangeStreamEvent::getBody).collect(Collectors.toList()))
assertThat(resumeDocuments.stream().map(ChangeStreamEvent::getBody).collect(toList()))
.containsExactly(person2, person3);
} finally {
disposable.dispose();
Expand All @@ -1556,7 +1565,7 @@ void removeShouldConsiderLimit() {

List<Sample> samples = IntStream.range(0, 100) //
.mapToObj(i -> new Sample("id-" + i, i % 2 == 0 ? "stark" : "lannister")) //
.collect(Collectors.toList());
.collect(toList());

template.insertAll(samples) //
.as(StepVerifier::create) //
Expand All @@ -1573,7 +1582,7 @@ void removeShouldConsiderSkipAndSort() {

List<Sample> samples = IntStream.range(0, 100) //
.mapToObj(i -> new Sample("id-" + i, i % 2 == 0 ? "stark" : "lannister")) //
.collect(Collectors.toList());
.collect(toList());

template.insertAll(samples).as(StepVerifier::create).expectNextCount(100).verifyComplete();

Expand Down Expand Up @@ -1653,7 +1662,7 @@ void watchesDatabaseCorrectly() throws InterruptedException {
Thread.sleep(500); // just give it some time to link receive all events

try {
assertThat(documents.stream().map(ChangeStreamEvent::getBody).collect(Collectors.toList()))
assertThat(documents.stream().map(ChangeStreamEvent::getBody).collect(toList()))
.containsExactly(person1, person2, person3);
} finally {
disposable.dispose();
Expand Down Expand Up @@ -1704,7 +1713,7 @@ void resumesAtTimestampCorrectly() throws InterruptedException {
Thread.sleep(500); // just give it some time to link receive all events

try {
assertThat(resumeDocuments.stream().map(ChangeStreamEvent::getBody).collect(Collectors.toList()))
assertThat(resumeDocuments.stream().map(ChangeStreamEvent::getBody).collect(toList()))
.containsExactly(person2, person3);
} finally {
disposable.dispose();
Expand Down Expand Up @@ -1777,6 +1786,41 @@ public void onAfterSave(AfterSaveEvent<ImmutableVersioned> event) {
return saved;
}

@Test // DATAMONGO-2620
void alikeQueryShouldFindProperlyWhenNoClassAttributeIsSaved() {

	// Template whose converter writes no type information (no _class attribute),
	// so the Example probe document carries no type alias either.
	ReactiveMongoTemplate template = createTemplateSavingNoTypeInfo();

	Person probe = new Person();
	probe.setFirstName("John");

	Person saved = template.save(probe).block();

	Query byExample = new Query(new Criteria().alike(Example.of(saved)));
	List<Person> result = template.find(byExample, Person.class).collectList().block();

	assertThat(result).hasSize(1);
	assertThat(result.get(0).getId()).isEqualTo(saved.getId());
}

/**
 * Creates a {@link ReactiveMongoTemplate} whose converter does not write any type
 * information (i.e. no {@code _class} attribute) into saved documents: type aliases are
 * resolved solely through a {@link MappingContextTypeInformationMapper}, which yields no
 * alias for entities without an explicit {@code @TypeAlias}, such as {@link Person}.
 */
private ReactiveMongoTemplate createTemplateSavingNoTypeInfo() {

	MongoMappingContext mappingContext = new MongoMappingContext();
	mappingContext.setInitialEntitySet(singleton(Person.class));
	mappingContext.setAutoIndexCreation(false); // index creation is irrelevant for this scenario
	mappingContext.afterPropertiesSet();

	// The mapping context knows no alias for Person, so nothing is written for the type key.
	MappingContextTypeInformationMapper typeInformationMapper = new MappingContextTypeInformationMapper(
			mappingContext);
	DefaultMongoTypeMapper typeMapper = new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY,
			singletonList(typeInformationMapper));

	// NOTE(review): converter.afterPropertiesSet() is not invoked here — confirm default
	// conversions are not needed for this test setup.
	MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext);
	converter.setTypeMapper(typeMapper);

	return new ReactiveMongoTemplate(new SimpleReactiveMongoDatabaseFactory(client, DB_NAME), converter);
}

@AllArgsConstructor
@Wither
static class ImmutableVersioned {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,9 @@
*/
package org.springframework.data.mongodb.core.convert;

import static java.util.Collections.*;
import static org.assertj.core.api.Assertions.*;
import static org.mockito.Mockito.*;

import java.util.Arrays;
import java.util.Collections;
Expand All @@ -25,15 +27,19 @@
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import org.mockito.Mockito;
import org.springframework.data.convert.ConfigurableTypeInformationMapper;
import org.springframework.data.convert.SimpleTypeInformationMapper;
import org.springframework.data.convert.TypeInformationMapper;
import org.springframework.data.mapping.Alias;
import org.springframework.data.mongodb.core.DocumentTestUtils;
import org.springframework.data.util.TypeInformation;

/**
* Unit tests for {@link DefaultMongoTypeMapper}.
*
* @author Oliver Gierke
* @author Roman Puchkovskiy
*/
public class DefaultMongoTypeMapperUnitTests {

Expand Down Expand Up @@ -184,6 +190,19 @@ public void returnsCorrectTypeKey() {
assertThat(typeMapper.isTypeKey(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY)).isFalse();
}

@Test // DATAMONGO-2620
void givenTypeAliasesAreNotAvailable_whenWritingTypeRestrictions_thenDoesNotAddInConditionWithEmptySet() {

	// A mapper that never yields an alias for any type.
	TypeInformationMapper aliasLessMapper = mock(TypeInformationMapper.class);
	when(aliasLessMapper.createAliasFor(any())).thenReturn(Alias.NONE);

	typeMapper = new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY,
			singletonList(aliasLessMapper));

	Document target = new Document();
	typeMapper.writeTypeRestrictions(target, singleton(Integer.class));

	// With no mapped aliases, no { $in : [] } restriction must be written.
	assertThat(target).isEmpty();
}

private void readsTypeFromField(Document document, Class<?> type) {

TypeInformation<?> typeInfo = typeMapper.readType(document);
Expand Down