DATAREDIS-492 - Handle serializing arrays and collections #189

Closed · wants to merge 2 commits
Changes from all commits
pom.xml: 2 changes (1 addition, 1 deletion)
@@ -5,7 +5,7 @@

<groupId>org.springframework.data</groupId>
<artifactId>spring-data-redis</artifactId>
- <version>1.8.0.BUILD-SNAPSHOT</version>
+ <version>1.8.0.DATAREDIS-492-SNAPSHOT</version>

<name>Spring Data Redis</name>

src/main/java/org/springframework/data/redis/core/convert/MappingRedisConverter.java
@@ -16,6 +16,7 @@
package org.springframework.data.redis.core.convert;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
@@ -99,6 +100,7 @@
* </pre>
*
* @author Christoph Strobl
* @author Greg Turnquist
* @since 1.7
*/
public class MappingRedisConverter implements RedisConverter, InitializingBean {
@@ -354,8 +356,8 @@ public void write(Object source, final RedisData sink) {
sink.setTimeToLive(ttl);
}

- for (IndexedData indexeData : indexResolver.resolveIndexesFor(entity.getTypeInformation(), source)) {
- 	sink.addIndexedData(indexeData);
+ for (IndexedData indexedData : indexResolver.resolveIndexesFor(entity.getTypeInformation(), source)) {
+ 	sink.addIndexedData(indexedData);
}

}
@@ -408,8 +410,22 @@ public void doWithPersistentProperty(KeyValuePersistentProperty persistentProper
writeMap(keyspace, propertyStringPath, persistentProperty.getMapValueType(),
(Map<?, ?>) accessor.getProperty(persistentProperty), sink);
} else if (persistentProperty.isCollectionLike()) {
writeCollection(keyspace, propertyStringPath, (Collection<?>) accessor.getProperty(persistentProperty),

final Object property = accessor.getProperty(persistentProperty);

if (property == null || Iterable.class.isAssignableFrom(property.getClass())) {

writeCollection(keyspace, propertyStringPath, (Iterable<?>) property,
persistentProperty.getTypeInformation().getComponentType(), sink);
} else if (property.getClass().isArray()) {

writeCollection(keyspace, propertyStringPath, Arrays.asList((Object[]) property),
persistentProperty.getTypeInformation().getComponentType(), sink);
} else {

throw new RuntimeException("Don't know how to handle " + property.getClass() + " type collection");
}

} else if (persistentProperty.isEntity()) {
writeInternal(keyspace, propertyStringPath, accessor.getProperty(persistentProperty),
persistentProperty.getTypeInformation().getActualType(), sink);
@@ -481,7 +497,7 @@ public void doWithAssociation(Association<KeyValuePersistentProperty> associatio
* @param typeHint
* @param sink
*/
- private void writeCollection(String keyspace, String path, Collection<?> values, TypeInformation<?> typeHint,
+ private void writeCollection(String keyspace, String path, Iterable<?> values, TypeInformation<?> typeHint,
RedisData sink) {

if (values == null) {
@@ -491,6 +507,10 @@ private void writeCollection(String keyspace, String path, Collection<?> values,
int i = 0;
for (Object value : values) {

if (value == null) {
break;
}

String currentPath = path + ".[" + i + "]";

if (customConversions.hasCustomWriteTarget(value.getClass())) {
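Note on the change above: writeCollection now accepts an Iterable, and array-valued properties are wrapped via Arrays.asList before being flattened, so an array ends up under the same indexed paths ("prop.[0]", "prop.[1]", ...) as a List would. A minimal standalone sketch of that normalization and path layout (class and method names here are hypothetical, not part of this PR):

import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.Map;

class ArrayFlatteningSketch {

	// Hypothetical stand-in for the converter's collection handling: each element of a
	// collection-like property is written under "<path>.[<index>]".
	static Map<String, Object> flatten(String path, Object property) {

		Iterable<?> values;
		if (property instanceof Iterable) {
			values = (Iterable<?>) property;
		} else if (property.getClass().isArray()) {
			values = Arrays.asList((Object[]) property); // same normalization the PR applies
		} else {
			throw new IllegalArgumentException("Don't know how to handle " + property.getClass());
		}

		Map<String, Object> sink = new LinkedHashMap<String, Object>();
		int i = 0;
		for (Object value : values) {
			sink.put(path + ".[" + i++ + "]", value);
		}
		return sink;
	}

	public static void main(String[] args) {
		// Both calls produce the same keys: when.[0] and when.[1]
		System.out.println(flatten("when", new String[] { "pipeline.failed", "pipeline.ok" }));
		System.out.println(flatten("when", Arrays.asList("pipeline.failed", "pipeline.ok")));
	}
}

Two behavioral details in the actual change are worth noting: the (Object[]) cast only covers object arrays (a primitive array such as int[] would still fail), and the new null check in writeCollection uses break, so a null element stops the remaining elements from being written rather than skipping just that element.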
src/main/java/org/springframework/data/redis/core/convert/PathIndexResolver.java
@@ -15,8 +15,10 @@
*/
package org.springframework.data.redis.core.convert;

import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
@@ -43,6 +45,7 @@
* {@link IndexConfiguration}.
*
* @author Christoph Strobl
* @author Greg Turnquist
* @since 1.7
*/
public class PathIndexResolver implements IndexResolver {
@@ -116,11 +119,23 @@ public void doWithPersistentProperty(KeyValuePersistentProperty persistentProper

} else if (persistentProperty.isCollectionLike()) {

for (Object listValue : (Iterable<?>) propertyValue) {
final Iterable<?> iterable;

TypeInformation<?> typeToUse = updateTypeHintForActualValue(typeHint, listValue);
indexes.addAll(
if (Iterable.class.isAssignableFrom(propertyValue.getClass())) {
iterable = (Iterable) propertyValue;
} else if (propertyValue.getClass().isArray()) {
iterable = Arrays.asList((Object[]) propertyValue);
} else {
throw new RuntimeException("Don't know how to handle " + propertyValue.getClass() + " type of collection");
}

for (Object listValue : iterable) {

if (listValue != null) {
TypeInformation<?> typeToUse = updateTypeHintForActualValue(typeHint, listValue);
indexes.addAll(
doResolveIndexesFor(keyspace, currentPath, typeToUse.getActualType(), persistentProperty, listValue));
}
}
}

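The same Iterable-or-array normalization now appears in both MappingRedisConverter and PathIndexResolver. Purely as a sketch (not part of this PR), the duplicated block could be pulled into a small shared helper:

import java.util.Arrays;

final class CollectionLikeUtils {

	private CollectionLikeUtils() {}

	// Normalizes a collection-like property value into an Iterable, mirroring the two
	// duplicated blocks above: Iterables (and null) pass through, object arrays are
	// wrapped via Arrays.asList, anything else is rejected.
	static Iterable<?> asIterable(Object value) {

		if (value == null || value instanceof Iterable) {
			return (Iterable<?>) value;
		}
		if (value.getClass().isArray()) {
			return Arrays.asList((Object[]) value);
		}
		throw new IllegalArgumentException("Don't know how to handle " + value.getClass() + " as a collection");
	}
}

Both call sites could then keep their own null handling and simply consume the normalized Iterable, which would also keep the two error messages consistent (they currently differ slightly: "type collection" vs. "type of collection").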
src/test/java/org/springframework/data/redis/core/convert/MappingRedisConverterUnitTests.java
@@ -43,6 +43,7 @@
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;

@@ -75,6 +76,7 @@

/**
* @author Christoph Strobl
* @author Greg Turnquist
*/
@RunWith(MockitoJUnitRunner.class)
public class MappingRedisConverterUnitTests {
@@ -146,7 +148,7 @@ public void writeDoesNotAppendPropertiesWithNullValues() {
* @see DATAREDIS-425
*/
@Test
- public void writeDoesNotAppendPropertiesWithEmtpyCollections() {
+ public void writeDoesNotAppendPropertiesWithEmptyCollections() {

rand.firstname = "rand";

@@ -435,6 +437,27 @@ public void writeAppendsMapWithSimpleKeyCorrectly() {
.containingUtf8String("physicalAttributes.[eye-color]", "grey"));
}

/**
* @see DATAREDIS-492
*/
@Test
public void writeHandlesArraysProperly() {

this.converter = new MappingRedisConverter(null, null, resolverMock);
this.converter
.setCustomConversions(new CustomConversions(Collections.singletonList(new ListToByteConverter())));
this.converter.afterPropertiesSet();

Map<String, Object> innerMap = new LinkedHashMap<String, Object>();
innerMap.put("address", "tyrionl@netflix.com");
innerMap.put("when", new String[]{"pipeline.failed"});

Map<String, Object> map = new LinkedHashMap<String, Object>();
map.put("email", Collections.singletonList(innerMap));

RedisData target = write(map);
}
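Side note on writeHandlesArraysProperly: it registers the ListToByteConverter added below, writes a map whose nested value is a String[], and effectively only checks that write(map) no longer throws; the resulting RedisData target is never inspected. The String[] inside innerMap is what exercises the new array branch in MappingRedisConverter.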

/**
* @see DATAREDIS-425
*/
@@ -1375,6 +1398,35 @@ public Map<String, byte[]> convert(Species source) {
}
}

@WritingConverter
static class ListToByteConverter implements Converter<List, byte[]> {

private final ObjectMapper mapper;
private final Jackson2JsonRedisSerializer<List> serializer;

ListToByteConverter() {

mapper = new ObjectMapper();
mapper.setVisibility(mapper.getSerializationConfig().getDefaultVisibilityChecker()
.withFieldVisibility(Visibility.ANY).withGetterVisibility(Visibility.NONE)
.withSetterVisibility(Visibility.NONE).withCreatorVisibility(Visibility.NONE));

serializer = new Jackson2JsonRedisSerializer<List>(List.class);
serializer.setObjectMapper(mapper);
}

@Override
public byte[] convert(List source) {

if (source == null || source.isEmpty()) {
return null;
}

return serializer.serialize(source);
}
}


@ReadingConverter
static class MapToSpeciesConverter implements Converter<Map<String, byte[]>, Species> {

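The PR only adds the writing side of the List conversion. For completeness, a matching reading converter (not part of this change, just a sketch following the same pattern as the MapToSpeciesConverter shown above, truncated in this diff view) could look like this, reusing the imports already present in the test class:

@ReadingConverter
static class BytesToListConverter implements Converter<byte[], List> {

	private final Jackson2JsonRedisSerializer<List> serializer;

	BytesToListConverter() {

		serializer = new Jackson2JsonRedisSerializer<List>(List.class);
		serializer.setObjectMapper(new ObjectMapper());
	}

	@Override
	public List convert(byte[] source) {

		// Mirror ListToByteConverter: treat missing or empty input as "no value".
		if (source == null || source.length == 0) {
			return null;
		}

		return serializer.deserialize(source);
	}
}

Registering it would follow the same CustomConversions call already used in writeHandlesArraysProperly.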