diff --git a/pom.xml b/pom.xml
index 734a2c4e77..6afb8998f4 100644
--- a/pom.xml
+++ b/pom.xml
@@ -5,7 +5,7 @@
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-redis</artifactId>
- <version>2.0.0.BUILD-SNAPSHOT</version>
+ <version>2.0.0.DATAREDIS-659-SNAPSHOT</version>
<name>Spring Data Redis</name>
diff --git a/src/main/java/org/springframework/data/redis/connection/ReactiveClusterServerCommands.java b/src/main/java/org/springframework/data/redis/connection/ReactiveClusterServerCommands.java
new file mode 100644
index 0000000000..d44623442c
--- /dev/null
+++ b/src/main/java/org/springframework/data/redis/connection/ReactiveClusterServerCommands.java
@@ -0,0 +1,186 @@
+/*
+ * Copyright 2017 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.redis.connection;
+
+import reactor.core.publisher.Flux;
+import reactor.core.publisher.Mono;
+
+import java.util.Properties;
+
+import org.springframework.data.redis.core.types.RedisClientInfo;
+
+/**
+ * Redis Server commands executed in cluster environment using reactive infrastructure.
+ *
+ * @author Mark Paluch
+ * @author Christoph Strobl
+ * @since 2.0
+ */
+public interface ReactiveClusterServerCommands extends ReactiveServerCommands {
+
+ /**
+ * Start an {@literal Append Only File} rewrite process on the specific server.
+ *
+ * @param node must not be {@literal null}.
+ * @return {@link Mono} indicating command completion.
+ * @throws IllegalArgumentException when {@code node} is {@literal null}.
+ * @see RedisServerCommands#bgReWriteAof()
+ */
+ Mono<String> bgReWriteAof(RedisClusterNode node);
+
+ /**
+ * Start background saving of db on server.
+ *
+ * @param node must not be {@literal null}.
+ * @return {@link Mono} indicating command received by server. Operation success needs to be checked via
+ * {@link #lastSave(RedisClusterNode)}.
+ * @throws IllegalArgumentException when {@code node} is {@literal null}.
+ * @see RedisServerCommands#bgSave()
+ */
+ Mono<String> bgSave(RedisClusterNode node);
+
+ /**
+ * Get the unix timestamp of the last successful {@link #bgSave(RedisClusterNode)} operation in seconds.
+ *
+ * @param node must not be {@literal null}.
+ * @return {@link Mono} wrapping the unix timestamp.
+ * @throws IllegalArgumentException when {@code node} is {@literal null}.
+ * @see RedisServerCommands#lastSave()
+ */
+ Mono<Long> lastSave(RedisClusterNode node);
+
+ /**
+ * Synchronously save the current db snapshot on the server.
+ *
+ * @param node must not be {@literal null}.
+ * @return {@link Mono} indicating command completion.
+ * @throws IllegalArgumentException when {@code node} is {@literal null}.
+ * @see RedisServerCommands#save()
+ */
+ Mono<String> save(RedisClusterNode node);
+
+ /**
+ * Get the total number of available keys in the currently selected database.
+ *
+ * @param node must not be {@literal null}.
+ * @return {@link Mono} wrapping the number of keys.
+ * @throws IllegalArgumentException when {@code node} is {@literal null}.
+ * @see RedisServerCommands#dbSize()
+ */
+ Mono<Long> dbSize(RedisClusterNode node);
+
+ /**
+ * Delete all keys of the currently selected database.
+ *
+ * @param node must not be {@literal null}.
+ * @return {@link Mono} indicating command completion.
+ * @throws IllegalArgumentException when {@code node} is {@literal null}.
+ * @see RedisServerCommands#flushDb()
+ */
+ Mono<String> flushDb(RedisClusterNode node);
+
+ /**
+ * Delete all keys from all databases.
+ *
+ * @param node must not be {@literal null}.
+ * @return {@link Mono} indicating command completion.
+ * @throws IllegalArgumentException when {@code node} is {@literal null}.
+ * @see RedisServerCommands#flushAll()
+ */
+ Mono<String> flushAll(RedisClusterNode node);
+
+ /**
+ * Load {@literal default} server information like
+ * <ul>
+ * <li>memory</li>
+ * <li>cpu utilization</li>
+ * <li>replication</li>
+ * </ul>
+ *
+ * @param node must not be {@literal null}.
+ * @return {@link Mono} wrapping server information.
+ * @throws IllegalArgumentException when {@code node} is {@literal null}.
+ * @see RedisServerCommands#info()
+ */
+ Mono<Properties> info(RedisClusterNode node);
+
+ /**
+ * Load server information for given {@code section}.
+ *
+ * @param node must not be {@literal null}.
+ * @param section must not be {@literal null} nor {@literal empty}.
+ * @return {@link Mono} wrapping server information of given {@code section}.
+ * @throws IllegalArgumentException when {@code node} is {@literal null}.
+ * @throws IllegalArgumentException when section is {@literal null} or {@literal empty}.
+ * @see RedisServerCommands#info(String)
+ */
+ Mono<Properties> info(RedisClusterNode node, String section);
+
+ /**
+ * Load configuration parameters for given {@code pattern} from server.
+ *
+ * @param node must not be {@literal null}.
+ * @param pattern must not be {@literal null}.
+ * @return {@link Mono} wrapping configuration parameters matching given {@code pattern}.
+ * @throws IllegalArgumentException when {@code node} is {@literal null}.
+ * @throws IllegalArgumentException when {@code pattern} is {@literal null} or {@literal empty}.
+ * @see RedisServerCommands#getConfig(String)
+ */
+ Mono<Properties> getConfig(RedisClusterNode node, String pattern);
+
+ /**
+ * Set server configuration for {@code param} to {@code value}.
+ *
+ * @param node must not be {@literal null}.
+ * @param param must not be {@literal null} nor {@literal empty}.
+ * @param value must not be {@literal null} nor {@literal empty}.
+ * @return {@link Mono} indicating command completion.
+ * @throws IllegalArgumentException when {@code node} is {@literal null}.
+ * @throws IllegalArgumentException when {@code param} / {@code value} is {@literal null} or {@literal empty}.
+ * @see RedisServerCommands#setConfig(String, String)
+ */
+ Mono<String> setConfig(RedisClusterNode node, String param, String value);
+
+ /**
+ * Reset statistic counters on server.
+ * Counters can be retrieved using {@link #info()}.
+ *
+ * @param node must not be {@literal null}.
+ * @return {@link Mono} indicating command completion.
+ * @throws IllegalArgumentException when {@code node} is {@literal null}.
+ * @see RedisServerCommands#resetConfigStats()
+ */
+ Mono<String> resetConfigStats(RedisClusterNode node);
+
+ /**
+ * Request server timestamp using {@code TIME} command.
+ *
+ * @param node must not be {@literal null}.
+ * @return {@link Mono} wrapping current server time in milliseconds.
+ * @throws IllegalArgumentException when {@code node} is {@literal null}.
+ * @see RedisServerCommands#time()
+ */
+ Mono<Long> time(RedisClusterNode node);
+
+ /**
+ * Request information and statistics about connected clients.
+ *
+ * @param node must not be {@literal null}.
+ * @return {@link Flux} emitting {@link RedisClientInfo} objects.
+ * @throws IllegalArgumentException when {@code node} is {@literal null}.
+ * @see RedisServerCommands#getClientList()
+ */
+ Flux<RedisClientInfo> getClientList(RedisClusterNode node);
+}
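For orientation (not part of the diff), a minimal usage sketch of the node-targeted commands above. It assumes a ReactiveRedisClusterConnection is already available, e.g. obtained from the reactive connection factory, and a reachable cluster node at 127.0.0.1:7379; both are assumptions for illustration only.

    // Illustrative sketch only; the connection factory and the node address are assumptions.
    ReactiveRedisClusterConnection connection = connectionFactory.getReactiveClusterConnection();
    RedisClusterNode node = new RedisClusterNode("127.0.0.1", 7379);

    // Run INFO against a single cluster node and print the reported role.
    connection.serverCommands()
            .info(node)
            .map(info -> info.getProperty("role"))
            .subscribe(role -> System.out.println("Node role: " + role));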
diff --git a/src/main/java/org/springframework/data/redis/connection/ReactiveRedisClusterConnection.java b/src/main/java/org/springframework/data/redis/connection/ReactiveRedisClusterConnection.java
index 384623da11..d705b5f637 100644
--- a/src/main/java/org/springframework/data/redis/connection/ReactiveRedisClusterConnection.java
+++ b/src/main/java/org/springframework/data/redis/connection/ReactiveRedisClusterConnection.java
@@ -15,8 +15,11 @@
*/
package org.springframework.data.redis.connection;
+import reactor.core.publisher.Mono;
+
/**
* @author Christoph Strobl
+ * @author Mark Paluch
* @since 2.0
*/
public interface ReactiveRedisClusterConnection extends ReactiveRedisConnection {
@@ -47,4 +50,17 @@ public interface ReactiveRedisClusterConnection extends ReactiveRedisConnection
@Override
ReactiveClusterHyperLogLogCommands hyperLogLogCommands();
+
+ @Override
+ ReactiveClusterServerCommands serverCommands();
+
+ /**
+ * Test the connection to a specific Redis cluster node.
+ *
+ * @param node must not be {@literal null}.
+ * @return {@link Mono} wrapping server response message - usually {@literal PONG}.
+ * @throws IllegalArgumentException when {@code node} is {@literal null}.
+ * @see RedisConnectionCommands#ping()
+ */
+ Mono<String> ping(RedisClusterNode node);
}
diff --git a/src/main/java/org/springframework/data/redis/connection/ReactiveRedisConnection.java b/src/main/java/org/springframework/data/redis/connection/ReactiveRedisConnection.java
index 01f87a36bd..e64dba5e9f 100644
--- a/src/main/java/org/springframework/data/redis/connection/ReactiveRedisConnection.java
+++ b/src/main/java/org/springframework/data/redis/connection/ReactiveRedisConnection.java
@@ -16,6 +16,7 @@
package org.springframework.data.redis.connection;
import lombok.Data;
+import reactor.core.publisher.Mono;
import java.io.Closeable;
import java.nio.ByteBuffer;
@@ -96,7 +97,7 @@ public interface ReactiveRedisConnection extends Closeable {
/**
* Get {@link ReactiveHashCommands}.
*
- * @return
+ * @return never {@literal null}.
*/
ReactiveHashCommands hashCommands();
@@ -114,6 +115,21 @@ public interface ReactiveRedisConnection extends Closeable {
*/
ReactiveHyperLogLogCommands hyperLogLogCommands();
+ /**
+ * Get {@link ReactiveServerCommands}.
+ *
+ * @return never {@literal null}.
+ */
+ ReactiveServerCommands serverCommands();
+
+ /**
+ * Test connection.
+ *
+ * @return {@link Mono} wrapping server response message - usually {@literal PONG}.
+ * @see Redis Documentation: PING
+ */
+ Mono<String> ping();
+
/**
* Base interface for Redis commands executed with a reactive infrastructure.
*
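As a quick illustration (not part of the diff) of the two additions above, assuming `connection` is any ReactiveRedisConnection:

    // Illustrative sketch only: a simple health check plus a server command through the new accessor.
    connection.ping()
            .doOnNext(reply -> System.out.println("Ping reply: " + reply)) // usually "PONG"
            .then(connection.serverCommands().dbSize())
            .subscribe(keys -> System.out.println("Keys in current db: " + keys));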
diff --git a/src/main/java/org/springframework/data/redis/connection/ReactiveServerCommands.java b/src/main/java/org/springframework/data/redis/connection/ReactiveServerCommands.java
new file mode 100644
index 0000000000..18fbb21691
--- /dev/null
+++ b/src/main/java/org/springframework/data/redis/connection/ReactiveServerCommands.java
@@ -0,0 +1,187 @@
+/*
+ * Copyright 2017 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.redis.connection;
+
+import reactor.core.publisher.Flux;
+import reactor.core.publisher.Mono;
+
+import java.util.Properties;
+
+import org.springframework.data.redis.core.types.RedisClientInfo;
+
+/**
+ * Redis Server commands executed using reactive infrastructure.
+ *
+ * @author Mark Paluch
+ * @author Christoph Strobl
+ * @since 2.0
+ */
+public interface ReactiveServerCommands {
+
+ /**
+ * Start an {@literal Append Only File} rewrite process on server.
+ *
+ * @return {@link Mono} indicating command completion.
+ * @see Redis Documentation: BGREWRITEAOF
+ */
+ Mono<String> bgReWriteAof();
+
+ /**
+ * Start background saving of db on server.
+ *
+ * @return {@link Mono} indicating command received by server. Operation success needs to be checked via
+ * {@link #lastSave()}.
+ * @see Redis Documentation: BGSAVE
+ */
+ Mono<String> bgSave();
+
+ /**
+ * Get the unix timestamp of the last successful {@link #bgSave()} operation in seconds.
+ *
+ * @return {@link Mono} wrapping unix timestamp.
+ * @see Redis Documentation: LASTSAVE
+ */
+ Mono<Long> lastSave();
+
+ /**
+ * Synchronously save the current db snapshot on the server.
+ *
+ * @return {@link Mono} indicating command completion.
+ * @see Redis Documentation: SAVE
+ */
+ Mono<String> save();
+
+ /**
+ * Get the total number of available keys in the currently selected database.
+ *
+ * @return {@link Mono} wrapping the number of keys.
+ * @see Redis Documentation: DBSIZE
+ */
+ Mono<Long> dbSize();
+
+ /**
+ * Delete all keys of the currently selected database.
+ *
+ * @return {@link Mono} indicating command completion.
+ * @see Redis Documentation: FLUSHDB
+ */
+ Mono<String> flushDb();
+
+ /**
+ * Delete all keys from all databases.
+ *
+ * @return {@link Mono} indicating command completion.
+ * @see Redis Documentation: FLUSHALL
+ */
+ Mono<String> flushAll();
+
+ /**
+ * Load {@literal default} server information like
+ * <ul>
+ * <li>memory</li>
+ * <li>cpu utilization</li>
+ * <li>replication</li>
+ * </ul>
+ *
+ * @return {@link Mono} wrapping server information.
+ * @see Redis Documentation: INFO
+ */
+ Mono<Properties> info();
+
+ /**
+ * Load server information for given {@code section}.
+ *
+ * @param section must not be {@literal null} nor {@literal empty}.
+ * @return {@link Mono} wrapping server information of given {@code section}.
+ * @throws IllegalArgumentException when section is {@literal null} or {@literal empty}.
+ * @see Redis Documentation: INFO
+ */
+ Mono<Properties> info(String section);
+
+ /**
+ * Load configuration parameters for given {@code pattern} from server.
+ *
+ * @param pattern must not be {@literal null}.
+ * @return {@link Mono} wrapping configuration parameters matching given {@code pattern}.
+ * @throws IllegalArgumentException when {@code pattern} is {@literal null} or {@literal empty}.
+ * @see Redis Documentation: CONFIG GET
+ */
+ Mono<Properties> getConfig(String pattern);
+
+ /**
+ * Set server configuration for {@code param} to {@code value}.
+ *
+ * @param param must not be {@literal null} nor {@literal empty}.
+ * @param value must not be {@literal null} nor {@literal empty}.
+ * @return {@link Mono} indicating command completion.
+ * @throws IllegalArgumentException when {@code param} / {@code value} is {@literal null} or {@literal empty}.
+ * @see Redis Documentation: CONFIG SET
+ */
+ Mono<String> setConfig(String param, String value);
+
+ /**
+ * Reset statistic counters on server.
+ * Counters can be retrieved using {@link #info()}.
+ *
+ * @return {@link Mono} indicating command completion.
+ * @see Redis Documentation: CONFIG RESETSTAT
+ */
+ Mono<String> resetConfigStats();
+
+ /**
+ * Request server timestamp using {@code TIME} command.
+ *
+ * @return {@link Mono} wrapping current server time in milliseconds.
+ * @see Redis Documentation: TIME
+ */
+ Mono<Long> time();
+
+ /**
+ * Closes a given client connection identified by {@literal host:port}.
+ *
+ * @param host of connection to close. Must not be {@literal null} nor {@literal empty}.
+ * @param port of connection to close
+ * @return {@link Mono} wrapping {@link String} representation of the command result.
+ * @throws IllegalArgumentException if {@code host} is {@literal null} or {@literal empty}.
+ * @see Redis Documentation: CLIENT KILL
+ */
+ Mono<String> killClient(String host, int port);
+
+ /**
+ * Assign given name to current connection.
+ *
+ * @param name must not be {@literal null} nor {@literal empty}.
+ * @return {@link Mono} indicating command completion.
+ * @throws IllegalArgumentException when {@code name} is {@literal null} or {@literal empty}.
+ * @see Redis Documentation: CLIENT SETNAME
+ */
+ Mono<String> setClientName(String name);
+
+ /**
+ * Returns the name of the current connection.
+ *
+ * @return {@link Mono} wrapping the connection name.
+ * @see Redis Documentation: CLIENT GETNAME
+ */
+ Mono<String> getClientName();
+
+ /**
+ * Request information and statistics about connected clients.
+ *
+ * @return {@link Flux} emitting {@link RedisClientInfo} objects.
+ * @see Redis Documentation: CLIENT LIST
+ */
+ Flux<RedisClientInfo> getClientList();
+}
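Again purely for illustration (not part of the diff), a sketch chaining a few of the standalone commands declared above; `connection` is assumed to be a ReactiveRedisConnection:

    // Illustrative sketch only.
    ReactiveServerCommands commands = connection.serverCommands();

    // Trigger a background save, then query the timestamp of the most recent completed save.
    commands.bgSave()
            .then(commands.lastSave())
            .subscribe(lastSave -> System.out.println("Last save (unix seconds): " + lastSave));

    // Reset the INFO statistics counters.
    commands.resetConfigStats().subscribe();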
diff --git a/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceClusterConnection.java b/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceClusterConnection.java
index 06d236f878..046f558caf 100644
--- a/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceClusterConnection.java
+++ b/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceClusterConnection.java
@@ -29,7 +29,6 @@
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashMap;
-import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -614,35 +613,4 @@ public void destroy() throws Exception {
}
}
}
-
- /**
- * Lettuce specific implementation of {@link ClusterTopologyProvider}.
- *
- * @author Christoph Strobl
- * @since 1.7
- */
- static class LettuceClusterTopologyProvider implements ClusterTopologyProvider {
-
- private final RedisClusterClient client;
-
- /**
- * @param client must not be {@literal null}.
- */
- public LettuceClusterTopologyProvider(RedisClusterClient client) {
-
- Assert.notNull(client, "RedisClusterClient must not be null.");
- this.client = client;
- }
-
- /*
- * (non-Javadoc)
- * @see org.springframework.data.redis.connection.ClusterTopologyProvider#getTopology()
- */
- @Override
- public ClusterTopology getTopology() {
- return new ClusterTopology(
- new LinkedHashSet<>(LettuceConverters.partitionsToClusterNodes(client.getPartitions())));
- }
- }
-
}
diff --git a/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceClusterTopologyProvider.java b/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceClusterTopologyProvider.java
new file mode 100644
index 0000000000..46949730b1
--- /dev/null
+++ b/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceClusterTopologyProvider.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2017 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.redis.connection.lettuce;
+
+import io.lettuce.core.cluster.RedisClusterClient;
+
+import java.util.LinkedHashSet;
+
+import org.springframework.data.redis.connection.ClusterTopology;
+import org.springframework.data.redis.connection.ClusterTopologyProvider;
+import org.springframework.util.Assert;
+
+/**
+ * Lettuce specific implementation of {@link ClusterTopologyProvider}.
+ *
+ * @author Christoph Strobl
+ * @author Mark Paluch
+ * @since 1.7
+ */
+class LettuceClusterTopologyProvider implements ClusterTopologyProvider {
+
+ private final RedisClusterClient client;
+
+ /**
+ * @param client must not be {@literal null}.
+ */
+ LettuceClusterTopologyProvider(RedisClusterClient client) {
+
+ Assert.notNull(client, "RedisClusterClient must not be null.");
+
+ this.client = client;
+ }
+
+ /*
+ * (non-Javadoc)
+ * @see org.springframework.data.redis.connection.ClusterTopologyProvider#getTopology()
+ */
+ @Override
+ public ClusterTopology getTopology() {
+ return new ClusterTopology(new LinkedHashSet<>(LettuceConverters.partitionsToClusterNodes(client.getPartitions())));
+ }
+}
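A brief sketch (not part of the diff) of how the extracted provider is used. It is package-private and wired up internally by LettuceConnectionFactory (see the change below), so this only illustrates the contract; it assumes same-package access and a cluster client whose partitions have been loaded.

    // Illustrative sketch only; requires an initialized RedisClusterClient with loaded partitions.
    RedisClusterClient clusterClient = RedisClusterClient.create("redis://127.0.0.1:7379");
    clusterClient.getPartitions(); // loads the initial cluster topology

    ClusterTopologyProvider topologyProvider = new LettuceClusterTopologyProvider(clusterClient);
    topologyProvider.getTopology().getNodes()
            .forEach(node -> System.out.println(node.getHost() + ":" + node.getPort()));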
diff --git a/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceConnectionFactory.java b/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceConnectionFactory.java
index 6425fbb3eb..2278224560 100644
--- a/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceConnectionFactory.java
+++ b/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceConnectionFactory.java
@@ -41,18 +41,7 @@
import org.springframework.data.redis.ExceptionTranslationStrategy;
import org.springframework.data.redis.PassThroughExceptionTranslationStrategy;
import org.springframework.data.redis.RedisConnectionFailureException;
-import org.springframework.data.redis.connection.ClusterCommandExecutor;
-import org.springframework.data.redis.connection.Pool;
-import org.springframework.data.redis.connection.ReactiveRedisConnectionFactory;
-import org.springframework.data.redis.connection.RedisClusterConfiguration;
-import org.springframework.data.redis.connection.RedisClusterConnection;
-import org.springframework.data.redis.connection.RedisConnection;
-import org.springframework.data.redis.connection.RedisConnectionFactory;
-import org.springframework.data.redis.connection.RedisNode;
-import org.springframework.data.redis.connection.RedisPassword;
-import org.springframework.data.redis.connection.RedisSentinelConfiguration;
-import org.springframework.data.redis.connection.RedisSentinelConnection;
-import org.springframework.data.redis.connection.RedisStandaloneConfiguration;
+import org.springframework.data.redis.connection.*;
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;
@@ -772,7 +761,7 @@ private AbstractRedisClient createRedisClient() {
.orElseGet(() -> RedisClusterClient.create(initialUris));
this.clusterCommandExecutor = new ClusterCommandExecutor(
- new LettuceClusterConnection.LettuceClusterTopologyProvider(clusterClient),
+ new LettuceClusterTopologyProvider(clusterClient),
new LettuceClusterConnection.LettuceClusterNodeResourceProvider(clusterClient), EXCEPTION_TRANSLATION);
clientConfiguration.getClientOptions() //
diff --git a/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceConverters.java b/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceConverters.java
index 314ca5e510..c474220c8e 100644
--- a/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceConverters.java
+++ b/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceConverters.java
@@ -106,126 +106,101 @@ abstract public class LettuceConverters extends Converters {
public static final byte[] NEGATIVE_INFINITY_BYTES;
static {
- DATE_TO_LONG = new Converter() {
- public Long convert(Date source) {
- return source != null ? source.getTime() : null;
- }
- };
- BYTES_LIST_TO_BYTES_SET = new Converter, Set>() {
- public Set convert(List results) {
- return results != null ? new LinkedHashSet<>(results) : null;
- }
- };
- BYTES_TO_STRING = new Converter() {
- @Override
- public String convert(byte[] source) {
- if (source == null || Arrays.equals(source, new byte[0])) {
- return null;
- }
- return new String(source);
+ DATE_TO_LONG = source -> source != null ? source.getTime() : null;
+
+ BYTES_LIST_TO_BYTES_SET = results -> results != null ? new LinkedHashSet<>(results) : null;
+
+ BYTES_TO_STRING = source -> {
+ if (source == null || Arrays.equals(source, new byte[0])) {
+ return null;
}
+ return new String(source);
};
- STRING_TO_BYTES = new Converter() {
- @Override
- public byte[] convert(String source) {
- if (source == null) {
- return null;
- }
- return source.getBytes();
+ STRING_TO_BYTES = source -> {
+ if (source == null) {
+ return null;
}
+ return source.getBytes();
};
- BYTES_SET_TO_BYTES_LIST = new Converter, List>() {
- public List convert(Set results) {
- return results != null ? new ArrayList<>(results) : null;
+
+ BYTES_SET_TO_BYTES_LIST = results -> results != null ? new ArrayList<>(results) : null;
+
+ BYTES_COLLECTION_TO_BYTES_LIST = results -> {
+
+ if (results instanceof List) {
+ return (List<byte[]>) results;
}
+ return results != null ? new ArrayList<>(results) : null;
};
- BYTES_COLLECTION_TO_BYTES_LIST = new Converter, List>() {
- public List convert(Collection results) {
- if (results instanceof List) {
- return (List) results;
- }
- return results != null ? new ArrayList<>(results) : null;
+
+ KEY_VALUE_TO_BYTES_LIST = source -> {
+
+ if (source == null) {
+ return null;
}
+ List<byte[]> list = new ArrayList<>(2);
+ list.add(source.getKey());
+ list.add(source.getValue());
+
+ return list;
};
- KEY_VALUE_TO_BYTES_LIST = new Converter, List>() {
- public List convert(KeyValue source) {
- if (source == null) {
- return null;
- }
- List list = new ArrayList<>(2);
- list.add(source.getKey());
- list.add(source.getValue());
- return list;
+ BYTES_LIST_TO_MAP = source -> {
+
+ if (CollectionUtils.isEmpty(source)) {
+ return Collections.emptyMap();
}
- };
- BYTES_LIST_TO_MAP = new Converter, Map>() {
- @Override
- public Map convert(final List source) {
+ Map<byte[], byte[]> target = new LinkedHashMap<>();
- if (CollectionUtils.isEmpty(source)) {
- return Collections.emptyMap();
- }
+ Iterator<byte[]> kv = source.iterator();
+ while (kv.hasNext()) {
+ target.put(kv.next(), kv.hasNext() ? kv.next() : null);
+ }
- Map target = new LinkedHashMap<>();
+ return target;
+ };
- Iterator kv = source.iterator();
- while (kv.hasNext()) {
- target.put(kv.next(), kv.hasNext() ? kv.next() : null);
- }
+ SCORED_VALUES_TO_TUPLE_SET = source -> {
- return target;
+ if (source == null) {
+ return null;
}
- };
- SCORED_VALUES_TO_TUPLE_SET = new Converter>, Set>() {
- public Set convert(List> source) {
- if (source == null) {
- return null;
- }
- Set tuples = new LinkedHashSet<>(source.size());
- for (ScoredValue value : source) {
- tuples.add(LettuceConverters.toTuple(value));
- }
- return tuples;
+ Set<Tuple> tuples = new LinkedHashSet<>(source.size());
+ for (ScoredValue<byte[]> value : source) {
+ tuples.add(LettuceConverters.toTuple(value));
}
+ return tuples;
};
- SCORED_VALUES_TO_TUPLE_LIST = new Converter>, List>() {
- public List convert(List> source) {
- if (source == null) {
- return null;
- }
- List tuples = new ArrayList<>(source.size());
- for (ScoredValue value : source) {
- tuples.add(LettuceConverters.toTuple(value));
- }
- return tuples;
+ SCORED_VALUES_TO_TUPLE_LIST = source -> {
+
+ if (source == null) {
+ return null;
}
- };
- SCORED_VALUE_TO_TUPLE = new Converter, Tuple>() {
- public Tuple convert(ScoredValue source) {
- return source != null ? new DefaultTuple(source.getValue(), Double.valueOf(source.getScore())) : null;
+ List<Tuple> tuples = new ArrayList<>(source.size());
+ for (ScoredValue<byte[]> value : source) {
+ tuples.add(LettuceConverters.toTuple(value));
}
+ return tuples;
};
- BYTES_LIST_TO_TUPLE_LIST_CONVERTER = new Converter, List>() {
- @Override
- public List convert(List source) {
+ SCORED_VALUE_TO_TUPLE = source -> source != null
+ ? new DefaultTuple(source.getValue(), Double.valueOf(source.getScore())) : null;
- if (CollectionUtils.isEmpty(source)) {
- return Collections.emptyList();
- }
+ BYTES_LIST_TO_TUPLE_LIST_CONVERTER = source -> {
- List tuples = new ArrayList<>();
- Iterator it = source.iterator();
- while (it.hasNext()) {
- tuples.add(
- new DefaultTuple(it.next(), it.hasNext() ? Double.valueOf(LettuceConverters.toString(it.next())) : null));
- }
- return tuples;
+ if (CollectionUtils.isEmpty(source)) {
+ return Collections.emptyList();
}
+
+ List<Tuple> tuples = new ArrayList<>();
+ Iterator<byte[]> it = source.iterator();
+ while (it.hasNext()) {
+ tuples.add(new DefaultTuple(it.next(), it.hasNext() ? Double.valueOf(toString(it.next())) : null));
+ }
+ return tuples;
};
PARTITIONS_TO_CLUSTER_NODES = new Converter<Partitions, List<RedisClusterNode>>() {
@@ -301,45 +276,24 @@ private Set parseFlags(Set source) {
POSITIVE_INFINITY_BYTES = toBytes("+inf");
NEGATIVE_INFINITY_BYTES = toBytes("-inf");
- BYTES_LIST_TO_TIME_CONVERTER = new Converter, Long>() {
+ BYTES_LIST_TO_TIME_CONVERTER = source -> {
- @Override
- public Long convert(List source) {
+ Assert.notEmpty(source, "Received invalid result from server. Expected 2 items in collection.");
+ Assert.isTrue(source.size() == 2,
+ "Received invalid nr of arguments from redis server. Expected 2 received " + source.size());
- Assert.notEmpty(source, "Received invalid result from server. Expected 2 items in collection.");
- Assert.isTrue(source.size() == 2,
- "Received invalid nr of arguments from redis server. Expected 2 received " + source.size());
-
- return toTimeMillis(LettuceConverters.toString(source.get(0)), LettuceConverters.toString(source.get(1)));
- }
+ return toTimeMillis(toString(source.get(0)), toString(source.get(1)));
};
- GEO_COORDINATE_TO_POINT_CONVERTER = new Converter() {
- @Override
- public Point convert(io.lettuce.core.GeoCoordinates geoCoordinate) {
- return geoCoordinate != null ? new Point(geoCoordinate.getX().doubleValue(), geoCoordinate.getY().doubleValue())
- : null;
- }
- };
+ GEO_COORDINATE_TO_POINT_CONVERTER = geoCoordinate -> geoCoordinate != null
+ ? new Point(geoCoordinate.getX().doubleValue(), geoCoordinate.getY().doubleValue()) : null;
GEO_COORDINATE_LIST_TO_POINT_LIST_CONVERTER = new ListConverter<>(GEO_COORDINATE_TO_POINT_CONVERTER);
- KEY_VALUE_UNWRAPPER = new Converter, Object>() {
-
- @Override
- public Object convert(KeyValue