Commit 72b57d2

Refactor code and clean up compiler warnings in the Redis caching infrastructure.

* Apply Java 17 try-with-resources syntax in the DefaultRedisCacheWriter execute methods.
* Organize source code.
* Edit Javadoc.

1 parent 54a2bd0 commit 72b57d2

4 files changed (+112, -81 lines)

src/main/java/org/springframework/data/redis/cache/BatchStrategies.java (+13, -9)

@@ -28,18 +28,15 @@
 import org.springframework.util.Assert;
 
 /**
- * A collection of predefined {@link BatchStrategy} implementations using {@code KEYS} or {@code SCAN} command.
+ * Collection of predefined {@link BatchStrategy} implementations using the Redis {@code KEYS} or {@code SCAN} command.
  *
  * @author Mark Paluch
  * @author Christoph Strobl
+ * @author John Blum
  * @since 2.6
  */
 public abstract class BatchStrategies {
 
-    private BatchStrategies() {
-        // can't touch this - oh-oh oh oh oh-oh-oh
-    }
-
     /**
      * A {@link BatchStrategy} using a single {@code KEYS} and {@code DEL} command to remove all matching keys.
      * {@code KEYS} scans the entire keyspace of the Redis database and can block the Redis worker thread for a long time
@@ -68,6 +65,10 @@ public static BatchStrategy scan(int batchSize) {
         return new Scan(batchSize);
     }
 
+    private BatchStrategies() {
+        // can't touch this - oh-oh oh oh oh-oh-oh
+    }
+
     /**
      * {@link BatchStrategy} using {@code KEYS}.
      */
@@ -108,9 +109,11 @@ public long cleanCache(RedisConnection connection, String name, byte[] pattern)
             long count = 0;
 
             PartitionIterator<byte[]> partitions = new PartitionIterator<>(cursor, batchSize);
+
             while (partitions.hasNext()) {
 
                 List<byte[]> keys = partitions.next();
+
                 count += keys.size();
 
                 if (keys.size() > 0) {
@@ -141,7 +144,7 @@ static class PartitionIterator<T> implements Iterator<List<T>> {
 
         @Override
         public boolean hasNext() {
-            return iterator.hasNext();
+            return this.iterator.hasNext();
        }
 
         @Override
@@ -151,9 +154,10 @@ public List<T> next() {
                 throw new NoSuchElementException();
             }
 
-            List<T> list = new ArrayList<>(size);
-            while (list.size() < size && iterator.hasNext()) {
-                list.add(iterator.next());
+            List<T> list = new ArrayList<>(this.size);
+
+            while (list.size() < this.size && this.iterator.hasNext()) {
+                list.add(this.iterator.next());
             }
 
             return list;

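For context, these strategies are what a RedisCacheWriter uses when clearing a cache by key pattern. A minimal configuration sketch, assuming the RedisCacheWriter.nonLockingRedisCacheWriter(connectionFactory, batchStrategy) factory overload introduced together with BatchStrategy in 2.6 (class and method names below are illustrative):

import org.springframework.data.redis.cache.BatchStrategies;
import org.springframework.data.redis.cache.RedisCacheManager;
import org.springframework.data.redis.cache.RedisCacheWriter;
import org.springframework.data.redis.connection.RedisConnectionFactory;

class CacheWriterConfigSketch {

    // SCAN-based clearing walks the keyspace in pages (here 1000 keys per batch)
    // instead of issuing a single blocking KEYS command.
    RedisCacheManager redisCacheManager(RedisConnectionFactory connectionFactory) {

        RedisCacheWriter cacheWriter = RedisCacheWriter.nonLockingRedisCacheWriter(connectionFactory,
                BatchStrategies.scan(1000));

        return RedisCacheManager.builder(cacheWriter).build();
    }
}

BatchStrategies.keys() performs a single KEYS/DEL round trip and is typically only advisable for small keyspaces.
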
src/main/java/org/springframework/data/redis/cache/DefaultRedisCacheWriter.java (+51, -51)

@@ -102,27 +102,6 @@ class DefaultRedisCacheWriter implements RedisCacheWriter {
         this.batchStrategy = batchStrategy;
     }
 
-    @Override
-    public void put(String name, byte[] key, byte[] value, @Nullable Duration ttl) {
-
-        Assert.notNull(name, "Name must not be null");
-        Assert.notNull(key, "Key must not be null");
-        Assert.notNull(value, "Value must not be null");
-
-        execute(name, connection -> {
-
-            if (shouldExpireWithin(ttl)) {
-                connection.set(key, value, Expiration.from(ttl.toMillis(), TimeUnit.MILLISECONDS), SetOption.upsert());
-            } else {
-                connection.set(key, value);
-            }
-
-            return "OK";
-        });
-
-        statistics.incPuts(name);
-    }
-
     @Override
     public byte[] get(String name, byte[] key) {
         return get(name, key, null);
@@ -135,8 +114,8 @@ public byte[] get(String name, byte[] key, @Nullable Duration ttl) {
         Assert.notNull(key, "Key must not be null");
 
         byte[] result = shouldExpireWithin(ttl)
-                ? execute(name, connection -> connection.getEx(key, Expiration.from(ttl)))
-                : execute(name, connection -> connection.get(key));
+                ? execute(name, connection -> connection.stringCommands().getEx(key, Expiration.from(ttl)))
+                : execute(name, connection -> connection.stringCommands().get(key));
 
         statistics.incGets(name);
 
@@ -149,6 +128,28 @@ public byte[] get(String name, byte[] key, @Nullable Duration ttl) {
         return result;
     }
 
+    @Override
+    public void put(String name, byte[] key, byte[] value, @Nullable Duration ttl) {
+
+        Assert.notNull(name, "Name must not be null");
+        Assert.notNull(key, "Key must not be null");
+        Assert.notNull(value, "Value must not be null");
+
+        execute(name, connection -> {
+
+            if (shouldExpireWithin(ttl)) {
+                connection.stringCommands()
+                        .set(key, value, Expiration.from(ttl.toMillis(), TimeUnit.MILLISECONDS), SetOption.upsert());
+            } else {
+                connection.stringCommands().set(key, value);
+            }
+
+            return "OK";
+        });
+
+        statistics.incPuts(name);
+    }
+
     @Override
     public byte[] putIfAbsent(String name, byte[] key, byte[] value, @Nullable Duration ttl) {
 
@@ -167,17 +168,17 @@ public byte[] putIfAbsent(String name, byte[] key, byte[] value, @Nullable Durat
             boolean put;
 
             if (shouldExpireWithin(ttl)) {
-                put = connection.set(key, value, Expiration.from(ttl), SetOption.ifAbsent());
+                put = isTrue(connection.stringCommands().set(key, value, Expiration.from(ttl), SetOption.ifAbsent()));
             } else {
-                put = connection.setNX(key, value);
+                put = isTrue(connection.stringCommands().setNX(key, value));
             }
 
             if (put) {
                 statistics.incPuts(name);
                 return null;
             }
 
-            return connection.get(key);
+            return connection.stringCommands().get(key);
 
         } finally {
             if (isLockingCacheWriter()) {
@@ -193,7 +194,7 @@ public void remove(String name, byte[] key) {
         Assert.notNull(name, "Name must not be null");
         Assert.notNull(key, "Key must not be null");
 
-        execute(name, connection -> connection.del(key));
+        execute(name, connection -> connection.keyCommands().del(key));
         statistics.incDeletes(name);
     }
 
@@ -257,6 +258,16 @@ void lock(String name) {
         execute(name, connection -> doLock(name, name, null, connection));
     }
 
+    @Nullable
+    private Boolean doLock(String name, Object contextualKey, @Nullable Object contextualValue,
+            RedisConnection connection) {
+
+        Expiration expiration = Expiration.from(this.lockTtl.getTimeToLive(contextualKey, contextualValue));
+
+        return connection.stringCommands()
+                .set(createCacheLockKey(name), new byte[0], expiration, SetOption.SET_IF_ABSENT);
+    }
+
     /**
      * Explicitly remove a write lock from a cache.
      *
@@ -266,20 +277,13 @@ void unlock(String name) {
         executeLockFree(connection -> doUnlock(name, connection));
     }
 
-    private Boolean doLock(String name, Object contextualKey, Object contextualValue, RedisConnection connection) {
-
-        Expiration expiration = lockTtl == null ? Expiration.persistent()
-                : Expiration.from(lockTtl.getTimeToLive(contextualKey, contextualValue));
-
-        return connection.set(createCacheLockKey(name), new byte[0], expiration, SetOption.SET_IF_ABSENT);
-    }
-
+    @Nullable
     private Long doUnlock(String name, RedisConnection connection) {
-        return connection.del(createCacheLockKey(name));
+        return connection.keyCommands().del(createCacheLockKey(name));
     }
 
     boolean doCheckLock(String name, RedisConnection connection) {
-        return connection.exists(createCacheLockKey(name));
+        return isTrue(connection.keyCommands().exists(createCacheLockKey(name)));
     }
 
     /**
@@ -291,24 +295,16 @@ private boolean isLockingCacheWriter() {
 
     private <T> T execute(String name, Function<RedisConnection, T> callback) {
 
-        RedisConnection connection = connectionFactory.getConnection();
-
-        try {
+        try (RedisConnection connection = connectionFactory.getConnection()) {
             checkAndPotentiallyWaitUntilUnlocked(name, connection);
             return callback.apply(connection);
-        } finally {
-            connection.close();
         }
     }
 
     private void executeLockFree(Consumer<RedisConnection> callback) {
 
-        RedisConnection connection = connectionFactory.getConnection();
-
-        try {
+        try (RedisConnection connection = connectionFactory.getConnection()) {
             callback.accept(connection);
-        } finally {
-            connection.close();
         }
     }
 
@@ -337,11 +333,15 @@ private void checkAndPotentiallyWaitUntilUnlocked(String name, RedisConnection c
         }
     }
 
-    private static boolean shouldExpireWithin(@Nullable Duration ttl) {
-        return ttl != null && !ttl.isZero() && !ttl.isNegative();
-    }
-
     private static byte[] createCacheLockKey(String name) {
         return (name + "~lock").getBytes(StandardCharsets.UTF_8);
     }
+
+    private boolean isTrue(@Nullable Boolean value) {
+        return Boolean.TRUE.equals(value);
+    }
+
+    private static boolean shouldExpireWithin(@Nullable Duration ttl) {
+        return ttl != null && !ttl.isZero() && !ttl.isNegative();
+    }
 }

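The two recurring moves in this file, borrowing the connection inside a try-with-resources block and null-safely unwrapping the nullable Boolean results returned by the command interfaces, look roughly like the following standalone sketch (the class and method names are illustrative, not part of DefaultRedisCacheWriter):

import java.nio.charset.StandardCharsets;
import java.util.function.Function;

import org.springframework.data.redis.connection.RedisConnection;
import org.springframework.data.redis.connection.RedisConnectionFactory;

class ConnectionCallbackSketch {

    private final RedisConnectionFactory connectionFactory;

    ConnectionCallbackSketch(RedisConnectionFactory connectionFactory) {
        this.connectionFactory = connectionFactory;
    }

    // try-with-resources releases the connection even when the callback throws,
    // replacing the earlier explicit try/finally with connection.close().
    <T> T doWithConnection(Function<RedisConnection, T> callback) {
        try (RedisConnection connection = connectionFactory.getConnection()) {
            return callback.apply(connection);
        }
    }

    // Commands such as EXISTS and SETNX are declared to return a nullable Boolean
    // (for example on a pipelined connection), hence the Boolean.TRUE.equals(...) unwrap.
    boolean keyExists(String key) {
        Boolean exists = doWithConnection(connection ->
                connection.keyCommands().exists(key.getBytes(StandardCharsets.UTF_8)));
        return Boolean.TRUE.equals(exists);
    }
}

Usage would be along the lines of new ConnectionCallbackSketch(connectionFactory).keyExists("users::42").
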
src/main/java/org/springframework/data/redis/cache/RedisCache.java (+21, -10)

@@ -24,8 +24,10 @@
 import java.util.Map.Entry;
 import java.util.StringJoiner;
 import java.util.concurrent.Callable;
+import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
+import java.util.function.Supplier;
 
 import org.springframework.cache.Cache;
 import org.springframework.cache.support.AbstractValueAdaptingCache;
@@ -37,6 +39,7 @@
 import org.springframework.data.redis.serializer.RedisSerializationContext;
 import org.springframework.data.redis.serializer.RedisSerializer;
 import org.springframework.data.redis.util.ByteUtils;
+import org.springframework.data.redis.util.RedisAssertions;
 import org.springframework.lang.Nullable;
 import org.springframework.util.Assert;
 import org.springframework.util.ObjectUtils;
@@ -69,7 +72,8 @@ public class RedisCache extends AbstractValueAdaptingCache {
     private final String name;
 
     /**
-     * Create a new {@link RedisCache} with the given {@link String name}.
+     * Create a new {@link RedisCache} with the given {@link String name} and {@link RedisCacheConfiguration},
+     * using the {@link RedisCacheWriter} to execute Redis commands supporting the cache operations.
      *
      * @param name {@link String name} for this {@link Cache}; must not be {@literal null}.
     * @param cacheWriter {@link RedisCacheWriter} used to perform {@link RedisCache} operations by
@@ -81,11 +85,11 @@ public class RedisCache extends AbstractValueAdaptingCache {
      */
     protected RedisCache(String name, RedisCacheWriter cacheWriter, RedisCacheConfiguration cacheConfiguration) {
 
-        super(cacheConfiguration.getAllowCacheNullValues());
+        super(RedisAssertions.requireNonNull(cacheConfiguration, "CacheConfiguration must not be null")
+                .getAllowCacheNullValues());
 
         Assert.notNull(name, "Name must not be null");
         Assert.notNull(cacheWriter, "CacheWriter must not be null");
-        Assert.notNull(cacheConfiguration, "CacheConfiguration must not be null");
 
         this.name = name;
         this.cacheWriter = cacheWriter;
@@ -116,7 +120,7 @@ protected RedisCacheWriter getCacheWriter() {
      * accessing entries in the cache.
      *
      * @return the configured {@link ConversionService} used to convert {@link Object cache keys} to a {@link String} when
-     * accessing entries in the cache.
+     *         accessing entries in the cache.
      * @see RedisCacheConfiguration#getConversionService()
      * @see #getCacheConfiguration()
      */
@@ -163,7 +167,6 @@ private <T> T getSynchronized(Object key, Callable<T> valueLoader) {
 
         try {
             ValueWrapper result = get(key);
-
             return result != null ? (T) result.get() : loadCacheValue(key, valueLoader);
         } finally {
             lock.unlock();
@@ -285,10 +288,19 @@ public void evict(Object key) {
      */
     @Nullable
     protected Object preProcessCacheValue(@Nullable Object value) {
-
         return value != null ? value : isAllowNullValues() ? NullValue.INSTANCE : null;
     }
 
+    @Override
+    public CompletableFuture<?> retrieve(Object key) {
+        return super.retrieve(key);
+    }
+
+    @Override
+    public <T> CompletableFuture<T> retrieve(Object key, Supplier<CompletableFuture<T>> valueLoader) {
+        return super.retrieve(key, valueLoader);
+    }
+
     /**
      * Serialize the given {@link String cache key}.
      *
@@ -321,7 +333,7 @@ protected byte[] serializeCacheValue(Object value) {
      *
      * @param value array of bytes to deserialize; must not be {@literal null}.
      * @return an {@link Object} deserialized from the array of bytes using the configured value
-     * {@link RedisSerializationContext.SerializationPair}; can be {@literal null}.
+     *         {@link RedisSerializationContext.SerializationPair}; can be {@literal null}.
      * @see RedisCacheConfiguration#getValueSerializationPair()
      */
     @Nullable
@@ -382,9 +394,8 @@ protected String convertKey(Object key) {
             return key.toString();
         }
 
-        String message = String.format(
-                "Cannot convert cache key %s to String; Please register a suitable Converter"
-                        + " via 'RedisCacheConfiguration.configureKeyConverters(...)' or override '%s.toString()'",
+        String message = String.format("Cannot convert cache key %s to String; Please register a suitable Converter"
+                + " via 'RedisCacheConfiguration.configureKeyConverters(...)' or override '%s.toString()'",
                 source, key.getClass().getName());
 
         throw new IllegalStateException(message);

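To see how the touched code paths are exercised from Spring's Cache API, here is a brief, hedged usage sketch (the cache name and values are made up; the comments describe the delegation shown in the diffs above):

import org.springframework.cache.Cache;
import org.springframework.data.redis.cache.RedisCacheManager;

class RedisCacheUsageSketch {

    void demo(RedisCacheManager cacheManager) {

        Cache users = cacheManager.getCache("users");

        // put/get delegate to the RedisCacheWriter, which now routes writes and reads
        // through connection.stringCommands() as shown in DefaultRedisCacheWriter.
        users.put("42", "Jane Doe");
        Cache.ValueWrapper cached = users.get("42");

        // get with a value loader takes the lock-guarded getSynchronized(...) path on a cache miss.
        String loaded = users.get("43", () -> "loaded-on-miss");

        // evict issues a DEL via keyCommands().del(...).
        users.evict("42");

        System.out.println("cached=" + (cached != null ? cached.get() : null) + ", loaded=" + loaded);
    }
}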