  *
  * @author Mark Paluch
  * @author Christoph Strobl
+ * @author John Blum
  * @since 2.0
  */
 class DefaultReactiveHashOperations<H, HK, HV> implements ReactiveHashOperations<H, HK, HV> {
@@ -62,7 +63,8 @@ public Mono<Long> remove(H key, Object... hashKeys) {
 		Assert.noNullElements(hashKeys, "Hash keys must not contain null elements");
 
 		return createMono(hashCommands -> Flux.fromArray(hashKeys) //
-				.map(o -> (HK) o).map(this::rawHashKey) //
+				.map(hashKey -> (HK) hashKey)
+				.map(this::rawHashKey) //
 				.collectList() //
 				.flatMap(hks -> hashCommands.hDel(rawKey(key), hks)));
 	}
@@ -84,8 +86,8 @@ public Mono<HV> get(H key, Object hashKey) {
 		Assert.notNull(key, "Key must not be null");
 		Assert.notNull(hashKey, "Hash key must not be null");
 
-		return createMono(hashCommands ->
-				hashCommands.hGet(rawKey(key), rawHashKey((HK) hashKey)).map(this::readHashValue));
+		return createMono(hashCommands -> hashCommands.hGet(rawKey(key), rawHashKey((HK) hashKey))
+				.map(this::readHashValue));
 	}
 
 	@Override
@@ -107,8 +109,7 @@ public Mono<Long> increment(H key, HK hashKey, long delta) {
 		Assert.notNull(key, "Key must not be null");
 		Assert.notNull(hashKey, "Hash key must not be null");
 
-		return template.doCreateMono(connection -> connection //
-				.numberCommands() //
+		return template.doCreateMono(connection -> connection.numberCommands()
 				.hIncrBy(rawKey(key), rawHashKey(hashKey), delta));
 	}
 
@@ -118,8 +119,7 @@ public Mono<Double> increment(H key, HK hashKey, double delta) {
 		Assert.notNull(key, "Key must not be null");
 		Assert.notNull(hashKey, "Hash key must not be null");
 
-		return template.doCreateMono(connection -> connection //
-				.numberCommands() //
+		return template.doCreateMono(connection -> connection.numberCommands()
 				.hIncrBy(rawKey(key), rawHashKey(hashKey), delta));
 	}
 
@@ -128,34 +128,35 @@ public Mono<HK> randomKey(H key) {
 
 		Assert.notNull(key, "Key must not be null");
 
-		return template.doCreateMono(connection -> connection //
-				.hashCommands().hRandField(rawKey(key))).map(this::readRequiredHashKey);
+		return template.doCreateMono(connection -> connection.hashCommands().hRandField(rawKey(key)))
+				.map(this::readRequiredHashKey);
 	}
 
 	@Override
 	public Mono<Map.Entry<HK, HV>> randomEntry(H key) {
 
 		Assert.notNull(key, "Key must not be null");
 
-		return createMono(hashCommands -> hashCommands.hRandFieldWithValues(rawKey(key))).map(this::deserializeHashEntry);
+		return createMono(hashCommands -> hashCommands.hRandFieldWithValues(rawKey(key)))
+				.map(this::deserializeHashEntry);
 	}
 
 	@Override
 	public Flux<HK> randomKeys(H key, long count) {
 
 		Assert.notNull(key, "Key must not be null");
 
-		return template.doCreateFlux(connection -> connection //
-				.hashCommands().hRandField(rawKey(key), count)).map(this::readRequiredHashKey);
+		return template.doCreateFlux(connection -> connection.hashCommands().hRandField(rawKey(key), count))
+				.map(this::readRequiredHashKey);
 	}
 
 	@Override
 	public Flux<Map.Entry<HK, HV>> randomEntries(H key, long count) {
 
 		Assert.notNull(key, "Key must not be null");
 
-		return template.doCreateFlux(connection -> connection //
-				.hashCommands().hRandFieldWithValues(rawKey(key), count)).map(this::deserializeHashEntry);
+		return template.doCreateFlux(connection -> connection.hashCommands().hRandFieldWithValues(rawKey(key), count))
+				.map(this::deserializeHashEntry);
 	}
 
 	@Override
@@ -211,7 +212,7 @@ public Flux<HV> values(H key) {
 
 		Assert.notNull(key, "Key must not be null");
 
-		return createFlux(connection -> connection.hVals(rawKey(key)) //
+		return createFlux(hashCommands -> hashCommands.hVals(rawKey(key)) //
 				.map(this::readRequiredHashValue));
 	}
 
@@ -278,28 +279,28 @@ private HK readRequiredHashKey(ByteBuffer buffer) {
 
 		HK hashKey = readHashKey(buffer);
 
-		if (hashKey == null) {
-			throw new InvalidDataAccessApiUsageException("Deserialized hash key is null");
+		if (hashKey != null) {
+			return hashKey;
 		}
 
-		return hashKey;
+		throw new InvalidDataAccessApiUsageException("Deserialized hash key is null");
 	}
 
 	@SuppressWarnings("unchecked")
 	@Nullable
 	private HV readHashValue(@Nullable ByteBuffer value) {
-		return (HV) (value == null ? null : serializationContext.getHashValueSerializationPair().read(value));
+		return value != null ? (HV) serializationContext.getHashValueSerializationPair().read(value) : null;
 	}
 
 	private HV readRequiredHashValue(ByteBuffer buffer) {
 
 		HV hashValue = readHashValue(buffer);
 
-		if (hashValue == null) {
-			throw new InvalidDataAccessApiUsageException("Deserialized hash value is null");
+		if (hashValue != null) {
+			return hashValue;
 		}
 
-		return hashValue;
+		throw new InvalidDataAccessApiUsageException("Deserialized hash value is null");
 	}
 
 	private Map.Entry<HK, HV> deserializeHashEntry(Map.Entry<ByteBuffer, ByteBuffer> source) {
@@ -309,9 +310,11 @@ private Map.Entry<HK, HV> deserializeHashEntry(Map.Entry<ByteBuffer, ByteBuffer>
 	private List<HV> deserializeHashValues(List<ByteBuffer> source) {
 
 		List<HV> values = new ArrayList<>(source.size());
+
 		for (ByteBuffer byteBuffer : source) {
 			values.add(readHashValue(byteBuffer));
 		}
+
 		return values;
 	}
 }
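
For reference, a minimal usage sketch of the operations touched above, as they are typically reached through ReactiveRedisTemplate.opsForHash(). The template parameter, the example() method, and the "person:1" key are illustrative assumptions only, not part of this change.

import org.springframework.data.redis.core.ReactiveHashOperations;
import org.springframework.data.redis.core.ReactiveRedisTemplate;

import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

class HashOpsUsageSketch {

	// Assumes a ReactiveRedisTemplate<String, String> configured elsewhere with
	// String serializers for keys, hash keys, and hash values.
	void example(ReactiveRedisTemplate<String, String> redisTemplate) {

		ReactiveHashOperations<String, String, String> hashOps = redisTemplate.opsForHash();

		Mono<Boolean> put = hashOps.put("person:1", "name", "Jane");       // HSET
		Mono<String> name = hashOps.get("person:1", "name");               // HGET, deserialized via readHashValue
		Mono<Long> visits = hashOps.increment("person:1", "visits", 1L);   // HINCRBY through numberCommands()
		Mono<String> field = hashOps.randomKey("person:1");                // HRANDFIELD, readRequiredHashKey
		Flux<String> values = hashOps.values("person:1");                  // HVALS, readRequiredHashValue
		Mono<Long> removed = hashOps.remove("person:1", "name", "visits"); // HDEL
	}
}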