
Commit 9969cb6

Improve limit handling in StringDecoder
The case of one data buffer containing multiple lines could cause a buffer leak, due to a suspected issue in concatMapIterable. This commit adds workarounds until the underlying issue is addressed. Closes gh-24346
1 parent 04b3f5a commit 9969cb6
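
For context, the scenario this commit addresses can be exercised through the decoder's public API. The sketch below is illustrative rather than part of the commit; it is modeled on the new tests in this change and uses only the Spring and Reactor types referenced in the diff, while the class name, buffer contents, and limit value are made up for illustration.

```java
import java.nio.charset.StandardCharsets;

import org.springframework.core.ResolvableType;
import org.springframework.core.codec.StringDecoder;
import org.springframework.core.io.buffer.DataBuffer;
import org.springframework.core.io.buffer.DataBufferLimitException;
import org.springframework.core.io.buffer.DefaultDataBufferFactory;
import org.springframework.util.MimeTypeUtils;
import reactor.core.publisher.Flux;
import reactor.test.StepVerifier;

public class StringDecoderLimitExample {

	public static void main(String[] args) {
		DefaultDataBufferFactory factory = new DefaultDataBufferFactory();

		// One data buffer containing multiple lines; the first line already exceeds the limit.
		DataBuffer buffer = factory.wrap(
				"TOO MUCH DATA\nanother line\n".getBytes(StandardCharsets.UTF_8));

		StringDecoder decoder = StringDecoder.allMimeTypes();
		decoder.setMaxInMemorySize(5);

		Flux<String> lines = decoder.decode(Flux.just(buffer),
				ResolvableType.forClass(String.class), MimeTypeUtils.TEXT_PLAIN, null);

		// With this commit, the limit is raised for the oversized line without
		// accumulating (and potentially leaking) the remaining lines in the buffer.
		StepVerifier.create(lines).verifyError(DataBufferLimitException.class);
	}
}
```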

2 files changed: 92 additions, 39 deletions

spring-core/src/main/java/org/springframework/core/codec/StringDecoder.java

Lines changed: 70 additions & 30 deletions
@@ -1,5 +1,5 @@
 /*
- * Copyright 2002-2019 the original author or authors.
+ * Copyright 2002-2020 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -95,17 +95,44 @@ public Flux<String> decode(Publisher<DataBuffer> input, ResolvableType elementTy
 
 		List<byte[]> delimiterBytes = getDelimiterBytes(mimeType);
 
-		// TODO: Drop Consumer and use bufferUntil with Supplier<LimistedDataBufferList> (reactor-core#1925)
-		// TODO: Drop doOnDiscard(LimitedDataBufferList.class, ...) (reactor-core#1924)
-		LimitedDataBufferConsumer limiter = new LimitedDataBufferConsumer(getMaxInMemorySize());
+		Flux<DataBuffer> inputFlux = Flux.defer(() -> {
+			if (getMaxInMemorySize() != -1) {
 
-		Flux<DataBuffer> inputFlux = Flux.from(input)
-				.flatMapIterable(buffer -> splitOnDelimiter(buffer, delimiterBytes))
-				.doOnNext(limiter)
-				.bufferUntil(buffer -> buffer == END_FRAME)
-				.map(StringDecoder::joinUntilEndFrame)
-				.doOnDiscard(LimitedDataBufferList.class, LimitedDataBufferList::releaseAndClear)
-				.doOnDiscard(PooledDataBuffer.class, DataBufferUtils::release);
+				// Passing limiter into endFrameAfterDelimiter helps to ensure that in case of one DataBuffer
+				// containing multiple lines, the limit is checked and raised immediately without accumulating
+				// subsequent lines. This is necessary because concatMapIterable doesn't respect doOnDiscard.
+				// When reactor-core#1925 is resolved, we could replace bufferUntil with:
+
+				//	.windowUntil(buffer -> buffer instanceof EndFrameBuffer)
+				//	.concatMap(fluxes -> fluxes.collect(() -> new LimitedDataBufferList(getMaxInMemorySize()), LimitedDataBufferList::add))
+
+				LimitedDataBufferList limiter = new LimitedDataBufferList(getMaxInMemorySize());
+
+				return Flux.from(input)
+						.concatMapIterable(buffer -> splitOnDelimiter(buffer, delimiterBytes, limiter))
+						.bufferUntil(buffer -> buffer == END_FRAME)
+						.map(StringDecoder::joinUntilEndFrame)
+						.doOnDiscard(PooledDataBuffer.class, DataBufferUtils::release);
+			}
+			else {
+
+				// When the decoder is unlimited (-1), concatMapIterable will cache buffers that may not
+				// be released if cancel is signalled before they are turned into String lines
+				// (see test maxInMemoryLimitReleasesUnprocessedLinesWhenUnlimited).
+				// When reactor-core#1925 is resolved, the workaround can be removed and the entire
+				// else clause possibly dropped.
+
+				ConcatMapIterableDiscardWorkaroundCache cache = new ConcatMapIterableDiscardWorkaroundCache();
+
+				return Flux.from(input)
+						.concatMapIterable(buffer -> cache.addAll(splitOnDelimiter(buffer, delimiterBytes, null)))
+						.doOnNext(cache)
+						.doOnCancel(cache)
+						.bufferUntil(buffer -> buffer == END_FRAME)
+						.map(StringDecoder::joinUntilEndFrame)
+						.doOnDiscard(PooledDataBuffer.class, DataBufferUtils::release);
+			}
+		});
 
 		return super.decode(inputFlux, elementType, mimeType, hints);
 	}
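
The comments in this hunk explain the key point of the limited branch: the limit has to be enforced while the buffer is being split, because concatMapIterable does not honor doOnDiscard, so an oversized line must fail fast rather than let the remaining lines of the same buffer accumulate. The sketch below is a rough, self-contained illustration of that pattern; ByteLimiter and splitOnNewline are hypothetical stand-ins for LimitedDataBufferList and splitOnDelimiter, not the actual implementation.

```java
import java.util.ArrayList;
import java.util.List;

// Hypothetical stand-in for LimitedDataBufferList: counts bytes and fails fast.
final class ByteLimiter {

	private final int maxBytes;
	private int count;

	ByteLimiter(int maxBytes) {
		this.maxBytes = maxBytes;
	}

	void add(int frameSize) {
		this.count += frameSize;
		if (this.maxBytes != -1 && this.count > this.maxBytes) {
			throw new IllegalStateException("Exceeded limit of " + this.maxBytes + " bytes per line");
		}
	}

	void clear() {
		this.count = 0;
	}
}

class SplitWithLimitExample {

	// Split on '\n', enforcing the limit per line as each frame is produced.
	static List<String> splitOnNewline(String input, ByteLimiter limiter) {
		List<String> frames = new ArrayList<>();
		int start = 0;
		int idx;
		while ((idx = input.indexOf('\n', start)) != -1) {
			String line = input.substring(start, idx);
			limiter.add(line.length());  // raises immediately if this line is too long
			limiter.clear();             // complete line emitted; reset at the delimiter
			frames.add(line);
			start = idx + 1;
		}
		if (start < input.length()) {
			String partial = input.substring(start);
			limiter.add(partial.length()); // no delimiter yet: keep counting across buffers
			frames.add(partial);
		}
		return frames;
	}
}
```

In the real decoder the frames are DataBuffer slices and the failure is a DataBufferLimitException, which the catch block added below releases before rethrowing.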
@@ -125,7 +152,9 @@ private List<byte[]> getDelimiterBytes(@Nullable MimeType mimeType) {
 	 * Split the given data buffer on delimiter boundaries.
 	 * The returned Flux contains an {@link #END_FRAME} buffer after each delimiter.
 	 */
-	private List<DataBuffer> splitOnDelimiter(DataBuffer buffer, List<byte[]> delimiterBytes) {
+	private List<DataBuffer> splitOnDelimiter(
+			DataBuffer buffer, List<byte[]> delimiterBytes, @Nullable LimitedDataBufferList limiter) {
+
 		List<DataBuffer> frames = new ArrayList<>();
 		try {
 			do {
@@ -147,15 +176,28 @@ private List<DataBuffer> splitOnDelimiter(DataBuffer buffer, List<byte[]> delimi
 					buffer.readPosition(readPosition + length + matchingDelimiter.length);
 					frames.add(DataBufferUtils.retain(frame));
 					frames.add(END_FRAME);
+					if (limiter != null) {
+						limiter.add(frame); // enforce the limit
+						limiter.clear();
+					}
 				}
 				else {
 					frame = buffer.slice(readPosition, buffer.readableByteCount());
 					buffer.readPosition(readPosition + buffer.readableByteCount());
 					frames.add(DataBufferUtils.retain(frame));
+					if (limiter != null) {
+						limiter.add(frame);
+					}
 				}
 			}
 			while (buffer.readableByteCount() > 0);
 		}
+		catch (DataBufferLimitException ex) {
+			if (limiter != null) {
+				limiter.releaseAndClear();
+			}
+			throw ex;
+		}
 		catch (Throwable ex) {
 			for (DataBuffer frame : frames) {
 				DataBufferUtils.release(frame);
@@ -293,34 +335,32 @@ public static StringDecoder allMimeTypes(List<String> delimiters, boolean stripD
 	}
 
 
-	/**
-	 * Temporary measure for reactor-core#1925.
-	 * Consumer that adds to a {@link LimitedDataBufferList} to enforce limits.
-	 */
-	private static class LimitedDataBufferConsumer implements Consumer<DataBuffer> {
+	private class ConcatMapIterableDiscardWorkaroundCache implements Consumer<DataBuffer>, Runnable {
 
-		private final LimitedDataBufferList bufferList;
+		private final List<DataBuffer> buffers = new ArrayList<>();
 
 
-		public LimitedDataBufferConsumer(int maxInMemorySize) {
-			this.bufferList = new LimitedDataBufferList(maxInMemorySize);
+		public List<DataBuffer> addAll(List<DataBuffer> buffersToAdd) {
+			this.buffers.addAll(buffersToAdd);
+			return buffersToAdd;
 		}
 
+		@Override
+		public void accept(DataBuffer dataBuffer) {
+			this.buffers.remove(dataBuffer);
+		}
 
 		@Override
-		public void accept(DataBuffer buffer) {
-			if (buffer == END_FRAME) {
-				this.bufferList.clear();
-			}
-			else {
+		public void run() {
+			this.buffers.forEach(buffer -> {
 				try {
-					this.bufferList.add(buffer);
-				}
-				catch (DataBufferLimitException ex) {
 					DataBufferUtils.release(buffer);
-					throw ex;
 				}
-			}
+				catch (Throwable ex) {
+					// Keep going..
+				}
+			});
 		}
 	}
+
 }
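
The workaround cache above exists because buffers handed to concatMapIterable may still sit in that operator's internal queue when the subscriber cancels, and doOnDiscard does not reach them. The sketch below shows the same wiring in isolation: items are remembered in addAll, forgotten once doOnNext sees them downstream, and anything left over is released from doOnCancel. Resource, DiscardCache, and the sample pipeline are made up for illustration; only the operator arrangement mirrors the decoder's unlimited branch.

```java
import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;

import reactor.core.publisher.Flux;

class DiscardWorkaroundExample {

	// A releasable item standing in for a pooled DataBuffer.
	static class Resource {
		final String name;
		Resource(String name) { this.name = name; }
		void release() { System.out.println("released " + this.name); }
	}

	// Consumer for doOnNext, Runnable for doOnCancel, as in the workaround cache above.
	static class DiscardCache implements Consumer<Resource>, Runnable {

		private final List<Resource> cached = new ArrayList<>();

		List<Resource> addAll(List<Resource> items) {
			this.cached.addAll(items);
			return items;
		}

		@Override
		public void accept(Resource item) {
			this.cached.remove(item);  // reached the downstream; no longer at risk
		}

		@Override
		public void run() {
			this.cached.forEach(Resource::release);  // cancelled; release what never got out
			this.cached.clear();
		}
	}

	public static void main(String[] args) {
		DiscardCache cache = new DiscardCache();

		Flux.just("a\nb\nc\n")
				.concatMapIterable(s -> cache.addAll(split(s)))
				.doOnNext(cache)
				.doOnCancel(cache)
				.take(1)  // cancels upstream after the first item; "b" and "c" are still cached
				.subscribe(r -> System.out.println("got " + r.name));
	}

	private static List<Resource> split(String s) {
		List<Resource> out = new ArrayList<>();
		for (String part : s.split("\n")) {
			out.add(new Resource(part));
		}
		return out;
	}
}
```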

spring-core/src/test/java/org/springframework/core/codec/StringDecoderTests.java

Lines changed: 22 additions & 9 deletions
@@ -1,5 +1,5 @@
 /*
- * Copyright 2002-2019 the original author or authors.
+ * Copyright 2002-2020 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -128,17 +128,30 @@ public void decodeNewLine() {
 	}
 
 	@Test
-	public void decodeNewLineWithLimit() {
+	public void maxInMemoryLimit() {
 		Flux<DataBuffer> input = Flux.just(
-				stringBuffer("abc\n"),
-				stringBuffer("defg\n"),
-				stringBuffer("hijkl\n")
-		);
-		this.decoder.setMaxInMemorySize(4);
+				stringBuffer("abc\n"), stringBuffer("defg\n"), stringBuffer("hijkl\n"));
 
+		this.decoder.setMaxInMemorySize(4);
 		testDecode(input, String.class, step ->
-				step.expectNext("abc", "defg")
-						.verifyError(DataBufferLimitException.class));
+				step.expectNext("abc", "defg").verifyError(DataBufferLimitException.class));
+	}
+
+	@Test // gh-24312
+	public void maxInMemoryLimitReleaseUnprocessedLinesFromCurrentBuffer() {
+		Flux<DataBuffer> input = Flux.just(
+				stringBuffer("TOO MUCH DATA\nanother line\n\nand another\n"));
+
+		this.decoder.setMaxInMemorySize(5);
+		testDecode(input, String.class, step -> step.verifyError(DataBufferLimitException.class));
+	}
+
+	@Test // gh-24339
+	public void maxInMemoryLimitReleaseUnprocessedLinesWhenUnlimited() {
+		Flux<DataBuffer> input = Flux.just(stringBuffer("Line 1\nLine 2\nLine 3\n"));
+
+		this.decoder.setMaxInMemorySize(-1);
+		testDecodeCancel(input, ResolvableType.forClass(String.class), null, Collections.emptyMap());
 	}
 
 	@Test
