
Commit 0b95e3c

Major performance improvement
`Reader.prototype.addChunk` was calling `Buffer.concat` on every chunk, which drove up garbage collection and all-around killed performance. The exact implications are documented in brianc/node-postgres#1286, which includes a test case showing how performance is affected. Rather than concatenating buffers to the exact new size on every call, this change uses a growth strategy that doubles the buffer's size whenever it runs out of room and tracks the functional length in a separate `chunkLength` variable. This significantly reduces allocation and yields a 25x performance improvement in my test cases; the more data a query returns, the greater the improvement. Since the buffer doubles, it was important to keep it from growing forever, so I also added a reclamation strategy that shrinks the buffer to half its size whenever more than half of its data has been read.
1 parent cb5096c commit 0b95e3c

1 file changed: +28 -12 lines

index.js

@@ -8,32 +8,48 @@ var Reader = module.exports = function(options) {
   options = options || {}
   this.offset = 0
   this.lastChunk = false
-  this.chunk = null
+  this.chunk = Buffer.alloc(4);
+  this.chunkLength = 0;
   this.headerSize = options.headerSize || 0
   this.lengthPadding = options.lengthPadding || 0
   this.header = null
   assert(this.headerSize < 2, 'pre-length header of more than 1 byte length not currently supported')
 }
 
 Reader.prototype.addChunk = function(chunk) {
-  this.offset = 0
-  this.chunk = chunk
-  if(this.lastChunk) {
-    this.chunk = Buffer.concat([this.lastChunk, this.chunk])
-    this.lastChunk = false
+  var newChunkLength = chunk.length;
+  var newLength = this.chunkLength + newChunkLength;
+
+  if (newLength > this.chunk.length) {
+    var newBufferLength = this.chunk.length * 2;
+    while (newLength >= newBufferLength) {
+      newBufferLength *= 2;
+    }
+    var newBuffer = new Buffer(newBufferLength);
+    this.chunk.copy(newBuffer);
+    this.chunk = newBuffer;
+  }
+  chunk.copy(this.chunk, this.chunkLength);
+  this.chunkLength = newLength;
+
+  // If more than half of the data has been read, shrink
+  // the buffer and reset the offset to reclaim the memory
+  var halfLength = this.chunk.length / 2;
+  if (this.offset > halfLength) {
+    var newBuffer = new Buffer(halfLength);
+    this.chunk.copy(newBuffer, 0, this.offset);
+    this.chunk = newBuffer;
+    this.chunkLength -= this.offset;
+    this.offset = 0;
   }
 }
 
 Reader.prototype._save = function() {
-  //save any unread chunks for next read
-  if(this.offset < this.chunk.length) {
-    this.lastChunk = this.chunk.slice(this.offset)
-  }
   return false
 }
 
 Reader.prototype.read = function() {
-  if(this.chunk.length < (this.headerSize + 4 + this.offset)) {
+  if(this.chunkLength < (this.headerSize + 4 + this.offset)) {
     return this._save()
   }
 
@@ -45,7 +61,7 @@ Reader.prototype.read = function() {
   var length = this.chunk.readUInt32BE(this.offset + this.headerSize) + this.lengthPadding
 
   //next item spans more chunks than we have
-  var remaining = this.chunk.length - (this.offset + 4 + this.headerSize)
+  var remaining = this.chunkLength - (this.offset + 4 + this.headerSize)
   if(length > remaining) {
     return this._save()
   }
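
For context, here is a minimal usage sketch of the reader this diff modifies, showing the kind of streaming hot loop the growth and reclamation strategy speeds up. The package name and the socket wiring are illustrative assumptions, not part of this commit; only the Reader constructor, addChunk, and read calls come from the code above.

var net = require('net')
var Reader = require('packet-reader')   // assumed package name; the commit itself does not name it

// With default options there is no one-byte header and no length padding:
// each packet is a 4-byte big-endian length prefix followed by that many bytes.
var reader = new Reader()

var socket = net.connect(5432, 'localhost')   // illustrative endpoint
socket.on('data', function(buffer) {
  // Append the incoming chunk once; after this commit the bytes are copied
  // into a doubling internal buffer instead of re-running Buffer.concat.
  reader.addChunk(buffer)

  // Drain every complete packet currently buffered; read() returns false
  // when only a partial packet remains.
  var packet = reader.read()
  while (packet) {
    console.log('packet of', packet.length, 'bytes')
    packet = reader.read()
  }
})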
