Truncate error.bufferedData if too large (#68)

ehmicky authored Aug 8, 2023
1 parent 64cba16 commit 82a194d
Showing 2 changed files with 29 additions and 8 deletions.
index.js: 31 changes (25 additions & 6 deletions)
@@ -37,16 +37,35 @@ const getStreamContents = async (stream, {convertChunk, getContents}, {maxBuffer

 	return getContents(chunks, length);
 } catch (error) {
-	try {
-		error.bufferedData = getContents(chunks, length);
-		// This throws when the buffered data is larger than the maximum length
-		// for a string or buffer
-	} catch {}
-
+	error.bufferedData = getBufferedData(chunks, getContents, length);
 	throw error;
 }
 };
+
+const getBufferedData = (chunks, getContents, length) => {
+	try {
+		return getContents(chunks, length);
+	} catch {
+		return truncateBufferedValue(chunks, getContents);
+	}
+};
+
+// If the input is larger than the maximum length for a string or a buffer,
+// it will fail. We retry it with increasingly smaller inputs, so that
+// `error.bufferedData` is still set, albeit with a truncated value, since that
+// is still useful for debugging.
+const truncateBufferedValue = (chunks, getContents) => {
+	let chunksCount = chunks.length;
+	do {
+		chunksCount = Math.floor(chunksCount / SPLIT_FACTOR);
+		try {
+			return getContents(chunks.slice(0, chunksCount));
+		} catch {}
+	} while (chunksCount > 0);
+};
+
+const SPLIT_FACTOR = 2;
 
 const convertChunkToBuffer = chunk => Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk);
 
 const getContentsAsBuffer = (chunks, length) => Buffer.concat(chunks, length);
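
The halving in truncateBufferedValue (SPLIT_FACTOR is 2) means at most about log2(chunks.length) + 1 retries, and the final attempt uses an empty slice, which should always succeed, so the helper cannot loop forever. A standalone sketch of that retry schedule (illustration only, not part of this commit; halvingSchedule is a hypothetical name):

const halvingSchedule = chunkCount => {
	const attempts = [];
	do {
		chunkCount = Math.floor(chunkCount / 2); // Mirrors SPLIT_FACTOR
		attempts.push(chunkCount);
	} while (chunkCount > 0);
	return attempts;
};

console.log(halvingSchedule(20)); // [10, 5, 2, 1, 0]
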
test.js: 6 changes (4 additions & 2 deletions)
@@ -78,7 +78,8 @@ test.serial('handles streams larger than buffer max length', async t => {
 	const chunkCount = Math.floor(BufferConstants.MAX_LENGTH / chunkSize * 2);
 	const chunk = Buffer.alloc(chunkSize);
 	const chunks = Array.from({length: chunkCount}, () => chunk);
-	await t.throwsAsync(setupBuffer(chunks));
+	const {bufferedData} = await t.throwsAsync(setupBuffer(chunks));
+	t.is(bufferedData[0], 0);
 });
 
 test.serial('handles streams larger than string max length', async t => {
@@ -87,7 +88,8 @@ test.serial('handles streams larger than string max length', async t => {
 	const chunkCount = Math.floor(BufferConstants.MAX_STRING_LENGTH / chunkSize * 2);
 	const chunk = '.'.repeat(chunkSize);
 	const chunks = Array.from({length: chunkCount}, () => chunk);
-	await t.throwsAsync(setup(chunks));
+	const {bufferedData} = await t.throwsAsync(setup(chunks));
+	t.is(bufferedData[0], '.');
 });
 
 // Tests related to big buffers/strings can be slow. We run them serially and
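
The net effect for consumers: when a stream overflows maxBuffer and its buffered contents are too large even for a single string or buffer, error.bufferedData now holds a truncated prefix instead of being absent. A hedged usage sketch, assuming the getStream default export and MaxBufferError named export from the library's README (the file name is hypothetical):

import getStream, {MaxBufferError} from 'get-stream';
import {createReadStream} from 'node:fs';

try {
	await getStream(createReadStream('big.txt'), {maxBuffer: 1e6});
} catch (error) {
	if (error instanceof MaxBufferError) {
		// `bufferedData` contains what was read before the error; since this
		// commit it is set even for very large inputs, possibly truncated.
		console.error('Stream exceeded maxBuffer; buffered', error.bufferedData.length, 'characters');
	}
	throw error;
}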
