diff --git a/package.json b/package.json
index cf84aa7..2a373c6 100644
--- a/package.json
+++ b/package.json
@@ -43,6 +43,7 @@
 		"@types/node": "^20.2.4",
 		"ava": "^5.3.0",
 		"precise-now": "^2.0.0",
+		"stream-json": "^1.8.0",
 		"tsd": "^0.28.1",
 		"xo": "^0.54.2"
 	}
diff --git a/readme.md b/readme.md
index 1cfa6c7..7f04b94 100644
--- a/readme.md
+++ b/readme.md
@@ -216,6 +216,23 @@ const stream = fs.createReadStream('unicorn.txt');
 console.log(new Blob([await getStreamAsArrayBuffer(stream)]));
 ```
 
+### JSON streaming
+
+[`getStreamAsArray()`](#getstreamasarraystream-options) can be combined with JSON streaming utilities to parse JSON incrementally.
+
+```js
+import fs from 'node:fs';
+import {compose as composeStreams} from 'node:stream';
+import {getStreamAsArray} from 'get-stream';
+import streamJson from 'stream-json';
+import streamJsonArray from 'stream-json/streamers/StreamArray.js';
+
+const stream = fs.createReadStream('big-array-of-objects.json');
+console.log(await getStreamAsArray(
+	composeStreams(stream, streamJson.parser(), streamJsonArray.streamArray()),
+));
+```
+
 ## Benchmarks
 
 ### Node.js stream (100 MB, binary)
diff --git a/test.js b/test.js
index 40d2898..89503db 100644
--- a/test.js
+++ b/test.js
@@ -4,9 +4,11 @@ import {spawn} from 'node:child_process';
 import {createReadStream} from 'node:fs';
 import {open, opendir} from 'node:fs/promises';
 import {version as nodeVersion} from 'node:process';
-import {Duplex} from 'node:stream';
+import {Duplex, compose} from 'node:stream';
 import {text, buffer, arrayBuffer, blob} from 'node:stream/consumers';
 import test from 'ava';
+import streamJson from 'stream-json';
+import streamJsonArray from 'stream-json/streamers/StreamArray.js';
 import getStream, {getStreamAsBuffer, getStreamAsArrayBuffer, getStreamAsArray, MaxBufferError} from './index.js';
 
 const fixtureString = 'unicorn\n';
@@ -453,3 +455,16 @@ test.serial('getStreamAsArray() behaves like readable.toArray()', async t => {
 	]);
 	t.deepEqual(nativeResult, customResult);
 });
+
+test.serial('getStreamAsArray() can stream JSON', async t => {
+	t.timeout(BIG_TEST_DURATION);
+	const bigJson = bigArray.map(byte => ({byte}));
+	const bigJsonString = JSON.stringify(bigJson);
+	const result = await getStreamAsArray(compose(
+		createStream([bigJsonString]),
+		streamJson.parser(),
+		streamJsonArray.streamArray(),
+	));
+	t.is(result.length, bigJson.length);
+	t.deepEqual(result.at(-1).value, bigJson.at(-1));
+});