diff --git a/source/array-buffer.js b/source/array-buffer.js
index bc121fe..4207ad8 100644
--- a/source/array-buffer.js
+++ b/source/array-buffer.js
@@ -5,7 +5,7 @@ export async function getStreamAsArrayBuffer(stream, options) {
 	return getStreamContents(stream, arrayBufferMethods, options);
 }
 
-const initArrayBuffer = () => new Uint8Array(0);
+const initArrayBuffer = () => new ArrayBuffer(0);
 
 const useTextEncoder = chunk => textEncoder.encode(chunk);
 const textEncoder = new TextEncoder();
@@ -17,7 +17,7 @@ const useUint8ArrayWithOffset = chunk => new Uint8Array(chunk.buffer, chunk.byte
 // `contents` is an increasingly growing `Uint8Array`.
 const addArrayBufferChunk = (convertedChunk, contents, length, previousLength) => {
 	const newContents = hasArrayBufferResize() ? resizeArrayBuffer(contents, length) : resizeArrayBufferSlow(contents, length);
-	newContents.set(convertedChunk, previousLength);
+	new Uint8Array(newContents).set(convertedChunk, previousLength);
 	return newContents;
 };
 
@@ -25,13 +25,13 @@ const addArrayBufferChunk = (convertedChunk, contents, length, previousLength) =
 // This means its last bytes are zeroes (not stream data), which need to be
 // trimmed at the end with `ArrayBuffer.slice()`.
 const resizeArrayBufferSlow = (contents, length) => {
-	if (length <= contents.length) {
+	if (length <= contents.byteLength) {
 		return contents;
 	}
 
-	const newContents = new Uint8Array(getNewContentsLength(length));
-	newContents.set(contents, 0);
-	return newContents;
+	const arrayBuffer = new ArrayBuffer(getNewContentsLength(length));
+	new Uint8Array(arrayBuffer).set(new Uint8Array(contents), 0);
+	return arrayBuffer;
 };
 
 // With `ArrayBuffer.resize()`, `contents` size matches exactly the size of
@@ -39,15 +39,14 @@ const resizeArrayBufferSlow = (contents, length) => {
 // The underlying `ArrayBuffer` does allocate a number of bytes that is a power
 // of 2, but those bytes are only visible after calling `ArrayBuffer.resize()`.
 const resizeArrayBuffer = (contents, length) => {
-	if (length <= contents.buffer.maxByteLength) {
-		contents.buffer.resize(length);
-		return new Uint8Array(contents.buffer, 0, length);
+	if (length <= contents.maxByteLength) {
+		contents.resize(length);
+		return contents;
 	}
 
 	const arrayBuffer = new ArrayBuffer(length, {maxByteLength: getNewContentsLength(length)});
-	const newContents = new Uint8Array(arrayBuffer);
-	newContents.set(contents, 0);
-	return newContents;
+	new Uint8Array(arrayBuffer).set(new Uint8Array(contents), 0);
+	return arrayBuffer;
 };
 
 // Retrieve the closest `length` that is both >= and a power of 2
@@ -55,7 +54,7 @@ const getNewContentsLength = length => SCALE_FACTOR ** Math.ceil(Math.log(length
 
 const SCALE_FACTOR = 2;
 
-const finalizeArrayBuffer = ({buffer}, length) => hasArrayBufferResize() ? buffer : buffer.slice(0, length);
+const finalizeArrayBuffer = (contents, length) => hasArrayBufferResize() ? contents : contents.slice(0, length);
 
 // `ArrayBuffer.slice()` is slow. When `ArrayBuffer.resize()` is available
 // (Node >=20.0.0, Safari >=16.4 and Chrome), we can use it instead.
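
Not part of the patch — a minimal standalone sketch of the resizable-ArrayBuffer growth pattern the diff switches to, assuming a runtime with `ArrayBuffer.prototype.resize()` (Node.js >= 20.0.0, Safari >= 16.4, Chrome). The `append` helper and the 16-byte `maxByteLength` are illustrative only, not part of the source above:

// Start with an empty ArrayBuffer that can grow in place up to maxByteLength.
const contents = new ArrayBuffer(0, {maxByteLength: 16});

// Grow the buffer, then copy the chunk at the given offset through a
// temporary Uint8Array view, mirroring how resizeArrayBuffer() and
// addArrayBufferChunk() cooperate in the patched file.
const append = (buffer, chunk, offset) => {
	buffer.resize(offset + chunk.byteLength);
	new Uint8Array(buffer).set(chunk, offset);
	return buffer;
};

append(contents, new Uint8Array([1, 2, 3]), 0);
append(contents, new Uint8Array([4, 5]), 3);
console.log(contents.byteLength); // 5 — no trailing zeroes, so no final slice() is needed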