diff --git a/src/handlers/unixfs-file.js b/src/handlers/unixfs-file.js
index a64fe58..c7b3d24 100644
--- a/src/handlers/unixfs-file.js
+++ b/src/handlers/unixfs-file.js
@@ -2,6 +2,7 @@
 import { toReadableStream } from '../util/streams.js'
 import { detectContentType } from '../util/mime.js'
 import { HttpError } from '../util/errors.js'
+import { decodeRangeHeader, resolveRange } from '../util/range.js'
 
 /**
  * @typedef {import('../bindings.js').UnixfsEntryContext} UnixfsFileHandlerContext
@@ -34,8 +35,29 @@ export async function handleUnixfsFile (request, env, ctx) {
     throw new HttpError('method not allowed', { status: 405 })
   }
 
+  /** @type {import('dagula').AbsoluteRange|undefined} */
+  let range
+  if (request.headers.has('range')) {
+    /** @type {import('dagula').Range[]} */
+    let ranges = []
+    try {
+      ranges = decodeRangeHeader(request.headers.get('range') ?? '')
+    } catch (err) {
+      throw new HttpError('invalid range', { cause: err, status: 400 })
+    }
+
+    if (ranges.length > 1) {
+      throw new HttpError('multipart byte range unsupported', { status: 400 })
+    }
+
+    range = resolveRange(ranges[0], Number(entry.size))
+  }
+
   console.log('unixfs root', entry.cid.toString())
-  const contentIterator = entry.content()[Symbol.asyncIterator]()
+  const status = range ? 206 : 200
+  const contentLength = range ? range[1] - range[0] + 1 : Number(entry.size)
+  const exportOpts = range ? { offset: range[0], length: range[1] - range[0] + 1 } : {}
+  const contentIterator = entry.content(exportOpts)[Symbol.asyncIterator]()
   const { done, value: firstChunk } = await contentIterator.next()
   if (done || !firstChunk.length) {
     return new Response(null, { status: 204, headers })
@@ -47,6 +69,11 @@ export async function handleUnixfsFile (request, env, ctx) {
     headers['Content-Type'] = contentType
   }
 
+  if (range && Number(entry.size) !== contentLength) {
+    const contentRange = `bytes ${range[0]}-${range[1]}/${entry.size}`
+    headers['Content-Range'] = contentRange
+  }
+
   // stream the remainder
   const stream = toReadableStream((async function * () {
     let bytesWritten = firstChunk.length
@@ -58,10 +85,9 @@
         yield chunk
       }
       // FixedLengthStream does not like when you send less than what you said
-      const entrySize = Number(entry.size)
-      if (bytesWritten < entry.size) {
-        console.warn(`padding with ${entrySize - bytesWritten} zeroed bytes`)
-        yield new Uint8Array(entrySize - bytesWritten)
+      if (bytesWritten < contentLength) {
+        console.warn(`padding with ${contentLength - bytesWritten} zeroed bytes`)
+        yield new Uint8Array(contentLength - bytesWritten)
       }
     } catch (/** @type {any} */ err) {
       console.error(err.stack)
@@ -69,5 +95,5 @@
     }
   })())
 
-  return new Response(stream, { headers })
+  return new Response(stream, { status, headers })
 }
diff --git a/test/handlers/unixfs-dir.spec.js b/test/handlers/unixfs-dir.spec.js
index e375301..d2f0f38 100644
--- a/test/handlers/unixfs-dir.spec.js
+++ b/test/handlers/unixfs-dir.spec.js
@@ -11,7 +11,7 @@ import { UnixFS } from 'ipfs-unixfs'
 import { handleUnixfs } from '../../src/handlers/unixfs.js'
 import { mockWaitUntil, mockBlockstore } from '../helpers.js'
 
-describe('UnixFS handler', () => {
+describe('UnixFS directory handler', () => {
   it('directory correctly links to files whose name includes a #', async () => {
     const waitUntil = mockWaitUntil()
     const path = ''
diff --git a/test/handlers/unixfs-file.spec.js b/test/handlers/unixfs-file.spec.js
new file mode 100644
index 0000000..dafc6f5
--- /dev/null
+++ b/test/handlers/unixfs-file.spec.js
@@ -0,0 +1,63 @@
+/* eslint-env browser */
+import { describe, it } from 'node:test'
+import assert from 'node:assert'
+import { Dagula } from 'dagula'
+import { fromString } from 'uint8arrays'
+import { encode } from 'multiformats/block'
+import * as raw from 'multiformats/codecs/raw'
+import * as pb from '@ipld/dag-pb'
+import { sha256 as hasher } from 'multiformats/hashes/sha2'
+import { UnixFS } from 'ipfs-unixfs'
+import { handleUnixfs } from '../../src/handlers/unixfs.js'
+import { mockWaitUntil, mockBlockstore } from '../helpers.js'
+
+describe('UnixFS file handler', async () => {
+  const waitUntil = mockWaitUntil()
+  const filename = 'Puzzle People #1.png'
+  const path = `/${filename}`
+  const searchParams = new URLSearchParams()
+  const fileData = fromString('test')
+  const fileBlock = await encode({ value: fileData, codec: raw, hasher })
+  const pbData = pb.createNode(new UnixFS({ type: 'directory' }).marshal(), [{
+    Name: filename,
+    Hash: fileBlock.cid
+  }])
+  const dirBlock = await encode({ value: pbData, codec: pb, hasher })
+  const blockstore = mockBlockstore([dirBlock, fileBlock])
+  const dagula = new Dagula(blockstore)
+  const ctx = { waitUntil, unixfs: dagula, dataCid: dirBlock.cid, path, searchParams }
+  const env = { DEBUG: 'true' }
+
+  it('absolute byte range request', async () => {
+    const [first, last] = [1, 3]
+    const req = new Request('http://localhost/ipfs/bafy', { headers: { range: `bytes=${first}-${last}` } })
+    const res = await handleUnixfs(req, env, ctx)
+
+    assert.equal(res.status, 206)
+    assert.equal(res.headers.get('Content-Range'), `bytes ${first}-${last}/${fileData.length}`)
+    const data = await res.text()
+    assert.equal(data, 'est')
+  })
+
+  it('offset byte range request', async () => {
+    const [first] = [1]
+    const req = new Request('http://localhost/ipfs/bafy', { headers: { range: `bytes=${first}-` } })
+    const res = await handleUnixfs(req, env, ctx)
+
+    assert.equal(res.status, 206)
+    assert.equal(res.headers.get('Content-Range'), `bytes ${first}-${fileData.length - 1}/${fileData.length}`)
+    const data = await res.text()
+    assert.equal(data, 'est')
+  })
+
+  it('suffix byte range request', async () => {
+    const suffix = -3
+    const req = new Request('http://localhost/ipfs/bafy', { headers: { range: `bytes=${suffix}` } })
+    const res = await handleUnixfs(req, env, ctx)
+
+    assert.equal(res.status, 206)
+    assert.equal(res.headers.get('Content-Range'), `bytes ${fileData.length + suffix}-${fileData.length - 1}/${fileData.length}`)
+    const data = await res.text()
+    assert.equal(data, 'est')
+  })
+})
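
Reviewer note: decodeRangeHeader and resolveRange are imported from src/util/range.js, which is not touched by this patch. For context only, below is a minimal sketch of what such helpers could look like. The function names come from the import above, but the tuple shapes, parsing details and error behaviour are assumptions for illustration, not the project's actual implementation.

// Sketch only (hypothetical src/util/range.js) — assumes a range is a
// [first, last?] tuple, where a lone negative first value means
// "last N bytes" (suffix range).

/**
 * Parse a Range header value like `bytes=1-3`, `bytes=1-` or `bytes=-3`.
 * @param {string} value
 * @returns {Array<[number, number]|[number]>}
 */
export function decodeRangeHeader (value) {
  const [unit, specs] = value.split('=')
  if (unit !== 'bytes' || !specs) throw new Error(`invalid range header: ${value}`)
  return specs.split(',').map(spec => {
    const [first, last] = spec.trim().split('-')
    if (first === '' && last) return [-Number(last)] // suffix: bytes=-N
    if (first && last === '') return [Number(first)] // offset: bytes=N-
    if (first && last) return [Number(first), Number(last)] // absolute: bytes=N-M
    throw new Error(`invalid range spec: ${spec}`)
  })
}

/**
 * Resolve a parsed range against the total entry size, returning inclusive
 * absolute [first, last] byte offsets.
 * @param {[number, number]|[number]} range
 * @param {number} size
 * @returns {[number, number]}
 */
export function resolveRange (range, size) {
  let [first, last] = range
  if (first < 0) {
    // suffix range: last N bytes of the entry
    first = Math.max(0, size + first)
    last = size - 1
  } else if (last == null) {
    // open-ended range: from offset to end of entry
    last = size - 1
  }
  if (first > last || last > size - 1) {
    throw new Error(`unsatisfiable range: ${first}-${last} of ${size} bytes`)
  }
  return [first, last]
}

With helpers along these lines, the three test cases above (absolute, offset and suffix) all resolve to the inclusive range [1, 3] of the 4-byte fixture, which is what the Content-Range assertions expect.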