diff --git a/packages/blockstore-opfs/LICENSE b/packages/blockstore-opfs/LICENSE new file mode 100644 index 00000000..20ce483c --- /dev/null +++ b/packages/blockstore-opfs/LICENSE @@ -0,0 +1,4 @@ +This project is dual licensed under MIT and Apache-2.0. + +MIT: https://www.opensource.org/licenses/mit +Apache-2.0: https://www.apache.org/licenses/license-2.0 diff --git a/packages/blockstore-opfs/LICENSE-APACHE b/packages/blockstore-opfs/LICENSE-APACHE new file mode 100644 index 00000000..14478a3b --- /dev/null +++ b/packages/blockstore-opfs/LICENSE-APACHE @@ -0,0 +1,5 @@ +Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. diff --git a/packages/blockstore-opfs/LICENSE-MIT b/packages/blockstore-opfs/LICENSE-MIT new file mode 100644 index 00000000..72dc60d8 --- /dev/null +++ b/packages/blockstore-opfs/LICENSE-MIT @@ -0,0 +1,19 @@ +The MIT License (MIT) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/packages/blockstore-opfs/README.md b/packages/blockstore-opfs/README.md new file mode 100644 index 00000000..b9730bea --- /dev/null +++ b/packages/blockstore-opfs/README.md @@ -0,0 +1,35 @@ +[![ipfs.tech](https://img.shields.io/badge/project-IPFS-blue.svg?style=flat-square)](https://ipfs.tech) +[![Discuss](https://img.shields.io/discourse/https/discuss.ipfs.tech/posts.svg?style=flat-square)](https://discuss.ipfs.tech) +[![codecov](https://img.shields.io/codecov/c/github/ipfs/js-stores.svg?style=flat-square)](https://codecov.io/gh/ipfs/js-stores) +[![CI](https://img.shields.io/github/actions/workflow/status/ipfs/js-stores/js-test-and-release.yml?branch=main\&style=flat-square)](https://github.com/ipfs/js-stores/actions/workflows/js-test-and-release.yml?query=branch%3Amain) + +> Blockstore implementation with Origin Private Filesystem backend. + +# Install + +```console +$ npm i blockstore-opfs +``` + +# API Docs + +- + +# License + +Licensed under either of + +- Apache 2.0, ([LICENSE-APACHE](LICENSE-APACHE) / ) +- MIT ([LICENSE-MIT](LICENSE-MIT) / ) + +# Contribute + +Contributions welcome! 
Please check out [the issues](https://github.com/ipfs/js-stores/issues). + +Also see our [contributing document](https://github.com/ipfs/community/blob/master/CONTRIBUTING_JS.md) for more information on how we work, and about contributing in general. + +Please be aware that all interactions related to this repo are subject to the IPFS [Code of Conduct](https://github.com/ipfs/community/blob/master/code-of-conduct.md). + +Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions. + +[![](https://cdn.rawgit.com/jbenet/contribute-ipfs-gif/master/img/contribute.gif)](https://github.com/ipfs/community/blob/master/CONTRIBUTING.md) diff --git a/packages/blockstore-opfs/benchmarks/encoding/package.json b/packages/blockstore-opfs/benchmarks/encoding/package.json new file mode 100644 index 00000000..00104d4f --- /dev/null +++ b/packages/blockstore-opfs/benchmarks/encoding/package.json @@ -0,0 +1,18 @@ +{ + "name": "benchmarks-encoding", + "version": "1.0.0", + "main": "index.js", + "private": true, + "type": "module", + "scripts": { + "clean": "aegir clean", + "build": "aegir build --bundle false", + "lint": "aegir lint", + "dep-check": "aegir dep-check", + "start": "npm run build && node dist/src/index.js" + }, + "devDependencies": { + "multiformats": "^11.0.1", + "tinybench": "^2.4.0" + } +} diff --git a/packages/blockstore-opfs/benchmarks/encoding/src/README.md b/packages/blockstore-opfs/benchmarks/encoding/src/README.md new file mode 100644 index 00000000..3caf1e9e --- /dev/null +++ b/packages/blockstore-opfs/benchmarks/encoding/src/README.md @@ -0,0 +1,31 @@ +# Encoding Benchmark + +Multiformats ships a number of base encoding algorithms. This module has no strong opinion +on which is best, as long as it is case insensitive, so we benchmark them to choose the fastest. + +At the time of writing `base8` is the fastest, followed by other algorithms that use `rfc4648` encoding +internally in `multiformats` (e.g. `base16`, `base32`), and finally anything using `baseX` encoding. + +We choose `base32upper`, which uses `rfc4648`, because it has a longer alphabet and so shards better. + +## Usage + +```console +$ npm i +$ npm start + +> benchmarks-encoding@1.0.0 start +> npm run build && node dist/src/index.js + + +> benchmarks-encoding@1.0.0 build +> aegir build --bundle false + +[14:51:28] tsc [started] +[14:51:33] tsc [completed] +┌─────────┬────────────────┬─────────┬───────────┬──────┐ +│ (index) │ Implementation │ ops/s │ ms/op │ runs │ +├─────────┼────────────────┼─────────┼───────────┼──────┤ +//... results here +```
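+ +## Why the alphabet matters for sharding + +A rough sketch (not part of the benchmark) of how the chosen encoding feeds the `NextToLast` sharding strategy used by `blockstore-opfs`, assuming its defaults (a 2-character suffix and the `.data` extension): + +```ts +import { base32upper } from 'multiformats/bases/base32' +import { CID } from 'multiformats/cid' + +const cid = CID.parse('QmeimKZyjcBnuXmAD9zMnSjM9JodTbgGT3gutofkTqz9rE') + +// encode the multihash with the case-insensitive base chosen above +const str = base32upper.encode(cid.multihash.bytes) + +// the last two characters become the shard directory, so a longer +// alphabet spreads blocks across more directories +const dir = str.substring(str.length - 2) +const file = `${str}.data` + +console.log(dir, file) +``` + +This mirrors what `NextToLast.encode` in `src/sharding.ts` does.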
diff --git a/packages/blockstore-opfs/benchmarks/encoding/src/index.ts b/packages/blockstore-opfs/benchmarks/encoding/src/index.ts new file mode 100644 index 00000000..40fc0905 --- /dev/null +++ b/packages/blockstore-opfs/benchmarks/encoding/src/index.ts @@ -0,0 +1,73 @@ +import { base10 } from 'multiformats/bases/base10' +import { base16upper } from 'multiformats/bases/base16' +import { base256emoji } from 'multiformats/bases/base256emoji' +import { base32, base32upper, base32hexupper, base32z } from 'multiformats/bases/base32' +import { base36, base36upper } from 'multiformats/bases/base36' +import { base8 } from 'multiformats/bases/base8' +import { CID } from 'multiformats/cid' +import { Bench } from 'tinybench' + +const RESULT_PRECISION = 2 + +const cid = CID.parse('QmeimKZyjcBnuXmAD9zMnSjM9JodTbgGT3gutofkTqz9rE') + +async function main (): Promise<void> { + const suite = new Bench() + suite.add('base8', () => { + base8.encode(cid.bytes) + }) + suite.add('base10', () => { + base10.encode(cid.bytes) + }) + suite.add('base16upper', () => { + base16upper.encode(cid.bytes) + }) + suite.add('base32', () => { + base32.encode(cid.bytes) + }) + suite.add('base32upper', () => { + base32upper.encode(cid.bytes) + }) + suite.add('base32hexupper', () => { + base32hexupper.encode(cid.bytes) + }) + suite.add('base32z', () => { + base32z.encode(cid.bytes) + }) + suite.add('base36', () => { + base36.encode(cid.bytes) + }) + suite.add('base36upper', () => { + base36upper.encode(cid.bytes) + }) + suite.add('base256emoji', () => { + base256emoji.encode(cid.bytes) + }) + + await suite.run() + + console.table(suite.tasks.sort((a, b) => { // eslint-disable-line no-console + const resultA = a.result?.hz ?? 0 + const resultB = b.result?.hz ?? 0 + + if (resultA === resultB) { + return 0 + } + + if (resultA < resultB) { + return 1 + } + + return -1 + }).map(({ name, result }) => ({ + Implementation: name, + 'ops/s': parseFloat(result?.hz.toFixed(RESULT_PRECISION) ?? '0'), + 'ms/op': parseFloat(result?.period.toFixed(RESULT_PRECISION) ??
'0'), + runs: result?.samples.length + }))) +} + +main().catch(err => { + console.error(err) // eslint-disable-line no-console + process.exit(1) +}) diff --git a/packages/blockstore-opfs/benchmarks/encoding/tsconfig.json b/packages/blockstore-opfs/benchmarks/encoding/tsconfig.json new file mode 100644 index 00000000..fee64009 --- /dev/null +++ b/packages/blockstore-opfs/benchmarks/encoding/tsconfig.json @@ -0,0 +1,13 @@ +{ + "extends": "aegir/src/config/tsconfig.aegir.json", + "compilerOptions": { + "outDir": "dist", + "target": "ES2022", + "module": "ES2022", + "lib": ["ES2022", "DOM", "DOM.Iterable"] + }, + "include": [ + "src", + "test" + ] +} diff --git a/packages/blockstore-opfs/package.json b/packages/blockstore-opfs/package.json new file mode 100644 index 00000000..49b300de --- /dev/null +++ b/packages/blockstore-opfs/package.json @@ -0,0 +1,175 @@ +{ + "name": "blockstore-opfs", + "version": "1.1.8", + "description": "Blockstore implementation with Origin Private File System (OPFS) backend", + "license": "Apache-2.0 OR MIT", + "homepage": "https://github.com/ipfs/js-stores/tree/master/packages/blockstore-opfs#readme", + "repository": { + "type": "git", + "url": "git+https://github.com/ipfs/js-stores.git" + }, + "bugs": { + "url": "https://github.com/ipfs/js-stores/issues" + }, + "keywords": [ + "blockstore", + "interface", + "ipfs", + "key-value", + "opfs" + ], + "type": "module", + "types": "./dist/src/index.d.ts", + "typesVersions": { + "*": { + "*": [ + "*", + "dist/*", + "dist/src/*", + "dist/src/*/index" + ], + "src/*": [ + "*", + "dist/*", + "dist/src/*", + "dist/src/*/index" + ] + } + }, + "files": [ + "src", + "dist", + "!dist/test", + "!**/*.tsbuildinfo" + ], + "exports": { + ".": { + "types": "./dist/src/index.d.ts", + "import": "./dist/src/index.js" + }, + "./sharding": { + "types": "./dist/src/sharding.d.ts", + "import": "./dist/src/sharding.js" + } + }, + "eslintConfig": { + "extends": "ipfs", + "parserOptions": { + "project": [ + "tsconfig.json", + "benchmarks/encoding/tsconfig.json" + ], + "sourceType": "module", + "lib": ["DOM"] + } + }, + "release": { + "branches": [ + "main" + ], + "plugins": [ + [ + "@semantic-release/commit-analyzer", + { + "preset": "conventionalcommits", + "releaseRules": [ + { + "breaking": true, + "release": "major" + }, + { + "revert": true, + "release": "patch" + }, + { + "type": "feat", + "release": "minor" + }, + { + "type": "fix", + "release": "patch" + }, + { + "type": "docs", + "release": "patch" + }, + { + "type": "test", + "release": "patch" + }, + { + "type": "deps", + "release": "patch" + }, + { + "scope": "no-release", + "release": false + } + ] + } + ], + [ + "@semantic-release/release-notes-generator", + { + "preset": "conventionalcommits", + "presetConfig": { + "types": [ + { + "type": "feat", + "section": "Features" + }, + { + "type": "fix", + "section": "Bug Fixes" + }, + { + "type": "chore", + "section": "Trivial Changes" + }, + { + "type": "docs", + "section": "Documentation" + }, + { + "type": "deps", + "section": "Dependencies" + }, + { + "type": "test", + "section": "Tests" + } + ] + } + } + ], + "@semantic-release/changelog", + "@semantic-release/npm", + "@semantic-release/github", + "@semantic-release/git" + ] + }, + "scripts": { + "clean": "aegir clean", + "lint": "aegir lint", + "build": "aegir build --bundle false", + "release": "aegir release", + "test": "aegir test -t browser", + "test:chrome": "aegir test -t browser", + "test:chrome-webworker": "aegir test -t webworker", + "test:firefox": "aegir test -t browser -- --browser
firefox", + "test:firefox-webworker": "aegir test -t webworker -- --browser firefox", + "dep-check": "aegir dep-check" + }, + "dependencies": { + "blockstore-core": "^4.0.0", + "interface-blockstore": "^5.0.0", + "interface-store": "^5.0.0", + "it-map": "^3.0.1", + "it-parallel-batch": "^3.0.0", + "multiformats": "^12.0.1" + }, + "devDependencies": { + "aegir": "^41.1.9", + "interface-blockstore-tests": "^6.0.0" + } +} diff --git a/packages/blockstore-opfs/src/index.ts b/packages/blockstore-opfs/src/index.ts new file mode 100644 index 00000000..c63cd4a9 --- /dev/null +++ b/packages/blockstore-opfs/src/index.ts @@ -0,0 +1,296 @@ +/** + * @packageDocumentation + * + * A Blockstore implementation that stores blocks in Origin Private Filesystem. + * + * @example + * + * ```js + * import { OpfsBlockstore } from 'blockstore-opfs' + * + * const store = new OpfsBlockstore('store-name') + * ``` + */ + +import { + Errors +} from 'blockstore-core' +import map from 'it-map' +import parallelBatch from 'it-parallel-batch' +import { FlatDirectory, NextToLast, type ShardingStrategy } from './sharding.js' +import type { Blockstore, Pair } from 'interface-blockstore' +import type { AwaitIterable } from 'interface-store' +import type { CID } from 'multiformats/cid' + +export interface OpfsBlockstoreInit { + /** + * If true and the passed blockstore location does not exist, create + * it on startup. default: true + */ + createIfMissing?: boolean + + /** + * If true and the passed blockstore location exists on startup, throw + * an error. default: false + */ + errorIfExists?: boolean + + /** + * The file extension to use when storing blocks. default: '.data' + */ + extension?: string + + /** + * How many blocks to put in parallel when `.putMany` is called. + * default: 50 + */ + putManyConcurrency?: number + + /** + * How many blocks to read in parallel when `.getMany` is called. + * default: 50 + */ + getManyConcurrency?: number + + /** + * How many blocks to delete in parallel when `.deleteMany` is called. + * default: 50 + */ + deleteManyConcurrency?: number + + /** + * Control how CIDs map to paths and back + */ + shardingStrategy?: ShardingStrategy +} + +/** + * A blockstore backed by the Origin Private Filesystem + */ +export class OpfsBlockstore implements Blockstore { + public name: string + private directory: FileSystemDirectoryHandle | null + private readonly createIfMissing: boolean + private readonly errorIfExists: boolean + private readonly putManyConcurrency: number + private readonly getManyConcurrency: number + private readonly deleteManyConcurrency: number + private readonly shardingStrategy: ShardingStrategy + + constructor (name: string, init: OpfsBlockstoreInit = {}) { + this.name = name + this.directory = null + this.createIfMissing = init.createIfMissing ?? true + this.errorIfExists = init.errorIfExists ?? false + this.deleteManyConcurrency = init.deleteManyConcurrency ?? 50 + this.getManyConcurrency = init.getManyConcurrency ?? 50 + this.putManyConcurrency = init.putManyConcurrency ?? 50 + this.shardingStrategy = init.shardingStrategy ?? new NextToLast() + } + + async open (): Promise { + let opfsRootDir: FileSystemDirectoryHandle + try { + opfsRootDir = await window.navigator.storage.getDirectory() + } catch (err: unknown) { + throw Errors.openFailedError(new Error('Failed to get root directory of bucket file system. 
+ async open (): Promise<void> { + let opfsRootDir: FileSystemDirectoryHandle + try { + opfsRootDir = await window.navigator.storage.getDirectory() + } catch (err: unknown) { + throw Errors.openFailedError(new Error('Failed to get root directory of bucket file system. OPFS may not be supported in this environment.')) + } + + try { + const directory = await opfsRootDir.getDirectoryHandle(this.name) + + if (this.errorIfExists) { + throw Errors.openFailedError(new Error(`Blockstore name: ${this.name} already exists`)) + } + + this.directory = directory + } catch (err: unknown) { + if (err instanceof DOMException) { + if (err.name === 'NotFoundError') { + if (!this.createIfMissing) { + throw Errors.openFailedError(new Error(`Blockstore name: ${this.name} does not exist`)) + } else { + this.directory = await opfsRootDir.getDirectoryHandle(this.name, { create: true }) + return + } + } + + if (err.name === 'TypeMismatchError') { + throw Errors.openFailedError(new Error(`Blockstore name: ${this.name} exists but is not a directory`)) + } + } + + throw err + } + } + + async close (): Promise<void> { + this.directory = null + } + + /** + * Does not support path strings at the moment ('/path/to/dir'), + * only directory names ('name-of-dir') + */ + async #getParentDirectory (dir?: string): Promise<FileSystemDirectoryHandle> { + let directory = this.directory + + if (directory === null) { + throw new Error('Blockstore is not open.') + } + + if (typeof dir === 'string' && dir !== '') { + try { + directory = await directory.getDirectoryHandle(dir, { create: true }) + } catch (err: any) { + if (err.name === 'TypeMismatchError') { + throw Errors.openFailedError(new Error(`Blockstore directory: ${dir} exists but is not a directory`)) + } + + throw err + } + } + + return directory + } + + async put (key: CID, val: Uint8Array): Promise<CID> { + const { dir, file: name } = this.shardingStrategy.encode(key) + + try { + const directory = await this.#getParentDirectory(dir) + const file = await directory.getFileHandle(name, { create: true }) + const writeable = await file.createWritable() + await writeable.write(val) + await writeable.close() + } catch (err: any) { + throw Errors.putFailedError(err) + } + + return key + } + + async * putMany (source: AwaitIterable<Pair>): AsyncIterable<CID> { + yield * parallelBatch( + map(source, ({ cid, block }: Pair) => { + return async () => { + await this.put(cid, block) + + return cid + } + }), + this.putManyConcurrency + ) + } + + async get (key: CID): Promise<Uint8Array> { + const { dir, file: name } = this.shardingStrategy.encode(key) + + let directory: FileSystemDirectoryHandle + try { + directory = await this.#getParentDirectory(dir) + } catch (err: any) { + throw Errors.getFailedError(err) + } + + try { + const fileHandle = await directory.getFileHandle(name) + const file = await fileHandle.getFile() + return new Uint8Array(await file.arrayBuffer()) + } catch (err: any) { + throw Errors.notFoundError(err) + } + } + + async * getMany (source: AwaitIterable<CID>): AsyncIterable<Pair> { + yield * parallelBatch( + map(source, (key: CID) => { + return async () => { + return { + cid: key, + block: await this.get(key) + } + } + }), + this.getManyConcurrency + ) + } + + async delete (key: CID): Promise<void> { + const { dir, file: name } = this.shardingStrategy.encode(key) + + let directory + try { + directory = await this.#getParentDirectory(dir) + + await directory.getFileHandle(name) + + // succeeds regardless of whether name exists or not, only fails if name is a non-empty directory + await directory.removeEntry(name) + } catch (err: any) { + if (err instanceof DOMException && err.name === 'NotFoundError') { + return + } + + throw Errors.deleteFailedError(err) + } + } + + async * deleteMany (source: AwaitIterable<CID>): AsyncIterable<CID> { + yield * parallelBatch( + map(source, (key: CID) => { + return async () => { + await
this.delete(key) + + return key + } + }), + this.deleteManyConcurrency + ) + } + + /** + * Check for the existence of the given key + */ + async has (key: CID): Promise<boolean> { + const { dir, file: name } = this.shardingStrategy.encode(key) + + try { + const directory = await this.#getParentDirectory(dir) + return Boolean(await directory.getFileHandle(name)) + } catch (err: any) { + if (err instanceof DOMException && err.name === 'NotFoundError') { + return false + } + + throw Errors.hasFailedError(err) + } + } + + async * getAll (): AsyncIterable<Pair> { + const directory = await this.#getParentDirectory() + + let directories: AwaitIterable<[string, FileSystemHandle]> + if (this.shardingStrategy instanceof NextToLast) { + directories = directory.entries() + } else if (this.shardingStrategy instanceof FlatDirectory) { + directories = [[this.name, directory]] + } else { + throw new Error('unsupported sharding strategy') + } + + for await (const [, dirHandle] of directories) { + if (dirHandle instanceof FileSystemDirectoryHandle && dirHandle[Symbol.asyncIterator] != null) { + for await (const [name, fileHandle] of dirHandle) { + if (fileHandle instanceof FileSystemFileHandle && name.endsWith(this.shardingStrategy.extension)) { + const file = await fileHandle.getFile() + yield { + cid: this.shardingStrategy.decode(name), + block: new Uint8Array(await file.arrayBuffer()) + } + } + } + } + } + } +} diff --git a/packages/blockstore-opfs/src/sharding.ts b/packages/blockstore-opfs/src/sharding.ts new file mode 100644 index 00000000..8164c6fe --- /dev/null +++ b/packages/blockstore-opfs/src/sharding.ts @@ -0,0 +1,119 @@ +import { base32upper } from 'multiformats/bases/base32' +import { CID } from 'multiformats/cid' +import type { MultibaseCodec } from 'multiformats/bases/interface' + +export interface ShardingStrategy { + extension: string + encode(cid: CID): { dir: string, file: string } + decode(path: string): CID +} + +export interface NextToLastInit { + /** + * The file extension to use. default: '.data' + */ + extension?: string + + /** + * How many characters to take from the end of the CID. default: 2 + */ + prefixLength?: number + + /** + * The multibase codec to use - nb. should be case insensitive. + * default: base32upper + */ + base?: MultibaseCodec<string> +} + +/** + * A sharding strategy that takes the last few characters of a multibase encoded + * CID and uses them as the directory to store the block in. This prevents + * storing all blocks in a single directory which would overwhelm most + * filesystems. + */ +export class NextToLast implements ShardingStrategy { + public extension: string + private readonly prefixLength: number + private readonly base: MultibaseCodec<string> + + constructor (init: NextToLastInit = {}) { + this.extension = init.extension ?? '.data' + this.prefixLength = init.prefixLength ?? 2 + this.base = init.base ?? base32upper + } + + encode (cid: CID): { dir: string, file: string } { + const str = this.base.encoder.encode(cid.multihash.bytes) + const prefix = str.substring(str.length - this.prefixLength) + + return { + dir: prefix, + file: `${str}${this.extension}` + } + } + + decode (str: string): CID { + const names = str.split('/') + + let fileName = names[names.length - 1] + + if (fileName.endsWith(this.extension)) { + fileName = fileName.substring(0, fileName.length - this.extension.length) + } + + return CID.decode(this.base.decoder.decode(fileName)) + } +}
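+ +// e.g. with the defaults, a block whose multihash encodes to '...QYLY' under base32upper is stored at 'LY/...QYLY.data'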
+ +export interface FlatDirectoryInit { + /** + * The file extension to use. default: '.data' + */ + extension?: string + + /** + * The multibase codec to use - nb. should be case insensitive. + * default: base32upper + */ + base?: MultibaseCodec<string> +} + +/** + * A sharding strategy that does not do any sharding and stores all files + * in one directory. Only for testing, do not use in production. + */ +export class FlatDirectory implements ShardingStrategy { + public extension: string + private readonly base: MultibaseCodec<string> + + constructor (init: FlatDirectoryInit = {}) { + this.extension = init.extension ?? '.data' + this.base = init.base ?? base32upper + } + + encode (cid: CID): { dir: string, file: string } { + const str = this.base.encoder.encode(cid.multihash.bytes) + + return { + dir: '', + file: `${str}${this.extension}` + } + } + + decode (str: string): CID { + const names = str.split('/') + let fileName = names[names.length - 1] + + if (fileName.endsWith(this.extension)) { + fileName = fileName.substring(0, fileName.length - this.extension.length) + } + + return CID.decode(this.base.decoder.decode(fileName)) + } +} diff --git a/packages/blockstore-opfs/test/index.spec.ts b/packages/blockstore-opfs/test/index.spec.ts new file mode 100644 index 00000000..ab0e546c --- /dev/null +++ b/packages/blockstore-opfs/test/index.spec.ts @@ -0,0 +1,149 @@ +/* eslint-env mocha */ +import { expect } from 'aegir/chai' +import { interfaceBlockstoreTests } from 'interface-blockstore-tests' +import { base32 } from 'multiformats/bases/base32' +import { CID } from 'multiformats/cid' +import { OpfsBlockstore } from '../src/index.js' +import { FlatDirectory, NextToLast } from '../src/sharding.js' + +const opfs = await window.navigator.storage.getDirectory() +const utf8Encoder = new TextEncoder() + +describe('OpfsBlockstore', () => { + describe('construction', () => { + it('defaults - folder missing', async () => { + const dir = `test-${Math.random()}` + await expect( + (async () => { + const fs = new OpfsBlockstore(dir) + await fs.open() + await fs.close() + })() + ).to.eventually.be.undefined() + }) + + it('defaults - folder exists', async () => { + const dir = `test-${Math.random()}` + await opfs.getDirectoryHandle(dir, { create: true }) + await expect( + (async () => { + const fs = new OpfsBlockstore(dir) + await fs.open() + await fs.close() + })() + ).to.eventually.be.undefined() + }) + }) + + describe('open', () => { + it('createIfMissing: false - folder missing', async () => { + const dir = `test-${Math.random()}` + const store = new OpfsBlockstore(dir, { createIfMissing: false }) + await expect(store.open()).to.eventually.be.rejected + .with.property('code', 'ERR_OPEN_FAILED') + }) + + it('errorIfExists: true - folder exists', async () => { + const dir = `test-${Math.random()}` + await opfs.getDirectoryHandle(dir, { create: true }) + const store = new OpfsBlockstore(dir, { errorIfExists: true }) + await expect(store.open()).to.eventually.be.rejected + .with.property('code', 'ERR_OPEN_FAILED') + }) + }) + + it('deleting files', async () => { + const dir = `test-${Math.random()}` + const fs = new OpfsBlockstore(dir) + await fs.open() + + const key = CID.parse('QmeimKZyjcBnuXmAD9zMnSjM9JodTbgGT3gutofkTqz9rE') + await fs.put(key, Uint8Array.from([0, 1, 2, 3])) + await fs.delete(key) + + await expect(fs.get(key)).to.eventually.be.rejected + .with.property('code', 'ERR_NOT_FOUND') + }) + + it('deleting non-existent files', async () => { + const dir = `test-${Math.random()}` +
const fs = new OpfsBlockstore(dir) + await fs.open() + + const key = CID.parse('QmeimKZyjcBnuXmAD9zMnSjM9JodTbgGT3gutofkTqz9rE') + + await fs.delete(key) + + await expect(fs.get(key)).to.eventually.be.rejected + .with.property('code', 'ERR_NOT_FOUND') + }) + + describe('interface-blockstore (flat directory)', () => { + interfaceBlockstoreTests({ + setup: async () => { + const store = new OpfsBlockstore(`test-${Math.random()}`, { + shardingStrategy: new FlatDirectory() + }) + await store.open() + + return store + }, + teardown: async (store) => { + await store.close() + await opfs.removeEntry(store.name, { recursive: true }) + } + }) + }) + + describe('interface-blockstore (default sharding)', () => { + interfaceBlockstoreTests({ + setup: async () => { + const store = new OpfsBlockstore(`test-${Math.random()}`) + await store.open() + + return store + }, + teardown: async (store) => { + await store.close() + await opfs.removeEntry(store.name, { recursive: true }) + } + }) + }) + + describe('interface-blockstore (custom encoding)', () => { + interfaceBlockstoreTests({ + setup: async () => { + const store = new OpfsBlockstore(`test-${Math.random()}`, { + shardingStrategy: new NextToLast({ + base: base32 + }) + }) + + await store.open() + + return store + }, + teardown: async (store) => { + await store.close() + await opfs.removeEntry(store.name, { recursive: true }) + } + }) + }) + + it('can survive concurrent writes', async () => { + const dir = `test-${Math.random()}` + const fs = new OpfsBlockstore(dir) + await fs.open() + + const key = CID.parse('QmeimKZyjcBnuXmAD9zMnSjM9JodTbgGT3gutofkTqz9rE') + const value = utf8Encoder.encode('Hello world') + + await Promise.all( + new Array(100).fill(0).map(async () => { await fs.put(key, value) }) + ) + + const res = await fs.get(key) + + expect(res).to.deep.equal(value) + }) +}) diff --git a/packages/blockstore-opfs/test/sharding.spec.ts b/packages/blockstore-opfs/test/sharding.spec.ts new file mode 100644 index 00000000..3c896570 --- /dev/null +++ b/packages/blockstore-opfs/test/sharding.spec.ts @@ -0,0 +1,109 @@ +/* eslint-env mocha */ +import { expect } from 'aegir/chai' +import { base32upper } from 'multiformats/bases/base32' +import { CID } from 'multiformats/cid' +import { FlatDirectory, NextToLast } from '../src/sharding.js' + +describe('flat', () => { + it('should encode', () => { + const cid = CID.parse('QmeimKZyjcBnuXmAD9zMnSjM9JodTbgGT3gutofkTqz9rE') + const strategy = new FlatDirectory() + const { dir, file } = strategy.encode(cid) + + expect(dir).to.equal('') + expect(file).to.equal(`${base32upper.encode(cid.multihash.bytes)}.data`) + }) + + it('should encode with extension', () => { + const cid = CID.parse('QmeimKZyjcBnuXmAD9zMnSjM9JodTbgGT3gutofkTqz9rE') + const strategy = new FlatDirectory({ + extension: '.file' + }) + const { dir, file } = strategy.encode(cid) + + expect(dir).to.equal('') + expect(file).to.equal(`${base32upper.encode(cid.multihash.bytes)}.file`) + }) + + it('should decode', () => { + const mh = 'BCIQPGZJ6QLZOFG3OP45NLMSJUWGJCO72QQKHLDTB6FXIB6BDSLRQYLY' + const strategy = new FlatDirectory() + const cid = strategy.decode(`${mh}.data`) + + expect(cid).to.eql(CID.decode(base32upper.decode(mh))) + }) + + it('should decode with extension', () => { + const mh = 'BCIQPGZJ6QLZOFG3OP45NLMSJUWGJCO72QQKHLDTB6FXIB6BDSLRQYLY' + const strategy = new FlatDirectory({ + extension: '.file' + }) + const cid = strategy.decode(`${mh}.file`) + + expect(cid).to.eql(CID.decode(base32upper.decode(mh))) + }) +}) + +describe('next to last', () 
=> { + it('should encode', () => { + const mh = 'BCIQPGZJ6QLZOFG3OP45NLMSJUWGJCO72QQKHLDTB6FXIB6BDSLRQYLY' + const cid = CID.decode(base32upper.decode(mh)) + const strategy = new NextToLast() + const { dir, file } = strategy.encode(cid) + + expect(dir).to.equal('LY') + expect(file).to.equal(`${mh}.data`) + }) + + it('should encode with prefix length', () => { + const mh = 'BCIQPGZJ6QLZOFG3OP45NLMSJUWGJCO72QQKHLDTB6FXIB6BDSLRQYLY' + const cid = CID.decode(base32upper.decode(mh)) + const strategy = new NextToLast({ + prefixLength: 4 + }) + const { dir, file } = strategy.encode(cid) + + expect(dir).to.equal('QYLY') + expect(file).to.equal(`${mh}.data`) + }) + + it('should encode with extension', () => { + const mh = 'BCIQPGZJ6QLZOFG3OP45NLMSJUWGJCO72QQKHLDTB6FXIB6BDSLRQYLY' + const cid = CID.decode(base32upper.decode(mh)) + const strategy = new NextToLast({ + extension: '.file' + }) + const { dir, file } = strategy.encode(cid) + + expect(dir).to.equal('LY') + expect(file).to.equal(`${mh}.file`) + }) + + it('should decode', () => { + const mh = 'BCIQPGZJ6QLZOFG3OP45NLMSJUWGJCO72QQKHLDTB6FXIB6BDSLRQYLY' + const strategy = new NextToLast() + const cid = strategy.decode(`LY/${mh}.data`) + + expect(cid).to.eql(CID.decode(base32upper.decode(mh))) + }) + + it('should decode with prefix length', () => { + const mh = 'BCIQPGZJ6QLZOFG3OP45NLMSJUWGJCO72QQKHLDTB6FXIB6BDSLRQYLY' + const strategy = new NextToLast({ + prefixLength: 4 + }) + const cid = strategy.decode(`QYLY/${mh}.data`) + + expect(cid).to.eql(CID.decode(base32upper.decode(mh))) + }) + + it('should decode with extension', () => { + const mh = 'BCIQPGZJ6QLZOFG3OP45NLMSJUWGJCO72QQKHLDTB6FXIB6BDSLRQYLY' + const strategy = new NextToLast({ + extension: '.file' + }) + const cid = strategy.decode(`LY/${mh}.file`) + + expect(cid).to.eql(CID.decode(base32upper.decode(mh))) + }) +}) diff --git a/packages/blockstore-opfs/tsconfig.json b/packages/blockstore-opfs/tsconfig.json new file mode 100644 index 00000000..d3a94e19 --- /dev/null +++ b/packages/blockstore-opfs/tsconfig.json @@ -0,0 +1,25 @@ +{ + "extends": "aegir/src/config/tsconfig.aegir.json", + "compilerOptions": { + "outDir": "dist", + "lib": ["DOM", "DOM.AsyncIterable"] + }, + "include": [ + "src", + "test" + ], + "references": [ + { + "path": "../blockstore-core" + }, + { + "path": "../interface-blockstore" + }, + { + "path": "../interface-blockstore-tests" + }, + { + "path": "../interface-store" + } + ] +} diff --git a/packages/blockstore-opfs/typedoc.json b/packages/blockstore-opfs/typedoc.json new file mode 100644 index 00000000..332ef6a2 --- /dev/null +++ b/packages/blockstore-opfs/typedoc.json @@ -0,0 +1,6 @@ +{ + "entryPoints": [ + "./src/index.ts", + "./src/sharding.ts" + ] +}