diff --git a/README.md b/README.md index 2d571ee..4c1ec77 100644 --- a/README.md +++ b/README.md @@ -77,7 +77,7 @@ dagPB.util #### Create a DAGNode ```JavaScript -const node1 = new DAGNode(Buffer.from('some data')) +const node1 = new DAGNode(new TextEncoder().encode('some data')) // node2 will have the same data as node1 const node2 = new DAGNode('some data') @@ -114,7 +114,7 @@ const DAGNode = dagPB.DAGNode #### DAGNode constructor -- `data` - type: Buffer +- `data` - type: Uint8Array or String - `links`- (optional) type: Array of DAGLink instances or Array of DAGLink instances in its json format (link.toJSON) - `serializedSize`- (optional) type: Number of bytes the serialized node has. If none is given, it will automatically be calculated. @@ -198,7 +198,7 @@ node.rmLink('Link1') #### `node.serialize()` -Serialize the DAGNode instance to its portable binary format. Yields the same result as `dagPB.util.serialize(node)`. Returns a `Buffer`. +Serialize the DAGNode instance to its portable binary format. Yields the same result as `dagPB.util.serialize(node)`. Returns a `Uint8Array`. ### DAGLink functions @@ -238,7 +238,6 @@ const link = new DAGLink( > See: https://github.com/ipld/interface-ipld-format#local-resolver-methods - #### `dagPB.resolver.resolve` #### `dagPB.resolver.tree` @@ -251,7 +250,7 @@ const link = new DAGLink( ### `dagPB.util.serialize` -Serialize the DAGNode instance to its portable binary format. Yields the same result as `node.serialize()`. Returns a `Buffer`. +Serialize the DAGNode instance to its portable binary format. Yields the same result as `node.serialize()`. Returns a `Uint8Array`. ### `dagPB.util.deserialize` diff --git a/package.json b/package.json index 1fc55f3..5f3a3c8 100644 --- a/package.json +++ b/package.json @@ -65,22 +65,17 @@ "npm": ">=3.0.0" }, "dependencies": { - "buffer": "^5.6.0", - "cids": "~0.8.3", + "cids": "^1.0.0", "class-is": "^1.1.0", - "multicodec": "^1.0.3", - "multihashing-async": "^1.0.0", - "protons": "^1.2.1", - "stable": "^0.1.8" + "multicodec": "^2.0.0", + "multihashing-async": "^2.0.0", + "protons": "^2.0.0", + "stable": "^0.1.8", + "uint8arrays": "^1.0.0" }, "devDependencies": { - "aegir": "^23.0.0", - "fs-extra": "^9.0.1", - "ipfs-block-service": "~0.17.1", - "ipfs-repo": "^4.0.0", - "ipfs-utils": "^2.3.1", - "ipld-block": "~0.9.2", - "multibase": "^1.0.1", - "multihashes": "^1.0.1" + "aegir": "^25.0.0", + "multibase": "^3.0.0", + "multihashes": "^3.0.0" } } diff --git a/src/dag-link/dagLink.js b/src/dag-link/dagLink.js index b661845..6028c57 100644 --- a/src/dag-link/dagLink.js +++ b/src/dag-link/dagLink.js @@ -2,7 +2,7 @@ const CID = require('cids') const withIs = require('class-is') -const { Buffer } = require('buffer') +const uint8ArrayFromString = require('uint8arrays/from-string') // Link represents an IPFS Merkle DAG Link between Nodes.
class DAGLink { @@ -39,15 +39,15 @@ class DAGLink { return Object.assign({}, this._json) } - // Memoize the Buffer representation of name + // Memoize the Uint8Array representation of name // We need this to sort the links, otherwise - // we will reallocate new buffers every time + // we will reallocate new Uint8Arrays every time get nameAsBuffer () { if (this._nameBuf !== null) { return this._nameBuf } - this._nameBuf = Buffer.from(this.Name) + this._nameBuf = uint8ArrayFromString(this.Name) return this._nameBuf } } diff --git a/src/dag-node/dagNode.js b/src/dag-node/dagNode.js index 878fb2b..ae3fc10 100644 --- a/src/dag-node/dagNode.js +++ b/src/dag-node/dagNode.js @@ -1,24 +1,26 @@ 'use strict' const withIs = require('class-is') -const { Buffer } = require('buffer') const sortLinks = require('./sortLinks') const DAGLink = require('../dag-link/dagLink') const { serializeDAGNode } = require('../serialize.js') const toDAGLink = require('./toDagLink') const addLink = require('./addLink') const rmLink = require('./rmLink') +const uint8ArrayFromString = require('uint8arrays/from-string') +const uint8ArrayToString = require('uint8arrays/to-string') class DAGNode { constructor (data, links = [], serializedSize = null) { if (!data) { - data = Buffer.alloc(0) + data = new Uint8Array(0) } if (typeof data === 'string') { - data = Buffer.from(data) + data = uint8ArrayFromString(data) } - if (!Buffer.isBuffer(data)) { - throw new Error('Passed \'data\' is not a buffer or a string!') + + if (!(data instanceof Uint8Array)) { + throw new Error('Passed \'data\' is not a Uint8Array or a String!') } if (serializedSize !== null && typeof serializedSize !== 'number') { @@ -53,7 +55,7 @@ class DAGNode { } toString () { - return `DAGNode <data: "${this.Data.toString('base64')}", links: ${this.Links.length}, size: ${this.size}>` + return `DAGNode <data: "${uint8ArrayToString(this.Data, 'base64urlpad')}", links: ${this.Links.length}, size: ${this.size}>` } _invalidateCached () { diff --git a/src/dag-node/rmLink.js b/src/dag-node/rmLink.js index 7822a2f..4e9fe92 100644 --- a/src/dag-node/rmLink.js +++ b/src/dag-node/rmLink.js @@ -1,7 +1,7 @@ 'use strict' const CID = require('cids') -const { Buffer } = require('buffer') +const uint8ArrayEquals = require('uint8arrays/equals') const rmLink = (dagNode, nameOrCid) => { let predicate = null @@ -9,8 +9,8 @@ const rmLink = (dagNode, nameOrCid) => { // It's a name if (typeof nameOrCid === 'string') { predicate = (link) => link.Name === nameOrCid - } else if (Buffer.isBuffer(nameOrCid) || CID.isCID(nameOrCid)) { - predicate = (link) => link.Hash.equals(nameOrCid) + } else if (nameOrCid instanceof Uint8Array || CID.isCID(nameOrCid)) { + predicate = (link) => uint8ArrayEquals(link.Hash, nameOrCid) } if (predicate) { diff --git a/src/dag-node/sortLinks.js b/src/dag-node/sortLinks.js index 6f39161..8b015d5 100644 --- a/src/dag-node/sortLinks.js +++ b/src/dag-node/sortLinks.js @@ -1,10 +1,13 @@ 'use strict' -const { Buffer } = require('buffer') const sort = require('stable') +const uint8ArrayCompare = require('uint8arrays/compare') const linkSort = (a, b) => { - return Buffer.compare(a.nameAsBuffer, b.nameAsBuffer) + const buf1 = a.nameAsBuffer + const buf2 = b.nameAsBuffer + + return uint8ArrayCompare(buf1, buf2) } /** diff --git a/src/resolver.js b/src/resolver.js index d5ebed3..a65c568 100644 --- a/src/resolver.js +++ b/src/resolver.js @@ -10,7 +10,7 @@ const util = require('./util') * Returns the value or a link and the partial missing path. This way the * IPLD Resolver can fetch the link and continue to resolve.
* - * @param {Buffer} binaryBlob - Binary representation of a PB block + * @param {Uint8Array} binaryBlob - Binary representation of a PB block * @param {string} [path='/'] - Path that should be resolved * @returns {Object} result - Result of the path if it was resolved successfully * @returns {*} result.value - Value the path resolves to @@ -58,7 +58,7 @@ exports.resolve = (binaryBlob, path) => { * Return all available paths of a block. * * @generator - * @param {Buffer} binaryBlob - Binary representation of a PB block + * @param {Uint8Array} binaryBlob - Binary representation of a PB block * @yields {string} - A single path */ exports.tree = function * (binaryBlob) { diff --git a/src/serialize.js b/src/serialize.js index fb4aec1..f651dac 100644 --- a/src/serialize.js +++ b/src/serialize.js @@ -13,14 +13,14 @@ const toProtoBuf = (node) => { pbn.Data = node.Data } else { // NOTE: this has to be null in order to match go-ipfs serialization - // `null !== new Buffer(0)` + // `null !== new Uint8Array(0)` pbn.Data = null } if (node.Links && node.Links.length > 0) { pbn.Links = node.Links .map((link) => ({ - Hash: link.Hash.buffer, + Hash: link.Hash.bytes, Name: link.Name, Tsize: link.Tsize })) @@ -35,7 +35,7 @@ const toProtoBuf = (node) => { * Serialize internal representation into a binary PB block. * * @param {Object} node - Internal representation of a PB block - * @returns {Buffer} - The encoded binary representation + * @returns {Uint8Array} - The encoded binary representation */ const serializeDAGNode = (node) => { const data = node.Data diff --git a/src/util.js b/src/util.js index 76e4267..53fcc9d 100644 --- a/src/util.js +++ b/src/util.js @@ -1,6 +1,5 @@ 'use strict' -const { Buffer } = require('buffer') const protons = require('protons') const proto = protons(require('./dag.proto')) const DAGLink = require('./dag-link/dagLink') @@ -30,7 +29,7 @@ const cid = (binaryBlob, userOptions) => { * Serialize internal representation into a binary PB block. * * @param {Object} node - Internal representation of a PB block - * @returns {Buffer} - The encoded binary representation + * @returns {Uint8Array} - The encoded binary representation */ const serialize = (node) => { if (DAGNode.isDAGNode(node)) { @@ -43,7 +42,7 @@ const serialize = (node) => { /** * Deserialize PB block into the internal representation. * - * @param {Buffer} buffer - Binary representation of a PB block + * @param {Uint8Array} buffer - Binary representation of a PB block * @returns {Object} - An object that conforms to the IPLD Data Model */ const deserialize = (buffer) => { @@ -53,9 +52,9 @@ const deserialize = (buffer) => { return new DAGLink(link.Name, link.Tsize, link.Hash) }) - const data = pbn.Data == null ? Buffer.alloc(0) : pbn.Data + const data = pbn.Data == null ?
new Uint8Array(0) : pbn.Data - return new DAGNode(data, links, buffer.length) + return new DAGNode(data, links, buffer.byteLength) } exports.serialize = serialize diff --git a/test/browser.js b/test/browser.js deleted file mode 100644 index e8e8ef8..0000000 --- a/test/browser.js +++ /dev/null @@ -1,30 +0,0 @@ -/* eslint-env mocha */ -/* global self */ - -'use strict' - -const IPFSRepo = require('ipfs-repo') - -const basePath = 'ipfs' + Math.random() - -const idb = self.indexedDB -idb.deleteDatabase(basePath) -idb.deleteDatabase(basePath + '/blocks') - -describe('Browser', () => { - const repo = new IPFSRepo(basePath) - - before(async () => { - await repo.init({}) - await repo.open() - }) - - after(async () => { - await repo.close() - idb.deleteDatabase(basePath) - idb.deleteDatabase(basePath + '/blocks') - }) - - require('./dag-node-test')(repo) - require('./dag-link-test')(repo) -}) diff --git a/test/dag-link-test.js b/test/dag-link-test.js deleted file mode 100644 index 232fec3..0000000 --- a/test/dag-link-test.js +++ /dev/null @@ -1,67 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const chai = require('aegir/utils/chai') -const { Buffer } = require('buffer') -const expect = chai.expect -const CID = require('cids') -const DAGLink = require('../src').DAGLink - -module.exports = (repo) => { - describe('DAGLink', () => { - describe('create with multihash as b58 encoded string', () => { - it('string', () => { - const link = new DAGLink('hello', 3, 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39U') - - expect(link.Hash.buffer.toString('hex')) - .to.equal('12208ab7a6c5e74737878ac73863cb76739d15d4666de44e5756bf55a2f9e9ab5f43') - }) - - it('empty string', () => { - const link = new DAGLink('', 4, 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39U') - expect(link.Name).to.be.eql('') - }) - - it('create with multihash as a multihash Buffer', () => { - const link = new DAGLink('hello', 3, Buffer.from('12208ab7a6c5e74737878ac73863cb76739d15d4666de44e5756bf55a2f9e9ab5f43', 'hex')) - - expect(new CID(link.Hash).toBaseEncodedString()) - .to.equal('QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39U') - }) - - it('fail to create without multihash', () => { - expect(() => { - const link = new DAGLink('hello', 3) - expect(link).to.not.exist() - }).to.throw() - }) - }) - - it('toJSON', () => { - const link = new DAGLink('hello', 3, 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39U') - - expect(link.toJSON()).to.eql({ - name: 'hello', - size: 3, - cid: 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39U' - }) - }) - - it('toString', () => { - const link = new DAGLink('hello', 3, 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39U') - - expect(link.toString()).to.equal('DAGLink ') - }) - - it('exposes a CID', () => { - const cid = 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39U' - const link = new DAGLink('hello', 3, cid) - expect(link.Hash.toBaseEncodedString()).to.equal(cid) - }) - - it('has an immutable CID', () => { - const link = new DAGLink('hello', 3, 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39U') - expect(() => { link.Hash = 'foo' }).to.throw(/read.only/) - }) - }) -} diff --git a/test/dag-link-test.spec.js b/test/dag-link-test.spec.js new file mode 100644 index 0000000..d989b63 --- /dev/null +++ b/test/dag-link-test.spec.js @@ -0,0 +1,66 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('aegir/utils/chai') +const expect = chai.expect +const CID = require('cids') +const DAGLink = require('../src').DAGLink +const uint8ArrayFromString = require('uint8arrays/from-string') +const uint8ArrayToString = 
require('uint8arrays/to-string') + +describe('DAGLink', () => { + describe('create with multihash as b58 encoded string', () => { + it('string', () => { + const link = new DAGLink('hello', 3, 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39U') + + expect(uint8ArrayToString(link.Hash.bytes, 'base16')) + .to.equal('12208ab7a6c5e74737878ac73863cb76739d15d4666de44e5756bf55a2f9e9ab5f43') + }) + + it('empty string', () => { + const link = new DAGLink('', 4, 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39U') + expect(link.Name).to.be.eql('') + }) + + it('create with multihash as a multihash Buffer', () => { + const link = new DAGLink('hello', 3, uint8ArrayFromString('12208ab7a6c5e74737878ac73863cb76739d15d4666de44e5756bf55a2f9e9ab5f43', 'base16')) + + expect(new CID(link.Hash).toBaseEncodedString()) + .to.equal('QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39U') + }) + + it('fail to create without multihash', () => { + expect(() => { + const link = new DAGLink('hello', 3) + expect(link).to.not.exist() + }).to.throw() + }) + }) + + it('toJSON', () => { + const link = new DAGLink('hello', 3, 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39U') + + expect(link.toJSON()).to.eql({ + name: 'hello', + size: 3, + cid: 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39U' + }) + }) + + it('toString', () => { + const link = new DAGLink('hello', 3, 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39U') + + expect(link.toString()).to.equal('DAGLink ') + }) + + it('exposes a CID', () => { + const cid = 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39U' + const link = new DAGLink('hello', 3, cid) + expect(link.Hash.toBaseEncodedString()).to.equal(cid) + }) + + it('has an immutable CID', () => { + const link = new DAGLink('hello', 3, 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39U') + expect(() => { link.Hash = 'foo' }).to.throw(/read.only/) + }) +}) diff --git a/test/dag-node-test.js b/test/dag-node-test.js deleted file mode 100644 index 686654d..0000000 --- a/test/dag-node-test.js +++ /dev/null @@ -1,579 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const chai = require('aegir/utils/chai') -const { Buffer } = require('buffer') -const expect = chai.expect - -const dagPB = require('../src') -const DAGLink = dagPB.DAGLink -const DAGNode = dagPB.DAGNode -const { isNode } = require('ipfs-utils/src/env') -const multihash = require('multihashes') -const multicodec = require('multicodec') -const multihashing = require('multihashing-async') - -const BlockService = require('ipfs-block-service') -const Block = require('ipld-block') -const CID = require('cids') -const multibase = require('multibase') -const loadFixture = require('aegir/fixtures') - -const testBlockNamedLinks = loadFixture('test/fixtures/test-block-named-links') -const testBlockUnnamedLinks = loadFixture('test/fixtures/test-block-unnamed-links') - -module.exports = (repo) => { - const bs = new BlockService(repo) - - describe('DAGNode', () => { - it('create a node', () => { - const data = Buffer.from('some data') - - const node = new DAGNode(data) - expect(node.Data.length).to.be.above(0) - expect(Buffer.isBuffer(node.Data)).to.be.true() - expect(node.size).to.be.above(0) - - const serialized = dagPB.util.serialize(node) - const deserialized = dagPB.util.deserialize(serialized) - expect(node.Data).to.eql(deserialized.Data) - }) - - it('dagPB.util.serialize same as node.serialize()', () => { - const node = new DAGNode(Buffer.from('some data')) - const serialized = dagPB.util.serialize(node) - expect(serialized).to.eql(node.serialize()) - }) - - it('create a node with string 
data', () => { - const data = 'some data' - - const node = new DAGNode(data) - expect(node.Data.length).to.be.above(0) - expect(Buffer.isBuffer(node.Data)).to.be.true() - expect(node.size).to.be.above(0) - - const serialized = dagPB.util.serialize(node) - - const deserialized = dagPB.util.deserialize(serialized) - expect(node.Data).to.eql(deserialized.Data) - }) - - it('create a node with links', () => { - const l1 = [{ - Name: 'some other link', - Hash: new CID('QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V'), - Tsize: 8 - }, { - Name: 'some link', - Hash: new CID('QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39U'), - Tsize: 10 - }] - - const someData = Buffer.from('some data') - - const node1 = new DAGNode(someData, l1) - const l2 = l1.map((l) => { - return new DAGLink(l.Name, l.Tsize, l.Hash) - }) - - const node2 = new DAGNode(someData, l2) - expect(node2.Links).to.containSubset([l1[1], l1[0]]) - expect(node1.toJSON()).to.eql(node2.toJSON()) - - // check sorting - expect(node1.Links.map((l) => l.Name)).to.be.eql([ - 'some link', - 'some other link' - ]) - }) - - it('create a node with sorted links', () => { - const links = [{ - Name: '', - Hash: new CID('QmUGhP2X8xo9dsj45vqx1H6i5WqPqLqmLQsHTTxd3ke8mp'), - Tsize: 262158 - }, { - Name: '', - Hash: new CID('QmP7SrR76KHK9A916RbHG1ufy2TzNABZgiE23PjZDMzZXy'), - Tsize: 262158 - }, { - Name: '', - Hash: new CID('QmQg1v4o9xdT3Q14wh4S7dxZkDjyZ9ssFzFzyep1YrVJBY'), - Tsize: 262158 - }, { - Name: '', - Hash: new CID('QmdP6fartWRrydZCUjHgrJ4XpxSE4SAoRsWJZ1zJ4MWiuf'), - Tsize: 262158 - }, { - Name: '', - Hash: new CID('QmNNjUStxtMC1WaSZYiDW6CmAUrvd5Q2e17qnxPgVdwrwW'), - Tsize: 262158 - }, { - Name: '', - Hash: new CID('QmWJwqZBJWerHsN1b7g4pRDYmzGNnaMYuD3KSbnpaxsB2h'), - Tsize: 262158 - }, { - Name: '', - Hash: new CID('QmRXPSdysBS3dbUXe6w8oXevZWHdPQWaR2d3fggNsjvieL'), - Tsize: 262158 - }, { - Name: '', - Hash: new CID('QmTUZAXfws6zrhEksnMqLxsbhXZBQs4FNiarjXSYQqVrjC'), - Tsize: 262158 - }, { - Name: '', - Hash: new CID('QmNNk7dTdh8UofwgqLNauq6N78DPc6LKK2yBs1MFdx7Mbg'), - Tsize: 262158 - }, { - Name: '', - Hash: new CID('QmW5mrJfyqh7B4ywSvraZgnWjS3q9CLiYURiJpCX3aro5i'), - Tsize: 262158 - }, { - Name: '', - Hash: new CID('QmTFHZL5CkgNz19MdPnSuyLAi6AVq9fFp81zmPpaL2amED'), - Tsize: 262158 - }] - - const node = new DAGNode(Buffer.from('some data'), links) - const serialized = node.serialize() - const deserialized = dagPB.util.deserialize(serialized) - - // check sorting - expect(deserialized.Links.map((l) => l.Hash)).to.be.eql(links.map(l => l.Hash)) - }) - - it('create with empty link name', () => { - const node = new DAGNode(Buffer.from('hello'), [ - new DAGLink('', 10, 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39U') - ]) - expect(node.Links[0].Name).to.be.eql('') - }) - - it('create with undefined link name', () => { - const node = new DAGNode(Buffer.from('hello'), [ - new DAGLink(undefined, 10, 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39U') - ]) - expect(node.Links[0].Name).to.be.eql('') - const serialized = node.serialize() - const deserialized = dagPB.util.deserialize(serialized) - for (const key of Object.keys(node)) { - if (key !== '_serializedSize') { - expect(node[key]).to.deep.equal(deserialized[key]) - } - } - }) - - it('create an empty node', () => { - // this node is not in the repo as we don't copy node data to the browser - const node = new DAGNode(Buffer.alloc(0)) - expect(node.Data.length).to.be.equal(0) - expect(Buffer.isBuffer(node.Data)).to.be.true() - expect(node.size).to.be.equal(0) - - const serialized = 
dagPB.util.serialize(node) - const deserialized = dagPB.util.deserialize(serialized) - expect(node.Data).to.eql(deserialized.Data) - }) - - it('fail to create a node with other data types', () => { - expect(() => new DAGNode({})).to.throw( - 'Passed \'data\' is not a buffer or a string!' - ) - expect(() => new DAGNode([])).to.throw( - 'Passed \'data\' is not a buffer or a string!' - ) - }) - - it('addLink by DAGNode', async () => { - const node1 = new DAGNode(Buffer.from('1')) - const node2 = new DAGNode(Buffer.from('2')) - node1.addLink(await node2.toDAGLink()) - expect(node1.Links.length).to.equal(1) - expect(node1.Links[0].Tsize).to.eql(node2.size) - expect(node1.Links[0].Name).to.be.eql('') - }) - - it('addLink by DAGLink', async () => { - const node1 = new DAGNode(Buffer.from('1')) - const node2 = new DAGNode(Buffer.from('2')) - const link = await node2.toDAGLink() - node1.addLink(link) - expect(node1.Links.length).to.equal(1) - expect(node1.Links[0].Tsize).to.eql(node2.size) - expect(node1.Links[0].Name).to.be.eql('') - }) - - it('addLink by object', async () => { - const node1 = new DAGNode(Buffer.from('1')) - const node2 = new DAGNode(Buffer.from('2')) - const link = await node2.toDAGLink() - const linkObject = link.toJSON() - node1.addLink(linkObject) - expect(node1.Links.length).to.equal(1) - expect(node1.Links[0].Tsize).to.eql(node2.size) - expect(node1.Links[0].Name).to.be.eql('') - }) - - it('addLink by name', async () => { - const node1 = new DAGNode(Buffer.from('1')) - const node2 = new DAGNode(Buffer.from('2')) - const link = await node2.toDAGLink({ name: 'banana' }) - expect(node1.Links.length).to.equal(0) - node1.addLink(link) - expect(node1.Links.length).to.equal(1) - expect(node1.Links[0].Tsize).to.eql(node2.size) - expect(node1.Links[0].Name).to.eql('banana') - }) - - it('addLink - add several links', async () => { - const node1 = new DAGNode(Buffer.from('1')) - expect(node1.Links.length).to.equal(0) - - const node2 = new DAGNode(Buffer.from('2')) - node1.addLink(await node2.toDAGLink()) - expect(node1.Links.length).to.equal(1) - - const node3 = new DAGNode(Buffer.from('3')) - node1.addLink(await node3.toDAGLink()) - expect(node1.Links.length).to.equal(2) - }) - - it('addLink by DAGNode.Links', async () => { - const linkName = 'link-name' - const remote = new DAGNode(Buffer.from('2')) - const source = new DAGNode(Buffer.from('1')) - source.addLink(await remote.toDAGLink({ name: linkName })) - - expect(source.Links.length).to.equal(1) - - const target = new DAGNode(null, [], 0) - target.addLink(source.Links[0]) - - expect(target.Links.length).to.equal(1) - expect(target.Links[0].Tsize).to.eql(remote.size) - expect(target.Links[0].Name).to.be.eql(linkName) - }) - - it('rmLink by name', async () => { - const node1 = new DAGNode(Buffer.from('1')) - expect(node1.Links.length).to.eql(0) - const withoutLink = node1.toJSON() - - const node2 = new DAGNode(Buffer.from('2')) - const link = await node2.toDAGLink({ name: 'banana' }) - - node1.addLink(link) - expect(node1.Links.length).to.eql(1) - node1.rmLink('banana') - expect(node1.Links.length).to.eql(0) - expect(node1.toJSON()).to.eql(withoutLink) - }) - - it('rmLink by hash', async () => { - const node1 = new DAGNode(Buffer.from('1')) - expect(node1.Links.length).to.eql(0) - const withoutLink = node1.toJSON() - - const node2 = new DAGNode(Buffer.from('2')) - const link = await node2.toDAGLink({ name: 'banana' }) - - node1.addLink(link) - expect(node1.Links.length).to.eql(1) - node1.rmLink(node1.Links[0].Hash) - 
expect(node1.Links.length).to.eql(0) - expect(node1.toJSON()).to.eql(withoutLink) - }) - - it('get node CID', async () => { - const node = new DAGNode(Buffer.from('some data')) - const serialized = dagPB.util.serialize(node) - const cid = await dagPB.util.cid(serialized) - expect(cid.multihash).to.exist() - expect(cid.codec).to.equal('dag-pb') - expect(cid.version).to.equal(1) - const mh = multihash.decode(cid.multihash) - expect(mh.name).to.equal('sha2-256') - }) - - it('get node CID with version', async () => { - const node = new DAGNode(Buffer.from('some data')) - const serialized = dagPB.util.serialize(node) - const cid = await dagPB.util.cid(serialized, { cidVersion: 0 }) - expect(cid.multihash).to.exist() - expect(cid.codec).to.equal('dag-pb') - expect(cid.version).to.equal(0) - const mh = multihash.decode(cid.multihash) - expect(mh.name).to.equal('sha2-256') - }) - - it('get node CID with hashAlg', async () => { - const node = new DAGNode(Buffer.from('some data')) - const serialized = dagPB.util.serialize(node) - const cid = await dagPB.util.cid(serialized, { hashAlg: multicodec.SHA2_512 }) - expect(cid.multihash).to.exist() - expect(cid.codec).to.equal('dag-pb') - expect(cid.version).to.equal(1) - const mh = multihash.decode(cid.multihash) - expect(mh.name).to.equal('sha2-512') - }) - - it('node size updates with mutation', async () => { - // see pbcross.go for the source of the sizes and CIDs here - - async function cid (node) { - const serialized = dagPB.util.serialize(node) - const cid = await dagPB.util.cid(serialized, { cidVersion: 0 }) - return cid.toBaseEncodedString() - } - - async function rawBlockCid (str) { - const raw = Buffer.from(str) - const rawHash = await multihashing(raw, 'sha2-256') - return new CID(1, 'raw', rawHash) - } - - // raw nodes - const rnd1 = await rawBlockCid('aaaa') - const rnd2 = await rawBlockCid('bbbb') - const rnd3 = await rawBlockCid('cccc') - - // empty PB nodes - const pnd1 = new DAGNode() - const pnd2 = new DAGNode() - const pnd3 = new DAGNode() - - // sanity check empty nodes - for (const node of [pnd1, pnd2, pnd3]) { - expect(node.size).to.equal(0) - expect(await cid(node)).to.equal('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n') - } - - // named PB links to a raw nodes - const cat = new DAGLink('cat', 4, rnd1) - const dog = new DAGLink('dog', 4, rnd2) - const bear = new DAGLink('bear', 4, rnd3) - - // pnd1 - // links by constructor and addLink should yield the same node - const pnd1ByConstructor = new DAGNode(null, [cat]) - expect(pnd1ByConstructor.size).to.equal(51) - expect(await cid(pnd1ByConstructor)).to.equal('QmdwjhxpxzcMsR3qUuj7vUL8pbA7MgR3GAxWi2GLHjsKCT') - - pnd1.addLink(cat) - expect(pnd1.size).to.equal(51) - expect(await cid(pnd1)).to.equal('QmdwjhxpxzcMsR3qUuj7vUL8pbA7MgR3GAxWi2GLHjsKCT') - - // pnd2 - const pnd1Link = await pnd1.toDAGLink({ name: 'first', cidVersion: 0 }) - const pnd2ByConstructor = new DAGNode(null, [pnd1Link, dog]) - expect(pnd2ByConstructor.size).to.equal(149) - expect(await cid(pnd2ByConstructor)).to.equal('QmWXZxVQ9yZfhQxLD35eDR8LiMRsYtHxYqTFCBbJoiJVys') - - pnd2.addLink(pnd1Link) - pnd2.addLink(dog) - expect(pnd2.size).to.equal(149) - expect(await cid(pnd2)).to.equal('QmWXZxVQ9yZfhQxLD35eDR8LiMRsYtHxYqTFCBbJoiJVys') - - // pnd3 - const pnd2Link = await pnd2.toDAGLink({ name: 'second', cidVersion: 0 }) - const pnd3ByConstructor = new DAGNode(null, [pnd2Link, bear]) - expect(pnd3ByConstructor.size).to.equal(250) - expect(await 
cid(pnd3ByConstructor)).to.equal('QmNX6Tffavsya4xgBi2VJQnSuqy9GsxongxZZ9uZBqp16d') - - pnd3.addLink(pnd2Link) - pnd3.addLink(bear) - expect(pnd3.size).to.equal(250) - expect(await cid(pnd3)).to.equal('QmNX6Tffavsya4xgBi2VJQnSuqy9GsxongxZZ9uZBqp16d') - }) - - it('marshal a node and store it with block-service', async () => { - const node = new DAGNode(Buffer.from('some data')) - const serialized = dagPB.util.serialize(node) - const cid = await dagPB.util.cid(serialized) - const block = new Block(Buffer.from(serialized), cid) - - await bs.put(block) - const retrievedBlock = await bs.get(block.cid) - expect(retrievedBlock).to.eql(block) - }) - - it('deserialize go-ipfs block from ipldResolver', async () => { - if (!isNode) { - return - } - - const cidStr = 'QmYwAPJzv5CZsnA625s3Xf2nemtYgPpHdWEz79ojWnPbdG' - const cid = new CID(cidStr) - - const block = await bs.get(cid) - const node = dagPB.util.deserialize(block.data) - expect(node.Data).to.exist() - expect(node.Links.length).to.equal(6) - }) - - it('deserialize go-ipfs block with unnamed links', async () => { - const buf = testBlockUnnamedLinks - - const expectedLinks = [ - { - name: '', - cid: 'QmSbCgdsX12C4KDw3PDmpBN9iCzS87a5DjgSCoW9esqzXk', - size: 45623854 - }, - { - name: '', - cid: 'Qma4GxWNhywSvWFzPKtEswPGqeZ9mLs2Kt76JuBq9g3fi2', - size: 45623854 - }, - { - name: '', - cid: 'QmQfyxyys7a1e3mpz9XsntSsTGc8VgpjPj5BF1a1CGdGNc', - size: 45623854 - }, - { - name: '', - cid: 'QmSh2wTTZT4N8fuSeCFw7wterzdqbE93j1XDhfN3vQHzDV', - size: 45623854 - }, - { - name: '', - cid: 'QmVXsSVjwxMsCwKRCUxEkGb4f4B98gXVy3ih3v4otvcURK', - size: 45623854 - }, - { - name: '', - cid: 'QmZjhH97MEYwQXzCqSQbdjGDhXWuwW4RyikR24pNqytWLj', - size: 45623854 - }, - { - name: '', - cid: 'QmRs6U5YirCqC7taTynz3x2GNaHJZ3jDvMVAzaiXppwmNJ', - size: 32538395 - } - ] - - const node = dagPB.util.deserialize(buf) - const nodeJSON = node.toJSON() - expect(nodeJSON.links).to.eql(expectedLinks) - - const cid = await dagPB.util.cid(buf, { cidVersion: 0 }) - expect(cid.toBaseEncodedString()).to.eql( - 'QmQqy2SiEkKgr2cw5UbQ93TtLKEMsD8TdcWggR8q9JabjX') - }) - - it('deserialize go-ipfs block with named links', async () => { - const buf = testBlockNamedLinks - - const expectedLinks = [ - { - name: 'audio_only.m4a', - cid: 'QmaUAwAQJNtvUdJB42qNbTTgDpzPYD1qdsKNtctM5i7DGB', - size: 23319629 - }, - { - name: 'chat.txt', - cid: 'QmNVrxbB25cKTRuKg2DuhUmBVEK9NmCwWEHtsHPV6YutHw', - size: 996 - }, - { - name: 'playback.m3u', - cid: 'QmUcjKzDLXBPmB6BKHeKSh6ZoFZjss4XDhMRdLYRVuvVfu', - size: 116 - }, - { - name: 'zoom_0.mp4', - cid: 'QmQqy2SiEkKgr2cw5UbQ93TtLKEMsD8TdcWggR8q9JabjX', - size: 306281879 - } - ] - - const node = dagPB.util.deserialize(buf) - const nodeJSON = node.toJSON() - expect(nodeJSON.links).to.eql(expectedLinks) - - const cid = await dagPB.util.cid(buf, { cidVersion: 0 }) - expect(cid.toBaseEncodedString()).to.eql( - 'QmbSAC58x1tsuPBAoarwGuTQAgghKvdbKSBC8yp5gKCj5M') - }) - - it('dagNode.toJSON with empty Node', () => { - const node = new DAGNode(Buffer.alloc(0)) - expect(node.toJSON().data).to.eql(Buffer.alloc(0)) - expect(node.toJSON().links).to.eql([]) - expect(node.toJSON().size).to.exist() - }) - - it('dagNode.toJSON with data no links', () => { - const data = Buffer.from('La cucaracha') - const node = new DAGNode(data) - expect(node.toJSON().data).to.eql(data) - expect(node.toJSON().links).to.eql([]) - expect(node.toJSON().size).to.exist() - }) - - it('add two nameless links to a node', () => { - const l1 = { - Name: '', - Hash: 'QmbAmuwox51c91FmC2jEX5Ng4zS4HyVgpA5GNPBF5QsWMA', - 
Size: 57806 - } - - const l2 = { - Name: '', - Hash: 'QmP7SrR76KHK9A916RbHG1ufy2TzNABZgiE23PjZDMzZXy', - Size: 262158 - } - const link1 = new DAGLink( - l1.Name, - l1.Tsize, - multibase.decode('z' + l1.Hash) - ) - const link2 = new DAGLink( - l2.Name, - l2.Tsize, - multibase.decode('z' + l2.Hash) - ) - - const node = new DAGNode(Buffer.from('hiya'), [link1, link2]) - expect(node.Links).to.have.lengthOf(2) - }) - - it('toString', () => { - const node = new DAGNode(Buffer.from('hello world')) - const expected = 'DAGNode { }) it('should serialize a node with data', () => { - const data = Buffer.from([0, 1, 2, 3]) + const data = Uint8Array.from([0, 1, 2, 3]) const result = serialize({ Data: data }) expect(result).to.be.an.instanceof(Uint8Array) @@ -37,7 +36,7 @@ describe('util', () => { expect(result).to.be.an.instanceof(Uint8Array) const node = deserialize(result) - expect(node.Data).to.deep.equal(Buffer.from([0, 1, 2, 3])) + expect(node.Data).to.deep.equal(Uint8Array.from([0, 1, 2, 3])) }) it('should serialize a node with links', () => {
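A minimal usage sketch of the post-migration API documented in the README hunks above, not part of the patch itself. The link name, size and CID are placeholder values, and it assumes the documented `DAGNode`/`DAGLink` constructors, the synchronous `node.serialize()` / `dagPB.util.deserialize()` used throughout the tests, and the helpers from the `uint8arrays` dependency this change introduces.

```JavaScript
'use strict'

// Sketch only: exercises the Uint8Array-based API introduced by this change.
const dagPB = require('ipld-dag-pb')
const { DAGNode, DAGLink } = dagPB
const uint8ArrayFromString = require('uint8arrays/from-string')
const uint8ArrayEquals = require('uint8arrays/equals')

// Data may be a Uint8Array or a plain string (converted internally)
const node = new DAGNode(uint8ArrayFromString('some data'))

// The link name, size and CID below are placeholder values
node.addLink(new DAGLink('child', 10, 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39U'))

// serialize() now returns a Uint8Array rather than a Buffer
const serialized = node.serialize()
console.log(serialized instanceof Uint8Array) // true

// Round-trip through util.deserialize() and compare bytes without Buffer
const decoded = dagPB.util.deserialize(serialized)
console.log(uint8ArrayEquals(node.Data, decoded.Data)) // true
```

Because Node's `Buffer` is itself a `Uint8Array` subclass, callers that still pass `Buffer`s keep working; only code that relied on return values being `Buffer`s (for example calling `.toString('hex')` on them) needs to switch to the `uint8arrays` utilities.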