diff --git a/package.json b/package.json index ec31c57acb..80631798e4 100644 --- a/package.json +++ b/package.json @@ -37,13 +37,14 @@ }, "homepage": "https://github.com/ipfs/js-ipfs#readme", "devDependencies": { - "aegir": "^3.0.1", + "aegir": "^3.0.2", "buffer-loader": "0.0.1", "chai": "^3.5.0", "expose-loader": "^0.7.1", "form-data": "^1.0.0-rc3", "gulp": "^3.9.1", "idb-plus-blob-store": "^1.1.2", + "interface-ipfs-core": "^0.1.5", "libp2p-ipfs-browser": "^0.2.0", "lodash": "^4.11.2", "mocha": "^2.4.5", @@ -63,24 +64,25 @@ "fs-blob-store": "^5.2.1", "glob": "^7.0.3", "hapi": "^13.3.0", - "ipfs-api": "^3.0.2", + "ipfs-api": "^4.0.2", "ipfs-bitswap": "^0.2.0", "ipfs-block": "^0.3.0", "ipfs-block-service": "^0.4.0", - "ipfs-merkle-dag": "^0.5.1", + "ipfs-merkle-dag": "^0.6.0", "ipfs-multipart": "^0.1.0", "ipfs-repo": "^0.8.0", "ipfs-unixfs-engine": "^0.6.1", "joi": "^8.0.5", - "libp2p-ipfs": "^0.3.3", + "libp2p-ipfs": "^0.3.8", "libp2p-swarm": "^0.12.11", - "lodash.get": "^4.2.1", - "lodash.set": "^4.1.0", + "lodash.get": "^4.3.0", + "lodash.set": "^4.2.0", "multiaddr": "^1.4.1", "path-exists": "^3.0.0", "peer-book": "^0.1.1", "peer-id": "^0.6.6", "peer-info": "^0.6.2", + "promisify-es6": "^1.0.1", "readable-stream": "1.1.13", "ronin": "^0.3.11", "run-parallel": "^1.1.6", diff --git a/src/cli/commands/block/get.js b/src/cli/commands/block/get.js index 9d3d2d97ad..7ca1f84432 100644 --- a/src/cli/commands/block/get.js +++ b/src/cli/commands/block/get.js @@ -28,7 +28,6 @@ module.exports = Command.extend({ ipfs.block.get(mh, (err, block) => { if (err) { - log.error(err) throw err } diff --git a/src/cli/commands/block/put.js b/src/cli/commands/block/put.js index 6ebec7a1e5..619e925669 100644 --- a/src/cli/commands/block/put.js +++ b/src/cli/commands/block/put.js @@ -19,7 +19,6 @@ function addBlock (buf) { if (utils.isDaemonOn()) { return ipfs.block.put(buf, (err, block) => { if (err) { - log.error(err) throw err } @@ -31,7 +30,6 @@ function addBlock (buf) { ipfs.block.put(block, (err, obj) => { if (err) { - log.error(err) throw err } @@ -52,7 +50,6 @@ module.exports = Command.extend({ process.stdin.pipe(bl((err, input) => { if (err) { - log.error(err) throw err } diff --git a/src/cli/commands/block/rm.js b/src/cli/commands/block/rm.js index 1fa26acd89..9fea64c4dc 100644 --- a/src/cli/commands/block/rm.js +++ b/src/cli/commands/block/rm.js @@ -31,7 +31,6 @@ module.exports = Command.extend({ ipfs.block.del(mh, (err) => { if (err) { - log.error(err) throw err } diff --git a/src/cli/commands/block/stat.js b/src/cli/commands/block/stat.js index a6e4a6c440..a7ad13657a 100644 --- a/src/cli/commands/block/stat.js +++ b/src/cli/commands/block/stat.js @@ -28,7 +28,6 @@ module.exports = Command.extend({ ipfs.block.stat(mh, (err, block) => { if (err) { - log.error(err) throw err } diff --git a/src/cli/commands/bootstrap/add.js b/src/cli/commands/bootstrap/add.js index 80625d1dd9..820a78ced0 100644 --- a/src/cli/commands/bootstrap/add.js +++ b/src/cli/commands/bootstrap/add.js @@ -18,7 +18,7 @@ module.exports = Command.extend({ } ipfs.bootstrap.add(multiaddr, (err, list) => { if (err) { - return log.error(err) + throw err } }) }) diff --git a/src/cli/commands/bootstrap/rm.js b/src/cli/commands/bootstrap/rm.js index c8a41665cf..0cf1a93560 100644 --- a/src/cli/commands/bootstrap/rm.js +++ b/src/cli/commands/bootstrap/rm.js @@ -18,7 +18,7 @@ module.exports = Command.extend({ } ipfs.bootstrap.rm(multiaddr, (err, list) => { if (err) { - return log.error(err) + throw err } }) }) diff --git 
a/src/cli/commands/config/edit.js b/src/cli/commands/config/edit.js index b510f1e640..dfe9061199 100644 --- a/src/cli/commands/config/edit.js +++ b/src/cli/commands/config/edit.js @@ -61,7 +61,6 @@ module.exports = Command.extend({ child.on('exit', (err, code) => { if (err) { - log.error(err) throw new Error('error on the editor') } diff --git a/src/cli/commands/daemon.js b/src/cli/commands/daemon.js index ae3b02dc47..65d43723f7 100644 --- a/src/cli/commands/daemon.js +++ b/src/cli/commands/daemon.js @@ -16,7 +16,7 @@ module.exports = Command.extend({ httpAPI = new HttpAPI() httpAPI.start((err) => { if (err) { - return log.error(err) + throw err } console.log('Daemon is ready') }) @@ -25,7 +25,7 @@ module.exports = Command.extend({ console.log('Received interrupt signal, shutting down..') httpAPI.stop((err) => { if (err) { - return log.error(err) + throw err } process.exit(0) }) diff --git a/src/cli/commands/id.js b/src/cli/commands/id.js index 27240aaa91..4dea1c261e 100644 --- a/src/cli/commands/id.js +++ b/src/cli/commands/id.js @@ -24,7 +24,7 @@ module.exports = Command.extend({ ipfs.id((err, id) => { if (err) { - return log.error(err) + throw err } console.log(id) }) diff --git a/src/cli/commands/object/data.js b/src/cli/commands/object/data.js index f6f32b8051..173608bf05 100644 --- a/src/cli/commands/object/data.js +++ b/src/cli/commands/object/data.js @@ -2,7 +2,6 @@ const Command = require('ronin').Command const utils = require('../../utils') -const bs58 = require('bs58') const debug = require('debug') const log = debug('cli:object') log.error = debug('cli:object:error') @@ -21,23 +20,13 @@ module.exports = Command.extend({ if (err) { throw err } - const mh = utils.isDaemonOn() - ? key - : new Buffer(bs58.decode(key)) - ipfs.object.data(mh, (err, data) => { + ipfs.object.data(key, {enc: 'base58'}, (err, data) => { if (err) { - log.error(err) throw err } - if (data instanceof Buffer) { - console.log(data.toString()) - return - } - - // js-ipfs-api output (http stream) - data.pipe(process.stdout) + console.log(data.toString()) }) }) } diff --git a/src/cli/commands/object/get.js b/src/cli/commands/object/get.js index 3b85740ff5..afd08948b7 100644 --- a/src/cli/commands/object/get.js +++ b/src/cli/commands/object/get.js @@ -2,7 +2,6 @@ const Command = require('ronin').Command const utils = require('../../utils') -const bs58 = require('bs58') const debug = require('debug') const log = debug('cli:object') log.error = debug('cli:object:error') @@ -21,32 +20,15 @@ module.exports = Command.extend({ if (err) { throw err } - if (utils.isDaemonOn()) { - return ipfs.object.get(key, (err, obj) => { - if (err) { - log.error(err) - throw err - } - console.log(JSON.stringify(obj)) - }) - } - - const mh = new Buffer(bs58.decode(key)) - ipfs.object.get(mh, (err, obj) => { + ipfs.object.get(key, {enc: 'base58'}, (err, node) => { if (err) { - log.error(err) throw err } - console.log(JSON.stringify({ - Links: obj.links.map((link) => ({ - Name: link.name, - Hash: bs58.encode(link.hash).toString(), - Size: link.size - })), - Data: obj.data.toString() - })) + const res = node.toJSON() + res.Data = res.Data ? 
res.Data.toString() : '' + console.log(JSON.stringify(res)) }) }) } diff --git a/src/cli/commands/object/links.js b/src/cli/commands/object/links.js index e0d16ed4df..b2d0959013 100644 --- a/src/cli/commands/object/links.js +++ b/src/cli/commands/object/links.js @@ -2,7 +2,6 @@ const Command = require('ronin').Command const utils = require('../../utils') -const bs58 = require('bs58') const debug = require('debug') const log = debug('cli:object') log.error = debug('cli:object:error') @@ -21,25 +20,15 @@ module.exports = Command.extend({ if (err) { throw err } - const mh = utils.isDaemonOn() - ? key - : new Buffer(bs58.decode(key)) - ipfs.object.links(mh, (err, links) => { + ipfs.object.links(key, {enc: 'base58'}, (err, links) => { if (err) { - log.error(err) throw err } - if (links.Links) { // js-ipfs-api output - links.Links.forEach((link) => { - console.log(link.Hash, link.Size, link.Name) - }) - return - } - links.forEach((link) => { - console.log(bs58.encode(link.hash).toString(), link.size, link.name) + link = link.toJSON() + console.log(link.Hash, link.Size, link.Name) }) }) }) diff --git a/src/cli/commands/object/new.js b/src/cli/commands/object/new.js index 58e183cd45..e7b0f4b2b2 100644 --- a/src/cli/commands/object/new.js +++ b/src/cli/commands/object/new.js @@ -2,7 +2,6 @@ const Command = require('ronin').Command const utils = require('../../utils') -const bs58 = require('bs58') const debug = require('debug') const log = debug('cli:object') log.error = debug('cli:object:error') @@ -12,23 +11,18 @@ module.exports = Command.extend({ options: {}, - run: (template) => { + run: () => { utils.getIPFS((err, ipfs) => { if (err) { throw err } - ipfs.object.new(template, (err, obj) => { + + ipfs.object.new((err, node) => { if (err) { - log.error(err) throw err } - if (typeof obj.Hash === 'string') { // js-ipfs-api output - console.log(obj.Hash) - return - } - - console.log(bs58.encode(obj.Hash).toString()) + console.log(node.toJSON().Hash) }) }) } diff --git a/src/cli/commands/object/patch/add-link.js b/src/cli/commands/object/patch/add-link.js index a9289c1b2d..e5837c615f 100644 --- a/src/cli/commands/object/patch/add-link.js +++ b/src/cli/commands/object/patch/add-link.js @@ -2,7 +2,6 @@ const Command = require('ronin').Command const utils = require('../../../utils') -const bs58 = require('bs58') const debug = require('debug') const log = debug('cli:object') const mDAG = require('ipfs-merkle-dag') @@ -30,36 +29,17 @@ module.exports = Command.extend({ throw err } - if (utils.isDaemonOn()) { - return ipfs.object.patch.addLink(root, name, ref, (err, obj) => { - if (err) { - log.error(err) - throw err - } - - console.log(obj.Hash) - }) - } - - // when running locally we first need to get the ref object, - // so we can create the link with the correct size - const refMh = new Buffer(bs58.decode(ref)) - ipfs.object.get(refMh, (err, linkedObj) => { - if (err) { - log.error(err) - throw err - } - - const rootMh = new Buffer(bs58.decode(root)) - const link = new DAGLink(name, linkedObj.size(), linkedObj.multihash()) - ipfs.object.patch.addLink(rootMh, link, (err, obj) => { - if (err) { - log.error(err) - throw err - } - - console.log(bs58.encode(obj.multihash()).toString()) - }) + ipfs.object.get(ref, {enc: 'base58'}).then((linkedObj) => { + const link = new DAGLink( + name, + linkedObj.size(), + linkedObj.multihash() + ) + return ipfs.object.patch.addLink(root, link, {enc: 'base58'}) + }).then((node) => { + console.log(node.toJSON().Hash) + }).catch((err) => { + throw err }) }) } diff --git 
a/src/cli/commands/object/patch/append-data.js b/src/cli/commands/object/patch/append-data.js index 23b0e14f83..dd420943b7 100644 --- a/src/cli/commands/object/patch/append-data.js +++ b/src/cli/commands/object/patch/append-data.js @@ -2,33 +2,24 @@ const Command = require('ronin').Command const utils = require('../../../utils') -const bs58 = require('bs58') const bl = require('bl') const fs = require('fs') const debug = require('debug') const log = debug('cli:object') log.error = debug('cli:object:error') -function appendData (keyStr, data) { +function appendData (key, data) { utils.getIPFS((err, ipfs) => { if (err) { throw err } - const key = utils.isDaemonOn() ? keyStr : new Buffer(bs58.decode(keyStr)) - - ipfs.object.patch.appendData(key, data, (err, obj) => { + ipfs.object.patch.appendData(key, data, {enc: 'base58'}, (err, node) => { if (err) { - log.error(err) throw err } - if (typeof obj.multihash === 'function') { - console.log(bs58.encode(obj.multihash()).toString()) - return - } - - console.log(obj.Hash) + console.log(node.toJSON().Hash) }) }) } @@ -49,7 +40,6 @@ module.exports = Command.extend({ process.stdin.pipe(bl((err, input) => { if (err) { - log.error(err) throw err } diff --git a/src/cli/commands/object/patch/rm-link.js b/src/cli/commands/object/patch/rm-link.js index 818282b0b2..430ebb3fda 100644 --- a/src/cli/commands/object/patch/rm-link.js +++ b/src/cli/commands/object/patch/rm-link.js @@ -1,8 +1,8 @@ 'use strict' const Command = require('ronin').Command +const DAGLink = require('ipfs-merkle-dag').DAGLink const utils = require('../../../utils') -const bs58 = require('bs58') const debug = require('debug') const log = debug('cli:object') log.error = debug('cli:object:error') @@ -25,25 +25,14 @@ module.exports = Command.extend({ throw err } - if (utils.isDaemonOn()) { - return ipfs.object.patch.rmLink(root, link, (err, obj) => { - if (err) { - log.error(err) - throw err - } + const dLink = new DAGLink(link) - console.log(obj.Hash) - }) - } - - const mh = new Buffer(bs58.decode(root)) - ipfs.object.patch.rmLink(mh, link, (err, obj) => { + ipfs.object.patch.rmLink(root, dLink, {enc: 'base58'}, (err, node) => { if (err) { - log.error(err) throw err } - console.log(bs58.encode(obj.multihash()).toString()) + console.log(node.toJSON().Hash) }) }) } diff --git a/src/cli/commands/object/patch/set-data.js b/src/cli/commands/object/patch/set-data.js index 51ef181788..abbe115cf5 100644 --- a/src/cli/commands/object/patch/set-data.js +++ b/src/cli/commands/object/patch/set-data.js @@ -2,33 +2,24 @@ const Command = require('ronin').Command const utils = require('../../../utils') -const bs58 = require('bs58') const bl = require('bl') const fs = require('fs') const debug = require('debug') const log = debug('cli:object') log.error = debug('cli:object:error') -function parseAndAddNode (keyStr, data) { +function parseAndAddNode (key, data) { utils.getIPFS((err, ipfs) => { if (err) { throw err } - const key = utils.isDaemonOn() ? 
keyStr : new Buffer(bs58.decode(keyStr)) - - ipfs.object.patch.setData(key, data, (err, obj) => { + ipfs.object.patch.setData(key, data, {enc: 'base58'}, (err, node) => { if (err) { - log.error(err) throw err } - if (typeof obj.multihash === 'function') { - console.log(bs58.encode(obj.multihash()).toString()) - return - } - - console.log(obj.Hash) + console.log(node.toJSON().Hash) }) }) } @@ -49,7 +40,6 @@ module.exports = Command.extend({ process.stdin.pipe(bl((err, input) => { if (err) { - log.error(err) throw err } diff --git a/src/cli/commands/object/put.js b/src/cli/commands/object/put.js index b5aaa1d449..8b8054a2f5 100644 --- a/src/cli/commands/object/put.js +++ b/src/cli/commands/object/put.js @@ -2,57 +2,24 @@ const Command = require('ronin').Command const utils = require('../../utils') -const bs58 = require('bs58') const bl = require('bl') const fs = require('fs') -const mDAG = require('ipfs-merkle-dag') -const DAGNode = mDAG.DAGNode const debug = require('debug') const log = debug('cli:object') log.error = debug('cli:object:error') -function parseJSONBuffer (buf) { - try { - const parsed = JSON.parse(buf.toString()) - return { - data: new Buffer(parsed.Data), - links: parsed.Links ? parsed.Links.map((link) => ({ - name: link.Name, - hash: new Buffer(bs58.decode(link.Hash)), - size: link.Size - })) : [] - } - } catch (err) { - log.error(err) - throw new Error('failed to parse JSON: ' + err) - } -} - -function parseAndAddNode (buf) { +function putNode (buf, enc) { utils.getIPFS((err, ipfs) => { if (err) { throw err } - if (utils.isDaemonOn()) { - return ipfs.object.put(buf, 'json', (err, obj) => { - if (err) { - log.error(err) - throw err - } - console.log('added', obj.Hash) - }) - } - - const parsed = parseJSONBuffer(buf) - const dagNode = new DAGNode(parsed.data, parsed.links) - ipfs.object.put(dagNode, (err, obj) => { + ipfs.object.put(buf, {enc}, (err, node) => { if (err) { - log.error(err) throw err } - console.log('added', bs58.encode(dagNode.multihash()).toString()) + console.log('added', node.toJSON().Hash) }) }) } @@ -60,20 +27,24 @@ function parseAndAddNode (buf) { module.exports = Command.extend({ desc: 'Stores input as a DAG object, outputs its key', - options: {}, + options: { + inputenc: { + type: 'string', + default: 'json' + } + }, - run: (filePath) => { + run: (inputenc, filePath) => { if (filePath) { - return parseAndAddNode(fs.readFileSync(filePath)) + return putNode(fs.readFileSync(filePath), inputenc) } process.stdin.pipe(bl((err, input) => { if (err) { - log.error(err) throw err } - parseAndAddNode(input) + putNode(input, inputenc) })) } }) diff --git a/src/cli/commands/object/stat.js b/src/cli/commands/object/stat.js index f0adfebe33..a819ae0e2b 100644 --- a/src/cli/commands/object/stat.js +++ b/src/cli/commands/object/stat.js @@ -2,7 +2,6 @@ const Command = require('ronin').Command const utils = require('../../utils') -const bs58 = require('bs58') const debug = require('debug') const log = debug('cli:object') log.error = debug('cli:object:error') @@ -21,13 +20,9 @@ module.exports = Command.extend({ if (err) { throw err } - const mh = utils.isDaemonOn() - ? 
key - : new Buffer(bs58.decode(key)) - ipfs.object.stat(mh, (err, stats) => { + ipfs.object.stat(key, {enc: 'base58'}, (err, stats) => { if (err) { - log.error(err) throw err } diff --git a/src/cli/commands/version.js b/src/cli/commands/version.js index f2708f54d7..1f55de77b2 100644 --- a/src/cli/commands/version.js +++ b/src/cli/commands/version.js @@ -31,7 +31,9 @@ module.exports = Command.extend({ throw err } ipfs.version((err, version) => { - if (err) { return log.error(err) } + if (err) { + throw err + } if (typeof version === 'object') { // js-ipfs-api output console.log('ipfs version', version.Version) diff --git a/src/core/ipfs/object.js b/src/core/ipfs/object.js index 0db31c4158..f1ab133a88 100644 --- a/src/core/ipfs/object.js +++ b/src/core/ipfs/object.js @@ -1,141 +1,247 @@ 'use strict' -const Block = require('ipfs-block') const mDAG = require('ipfs-merkle-dag') +const waterfall = require('run-waterfall') +const promisify = require('promisify-es6') +const bs58 = require('bs58') const DAGNode = mDAG.DAGNode +const DAGLink = mDAG.DAGLink + +function normalizeMultihash (multihash, enc) { + if (typeof multihash === 'string') { + if (enc === 'base58') { + return multihash + } + + return new Buffer(multihash, enc) + } else if (Buffer.isBuffer(multihash)) { + return multihash + } else { + throw new Error('unsupported multihash') + } +} + +function parseBuffer (buf, encoding) { + switch (encoding) { + case 'json': + return parseJSONBuffer(buf) + case 'protobuf': + return parseProtoBuffer(buf) + default: + throw new Error(`unkown encoding: ${encoding}`) + } +} + +function parseJSONBuffer (buf) { + try { + const parsed = JSON.parse(buf.toString()) + const links = (parsed.Links || []).map((link) => { + return new DAGLink( + link.Name, + link.Size, + new Buffer(bs58.decode(link.Hash)) + ) + }) + return new DAGNode(new Buffer(parsed.Data), links) + } catch (err) { + throw new Error('failed to parse JSON: ' + err) + } +} + +function parseProtoBuffer (buf) { + const node = new DAGNode() + node.unMarshal(buf) + return node +} module.exports = function object (self) { - return { - new: (template, callback) => { - if (!callback) { - callback = template + function editAndSave (edit) { + return (multihash, options, cb) => { + if (typeof options === 'function') { + cb = options + options = {} } - var node = new DAGNode() - var block = new Block(node.marshal()) - self._blockS.addBlock(block, function (err) { + + waterfall([ + (cb) => self.object.get(multihash, options, cb), + (node, cb) => { + self._dagS.add(edit(node), (err) => { + cb(err, node) + }) + } + ], (err, node) => { if (err) { - return callback(err) + return cb(err) } - callback(null, { - Hash: block.key, - Size: node.size(), - Name: '' - }) + cb(null, node) }) - }, - patch: { - appendData: (multihash, data, callback) => { - self.object.get(multihash, (err, obj) => { - if (err) { - return callback(err) - } - obj.data = Buffer.concat([obj.data, data]) - self._dagS.add(obj, (err) => { - if (err) { - return callback(err) - } - callback(null, obj) - }) - }) - }, - addLink: (multihash, link, callback) => { - self.object.get(multihash, (err, obj) => { - if (err) { - return callback(err) - } - obj.addRawLink(link) - self._dagS.add(obj, (err) => { - if (err) { - return callback(err) - } - callback(null, obj) - }) - }) - }, - rmLink: (multihash, linkRef, callback) => { - self.object.get(multihash, (err, obj) => { - if (err) { - return callback(err) - } - obj.links = obj.links.filter((link) => { - // filter by name when linkRef is a string, or by 
hash otherwise - if (typeof linkRef === 'string') { - return link.name !== linkRef - } - return !link.hash.equals(linkRef) - }) - self._dagS.add(obj, (err) => { - if (err) { - return callback(err) - } - callback(null, obj) - }) - }) - }, - setData: (multihash, data, callback) => { - self.object.get(multihash, (err, obj) => { - if (err) { return callback(err) } - obj.data = data - self._dagS.add(obj, (err) => { - if (err) { - return callback(err) - } - callback(null, obj) - }) - }) - } - }, - data: (multihash, callback) => { - self.object.get(multihash, (err, obj) => { + } + } + + return { + new: promisify((cb) => { + const node = new DAGNode() + + self._dagS.add(node, function (err) { if (err) { - return callback(err) + return cb(err) } - callback(null, obj.data) + + cb(null, node) }) - }, - links: (multihash, callback) => { - self.object.get(multihash, (err, obj) => { + }), + + put: promisify((obj, options, cb) => { + if (typeof options === 'function') { + cb = options + options = {} + } + + const encoding = options.enc + let node + + if (Buffer.isBuffer(obj)) { + if (encoding) { + try { + node = parseBuffer(obj, encoding) + } catch (err) { + return cb(err) + } + } else { + node = new DAGNode(obj) + } + } else if (obj.multihash) { + // already a dag node + node = obj + } else if (typeof obj === 'object') { + node = new DAGNode(obj.Data, obj.Links) + } else { + return cb(new Error('obj not recognized')) + } + + self._dagS.add(node, (err, block) => { if (err) { - return callback(err) + return cb(err) } - callback(null, obj.links) + + self.object.get(node.multihash(), cb) }) - }, - get: (multihash, options, callback) => { + }), + + get: promisify((multihash, options, cb) => { if (typeof options === 'function') { - callback = options + cb = options options = {} } - self._dagS.get(multihash, callback) - }, - put: (dagNode, options, callback) => { + + let mh + + try { + mh = normalizeMultihash(multihash, options.enc) + } catch (err) { + return cb(err) + } + + self._dagS.get(mh, cb) + }), + + data: promisify((multihash, options, cb) => { if (typeof options === 'function') { - callback = options + cb = options options = {} } - self._dagS.add(dagNode, callback) - }, - stat: (multihash, options, callback) => { + + self.object.get(multihash, options, (err, node) => { + if (err) { + return cb(err) + } + + cb(null, node.data) + }) + }), + + links: promisify((multihash, options, cb) => { if (typeof options === 'function') { - callback = options + cb = options options = {} } - self.object.get(multihash, (err, obj) => { + self.object.get(multihash, options, (err, node) => { if (err) { - return callback(err) + return cb(err) } - var res = { - NumLinks: obj.links.length, - BlockSize: obj.marshal().length, - LinksSize: obj.links.reduce((prev, link) => { - return prev + link.size - }, 0), - DataSize: obj.data.length, - CumulativeSize: '' + + cb(null, node.links) + }) + }), + + stat: promisify((multihash, options, cb) => { + if (typeof options === 'function') { + cb = options + options = {} + } + + self.object.get(multihash, options, (err, node) => { + if (err) { + return cb(err) } - callback(null, res) + + const blockSize = node.marshal().length + const linkLength = node.links.reduce((a, l) => a + l.size, 0) + + cb(null, { + Hash: node.toJSON().Hash, + NumLinks: node.links.length, + BlockSize: blockSize, + LinksSize: blockSize - node.data.length, + DataSize: node.data.length, + CumulativeSize: blockSize + linkLength + }) }) - } + }), + + patch: promisify({ + addLink (multihash, link, options, cb) { + 
editAndSave((node) => { + node.addRawLink(link) + return node + })(multihash, options, cb) + }, + + rmLink (multihash, linkRef, options, cb) { + editAndSave((node) => { + node.links = node.links.filter((link) => { + if (typeof linkRef === 'string') { + return link.name !== linkRef + } + + if (Buffer.isBuffer(linkRef)) { + return !link.hash.equals(linkRef) + } + + if (linkRef.name) { + return link.name !== linkRef.name + } + + return !link.hash.equals(linkRef.hash) + }) + return node + })(multihash, options, cb) + }, + + appendData (multihash, data, options, cb) { + editAndSave((node) => { + node.data = Buffer.concat([node.data, data]) + return node + })(multihash, options, cb) + }, + + setData (multihash, data, options, cb) { + editAndSave((node) => { + node.data = data + return node + })(multihash, options, cb) + } + }) } } diff --git a/src/http-api/resources/object.js b/src/http-api/resources/object.js index ae4728cc07..bcad095d45 100644 --- a/src/http-api/resources/object.js +++ b/src/http-api/resources/object.js @@ -30,41 +30,18 @@ exports.parseKey = (request, reply) => { } } -exports.new = { - // pre request handler that parses the args and returns `template` which is assigned to `request.pre.args` - parseArgs: (request, reply) => { - // TODO improve this validation once request.server.app.ipfs.object.new supports templates - if (request.query.arg === '') { +exports.new = (request, reply) => { + request.server.app.ipfs.object.new((err, node) => { + if (err) { + log.error(err) return reply({ - Message: `template \'${request.query.arg}\' not found`, + Message: `Failed to create object: ${err.message}`, Code: 0 - }).code(500).takeover() + }).code(500) } - return reply({ - template: request.query.arg - }) - }, - - // main route handler which is called after the above `parseArgs`, but only if the args were valid - handler: (request, reply) => { - const template = request.pre.args.template - - request.server.app.ipfs.object.new(template, (err, obj) => { - if (err) { - log.error(err) - return reply({ - Message: 'Failed to create object: ' + err, - Code: 0 - }).code(500) - } - - return reply({ - Hash: bs58.encode(obj.Hash).toString(), - Links: obj.Links || null - }) - }) - } + return reply(node.toJSON()) + }) } exports.get = { @@ -74,8 +51,9 @@ exports.get = { // main route handler which is called after the above `parseArgs`, but only if the args were valid handler: (request, reply) => { const key = request.pre.args.key + const enc = request.query.enc || 'base58' - request.server.app.ipfs.object.get(key, (err, obj) => { + request.server.app.ipfs.object.get(key, {enc}, (err, node) => { if (err) { log.error(err) return reply({ @@ -84,14 +62,9 @@ exports.get = { }).code(500) } - return reply({ - Links: obj.links.map((link) => ({ - Name: link.name, - Hash: bs58.encode(link.hash).toString(), - Size: link.size - })), - Data: obj.data.toString() - }) + const res = node.toJSON() + res.Data = res.Data ? 
res.Data.toString() : '' + return reply(res) }) } } @@ -133,15 +106,7 @@ exports.put = { // main route handler which is called after the above `parseArgs`, but only if the args were valid handler: (request, reply) => { const node = request.pre.args.node - - const data = new Buffer(node.Data) - const links = node.Links.map((link) => ({ - name: link.Name, - hash: new Buffer(bs58.decode(link.Hash)), - size: link.Size - })) - - const dagNode = new DAGNode(data, links) + const dagNode = new DAGNode(new Buffer(node.Data), node.Links) request.server.app.ipfs.object.put(dagNode, (err, obj) => { if (err) { @@ -151,15 +116,7 @@ exports.put = { Code: 0 }).code(500) } - - return reply({ - Hash: bs58.encode(dagNode.multihash()).toString(), - Links: dagNode.links.map((link) => ({ - Name: link.name, - Hash: bs58.encode(link.hash).toString(), - Size: link.size - })) - }) + return reply(dagNode.toJSON()) }) } } @@ -181,14 +138,7 @@ exports.stat = { }).code(500) } - return reply({ - Hash: bs58.encode(key).toString(), - NumLinks: stats.NumLinks, - BlockSize: stats.BlockSize, - LinksSize: stats.LinksSize, - DataSize: stats.DataSize - // CumulativeSize: stats.CumulativeSize - }) + return reply(stats) }) } } @@ -223,7 +173,7 @@ exports.links = { handler: (request, reply) => { const key = request.pre.args.key - request.server.app.ipfs.object.links(key, (err, links) => { + request.server.app.ipfs.object.get(key, (err, node) => { if (err) { log.error(err) return reply({ @@ -232,13 +182,10 @@ exports.links = { }).code(500) } + const res = node.toJSON() return reply({ - Hash: bs58.encode(key).toString(), - Links: links.map((link) => ({ - Name: link.name, - Hash: bs58.encode(link.hash).toString(), - Size: link.size - })) + Hash: res.Hash, + Links: res.Links }) }) } @@ -291,7 +238,7 @@ exports.patchAppendData = { const key = request.pre.args.key const data = request.pre.args.data - request.server.app.ipfs.object.patch.appendData(key, data, (err, obj) => { + request.server.app.ipfs.object.patch.appendData(key, data, (err, node) => { if (err) { log.error(err) @@ -301,14 +248,7 @@ exports.patchAppendData = { }).code(500) } - return reply({ - Hash: bs58.encode(obj.multihash()).toString(), - Links: obj.links.map((link) => ({ - Name: link.name, - Hash: bs58.encode(link.hash).toString(), - Size: link.size - })) - }) + return reply(node.toJSON()) }) } } @@ -322,7 +262,7 @@ exports.patchSetData = { const key = request.pre.args.key const data = request.pre.args.data - request.server.app.ipfs.object.patch.setData(key, data, (err, obj) => { + request.server.app.ipfs.object.patch.setData(key, data, (err, node) => { if (err) { log.error(err) @@ -332,13 +272,10 @@ exports.patchSetData = { }).code(500) } + const res = node.toJSON() return reply({ - Hash: bs58.encode(obj.multihash()).toString(), - Links: obj.links.map((link) => ({ - Name: link.name, - Hash: bs58.encode(link.hash).toString(), - Size: link.size - })) + Hash: res.Hash, + Links: res.Links }) }) } @@ -397,7 +334,7 @@ exports.patchAddLink = { const link = new DAGLink(name, linkedObj.size(), linkedObj.multihash()) - request.server.app.ipfs.object.patch.addLink(root, link, (err, obj) => { + request.server.app.ipfs.object.patch.addLink(root, link, (err, node) => { if (err) { log.error(err) @@ -407,14 +344,7 @@ exports.patchAddLink = { }).code(500) } - return reply({ - Hash: bs58.encode(obj.multihash()).toString(), - Links: obj.links.map((link) => ({ - Name: link.name, - Hash: bs58.encode(link.hash).toString(), - Size: link.size - })) - }) + return reply(node.toJSON()) }) }) } 
@@ -453,7 +383,7 @@ exports.patchRmLink = { const root = request.pre.args.root const link = request.pre.args.link - request.server.app.ipfs.object.patch.rmLink(root, link, (err, obj) => { + request.server.app.ipfs.object.patch.rmLink(root, link, (err, node) => { if (err) { log.error(err) @@ -463,14 +393,7 @@ exports.patchRmLink = { }).code(500) } - return reply({ - Hash: bs58.encode(obj.multihash()).toString(), - Links: obj.links.map((link) => ({ - Name: link.name, - Hash: bs58.encode(link.hash).toString(), - Size: link.size - })) - }) + return reply(node.toJSON()) }) } } diff --git a/src/http-api/routes/object.js b/src/http-api/routes/object.js index 4ca4ee59b3..51d7ba9fe9 100644 --- a/src/http-api/routes/object.js +++ b/src/http-api/routes/object.js @@ -9,10 +9,7 @@ module.exports = (server) => { method: '*', path: '/api/v0/object/new', config: { - pre: [ - { method: resources.object.new.parseArgs, assign: 'args' } - ], - handler: resources.object.new.handler + handler: resources.object.new } }) diff --git a/test/cli-tests/test-object.js b/test/cli-tests/test-object.js index fa49aa131a..ce33d47f87 100644 --- a/test/cli-tests/test-object.js +++ b/test/cli-tests/test-object.js @@ -28,6 +28,7 @@ describe('object', () => { .run((err, stdout, exitcode) => { expect(err).to.not.exist expect(exitcode).to.equal(0) + const result = JSON.parse(stdout[0]) expect(result.Links) .to.deep.equal([]) @@ -58,7 +59,7 @@ describe('object', () => { expect(stdout[1]) .to.equal('BlockSize: 60') expect(stdout[2]) - .to.equal('LinksSize: 8') + .to.equal('LinksSize: 53') expect(stdout[3]) .to.equal('DataSize: 7') done() @@ -132,6 +133,7 @@ describe('object', () => { }) }) + // Waiting for js-ipfs-api to be updated describe('api running', () => { let httpAPI @@ -180,6 +182,7 @@ describe('object', () => { .run((err, stdout, exitcode) => { expect(err).to.not.exist expect(exitcode).to.equal(0) + expect(stdout[0]) .to.equal('added QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm') done() @@ -196,7 +199,7 @@ describe('object', () => { expect(stdout[1]) .to.equal('BlockSize: 60') expect(stdout[2]) - .to.equal('LinksSize: 8') + .to.equal('LinksSize: 53') expect(stdout[3]) .to.equal('DataSize: 7') done() diff --git a/test/core-tests/test-object.js b/test/core-tests/test-object.js index cee1c12a6b..b646f7699b 100644 --- a/test/core-tests/test-object.js +++ b/test/core-tests/test-object.js @@ -1,141 +1,21 @@ -'use strict' - /* eslint-env mocha */ -const expect = require('chai').expect -const IPFS = require('../../src/core') -const bs58 = require('bs58') -const mDAG = require('ipfs-merkle-dag') -const DAGNode = mDAG.DAGNode -const DAGLink = mDAG.DAGLink - -// TODO use arrow funtions again when https://github.com/webpack/webpack/issues/1944 is fixed -describe('object', () => { - var ipfs - - before((done) => { - ipfs = new IPFS(require('./repo-path')) - ipfs.load(done) - }) - - it('new', (done) => { - ipfs.object.new((err, obj) => { - expect(err).to.not.exist - expect(obj).to.have.property('Size', 0) - expect(obj).to.have.property('Name', '') - expect(obj).to.have.property('Hash') - expect(bs58.encode(obj.Hash).toString()) - .to.equal('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n') - expect(obj.Size).to.equal(0) - done() - }) - }) - - it('patch append-data', (done) => { - const mh = new Buffer(bs58.decode('QmYwAPJzv5CZsnA625s3Xf2nemtYgPpHdWEz79ojWnPbdG')) - - ipfs.object.patch.appendData(mh, new Buffer('data data'), (err, obj) => { - expect(err).to.not.exist - expect(mh).to.not.deep.equal(obj.multihash()) - done() - }) - 
}) - - it('patch add-link', (done) => { - const mh = new Buffer(bs58.decode('QmYwAPJzv5CZsnA625s3Xf2nemtYgPpHdWEz79ojWnPbdG')) - - ipfs.object.patch.addLink(mh, new DAGLink('prev', 0, mh), (err, obj) => { - expect(err).to.not.exist - expect(mh).to.not.deep.equal(obj.multihash()) - done() - }) - }) - - describe('patch rm-link', () => { - it('remove link by name', (done) => { - const name = 'about' - const mh = new Buffer(bs58.decode('QmYwAPJzv5CZsnA625s3Xf2nemtYgPpHdWEz79ojWnPbdG')) - - ipfs.object.patch.rmLink(mh, name, (err, obj) => { - expect(err).to.not.exist - expect(mh).to.not.deep.equal(obj.multihash()) - done() - }) - }) - - it('remove link by multihash', (done) => { - const rmmh = new Buffer(bs58.decode('QmZTR5bcpQD7cFgTorqxZDYaew1Wqgfbd2ud9QqGPAkK2V')) - const mh = new Buffer(bs58.decode('QmYwAPJzv5CZsnA625s3Xf2nemtYgPpHdWEz79ojWnPbdG')) - - ipfs.object.patch.rmLink(mh, rmmh, (err, obj) => { - expect(err).to.not.exist - expect(mh).to.not.deep.equal(obj.multihash()) - done() - }) - }) - }) - - it('patch set-data', (done) => { - const mh = new Buffer(bs58.decode('QmYwAPJzv5CZsnA625s3Xf2nemtYgPpHdWEz79ojWnPbdG')) - - ipfs.object.patch.setData(mh, new Buffer('data data data'), (err, obj) => { - expect(err).to.not.exist - expect(mh).to.not.deep.equal(obj.multihash()) - done() - }) - }) - - it('data', (done) => { - const mh = new Buffer(bs58.decode('QmYwAPJzv5CZsnA625s3Xf2nemtYgPpHdWEz79ojWnPbdG')) - ipfs.object.data(mh, (err, data) => { - expect(err).to.not.exist - expect(data).to.deep.equal(new Buffer('\u0008\u0001')) - done() - }) - }) +'use strict' - it('links', (done) => { - const mh = new Buffer(bs58.decode('QmYwAPJzv5CZsnA625s3Xf2nemtYgPpHdWEz79ojWnPbdG')) - ipfs.object.links(mh, (err, links) => { - expect(err).to.not.exist - expect(links.length).to.equal(6) - done() - }) - }) +const test = require('interface-ipfs-core') - it('get', (done) => { - const mh = new Buffer(bs58.decode('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n')) - ipfs.object.get(mh, (err, obj) => { - expect(err).to.not.exist - expect(obj.size()).to.equal(0) - expect(obj).to.have.property('data') - expect(obj).to.have.property('links') - done() - }) - }) +const IPFS = require('../../src/core') - it('put', (done) => { - const node = new DAGNode(new Buffer('Hello, is it me you are looking for')) - ipfs.object.put(node, (err) => { - expect(err).to.not.exist - done() +const common = { + setup: function (cb) { + const ipfs = new IPFS(require('./repo-path')) + ipfs.load(() => { + cb(null, ipfs) }) - }) - - it('stat', (done) => { - const mh = new Buffer(bs58.decode('QmYwAPJzv5CZsnA625s3Xf2nemtYgPpHdWEz79ojWnPbdG')) - ipfs.object.stat(mh, (err, stats) => { - expect(err).to.not.exist + }, + teardown: function (cb) { + cb() + } +} - var expected = { - NumLinks: 6, - BlockSize: 309, - LinksSize: 6067, - DataSize: 2, - CumulativeSize: '' - } - expect(stats).to.deep.equal(expected) - done() - }) - }) -}) +test.object(common) diff --git a/test/http-api-tests/test-object.js b/test/http-api-tests/test-object.js index d74c0edf70..0f89c5f4de 100644 --- a/test/http-api-tests/test-object.js +++ b/test/http-api-tests/test-object.js @@ -6,6 +6,7 @@ const APIctl = require('ipfs-api') const fs = require('fs') const FormData = require('form-data') const streamToPromise = require('stream-to-promise') +const DAGLink = require('ipfs-merkle-dag').DAGLink module.exports = (httpAPI) => { describe('object', () => { @@ -17,18 +18,6 @@ module.exports = (httpAPI) => { }) describe('/object/new', () => { - it('returns 500 for request with invalid 
argument', (done) => { - api.inject({ - method: 'GET', - url: '/api/v0/object/new?arg' - }, (res) => { - expect(res.statusCode).to.equal(500) - expect(res.result.Code).to.equal(0) - expect(res.result.Message).to.be.a('string') - done() - }) - }) - it('returns value', (done) => { api.inject({ method: 'GET', @@ -37,8 +26,7 @@ module.exports = (httpAPI) => { expect(res.statusCode).to.equal(200) expect(res.result.Hash) .to.equal('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n') - expect(res.result.Links) - .to.equal(null) + expect(res.result.Links).to.be.eql([]) done() }) }) @@ -74,10 +62,8 @@ module.exports = (httpAPI) => { url: '/api/v0/object/get?arg=QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n' }, (res) => { expect(res.statusCode).to.equal(200) - expect(res.result.Links) - .to.deep.equal([]) - expect(res.result.Data) - .to.equal('') + expect(res.result.Links).to.be.eql([]) + expect(res.result.Data).to.be.empty done() }) }) @@ -126,12 +112,14 @@ module.exports = (httpAPI) => { form.append('data', fs.createReadStream(filePath)) const headers = form.getHeaders() const expectedResult = { + Data: new Buffer('another'), Hash: 'QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm', Links: [{ Name: 'some link', Hash: 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V', Size: 8 - }] + }], + Size: 68 } streamToPromise(form).then((payload) => { @@ -182,8 +170,9 @@ module.exports = (httpAPI) => { expect(res.result.Hash).to.equal('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm') expect(res.result.NumLinks).to.equal(1) expect(res.result.BlockSize).to.equal(60) - expect(res.result.LinksSize).to.equal(8) + expect(res.result.LinksSize).to.equal(60 - 7) expect(res.result.DataSize).to.equal(7) + expect(res.result.CumulativeSize).to.equal(60 + 8) done() }) }) @@ -322,8 +311,10 @@ module.exports = (httpAPI) => { form.append('data', fs.createReadStream(filePath)) const headers = form.getHeaders() const expectedResult = { + Data: fs.readFileSync(filePath), Hash: 'QmfY37rjbPCZRnhvvJuQ46htW3VCAWziVB991P79h6WSv6', - Links: [] + Links: [], + Size: 19 } streamToPromise(form).then((payload) => { @@ -547,12 +538,12 @@ module.exports = (httpAPI) => { }) it('ipfs.object.new', (done) => { - ctl.object.new(null, (err, result) => { + ctl.object.new((err, result) => { expect(err).to.not.exist - expect(result.Hash) + const res = result.toJSON() + expect(res.Hash) .to.equal('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n') - expect(result.Links) - .to.equal(null) + expect(res.Links).to.be.eql([]) done() }) }) @@ -566,19 +557,18 @@ module.exports = (httpAPI) => { }) it('returns error for request with invalid argument', (done) => { - ctl.object.get('invalid', (err, result) => { + ctl.object.get('invalid', {enc: 'base58'}, (err, result) => { expect(err).to.exist done() }) }) it('returns value', (done) => { - ctl.object.get('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n', (err, result) => { + ctl.object.get('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n', {enc: 'base58'}, (err, result) => { expect(err).to.not.exist - expect(result.Links) - .to.deep.equal([]) - expect(result.Data) - .to.equal('') + const res = result.toJSON() + expect(res.Links).to.be.eql([]) + expect(res.Data).to.equal('') done() }) }) @@ -588,26 +578,28 @@ module.exports = (httpAPI) => { it('returns error if the node is invalid', (done) => { const filePath = 'test/test-data/badnode.json' - ctl.object.put(filePath, 'json', (err) => { + ctl.object.put(filePath, {enc: 'json'}, (err) => { expect(err).to.exist done() }) }) it('updates value', (done) => { - const filePath = 
'test/test-data/node.json' + const filePath = fs.readFileSync('test/test-data/node.json') const expectedResult = { + Data: 'another', Hash: 'QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm', Links: [{ Name: 'some link', Hash: 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V', Size: 8 - }] + }], + Size: 68 } - ctl.object.put(filePath, 'json', (err, res) => { + ctl.object.put(filePath, {enc: 'json'}, (err, res) => { expect(err).not.to.exist - expect(res).to.deep.equal(expectedResult) + expect(res.toJSON()).to.deep.equal(expectedResult) done() }) }) @@ -622,20 +614,21 @@ module.exports = (httpAPI) => { }) it('returns error for request with invalid argument', (done) => { - ctl.object.stat('invalid', (err, result) => { + ctl.object.stat('invalid', {enc: 'base58'}, (err, result) => { expect(err).to.exist done() }) }) it('returns value', (done) => { - ctl.object.stat('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm', (err, result) => { + ctl.object.stat('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm', {enc: 'base58'}, (err, result) => { expect(err).to.not.exist expect(result.Hash).to.equal('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm') expect(result.NumLinks).to.equal(1) expect(result.BlockSize).to.equal(60) - expect(result.LinksSize).to.equal(8) + expect(result.LinksSize).to.equal(60 - 7) expect(result.DataSize).to.equal(7) + expect(result.CumulativeSize).to.equal(60 + 8) done() }) }) @@ -650,14 +643,14 @@ module.exports = (httpAPI) => { }) it('returns error for request with invalid argument', (done) => { - ctl.object.data('invalid', (err, result) => { + ctl.object.data('invalid', {enc: 'base58'}, (err, result) => { expect(err).to.exist done() }) }) it('returns value', (done) => { - ctl.object.data('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm', (err, result) => { + ctl.object.data('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm', {enc: 'base58'}, (err, result) => { expect(err).to.not.exist expect(result.toString()).to.equal('another') done() @@ -674,7 +667,7 @@ module.exports = (httpAPI) => { }) it('returns error for request with invalid argument', (done) => { - ctl.object.links('invalid', (err, result) => { + ctl.object.links('invalid', {enc: 'base58'}, (err, result) => { expect(err).to.exist done() }) @@ -682,15 +675,14 @@ module.exports = (httpAPI) => { it('returns value', (done) => { const expectedResult = { - Hash: 'QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm', - Links: [ - { Name: 'some link', Hash: 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V', Size: 8 } - ] + Name: 'some link', + Hash: 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V', + Size: 8 } - ctl.object.links('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm', (err, result) => { + ctl.object.links('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm', {enc: 'base58'}, (err, result) => { expect(err).to.not.exist - expect(result).to.deep.equal(expectedResult) + expect(result[0].toJSON()).to.deep.equal(expectedResult) done() }) }) @@ -707,7 +699,7 @@ module.exports = (httpAPI) => { it('returns error for request without key', (done) => { const key = 'QmVLUHkjGg3duGb5w3dnwK5w2P9QWuJmtVNuDPLc9ZDjzk' - ctl.object.patch.appendData(key, null, (err) => { + ctl.object.patch.appendData(key, null, {enc: 'base58'}, (err) => { expect(err).to.exist done() }) @@ -726,13 +718,15 @@ module.exports = (httpAPI) => { const key = 'QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n' const filePath = 'test/test-data/badnode.json' const expectedResult = { + Data: fs.readFileSync(filePath).toString(), Hash: 'QmfY37rjbPCZRnhvvJuQ46htW3VCAWziVB991P79h6WSv6', - 
Links: [] + Links: [], + Size: 19 } - ctl.object.patch.appendData(key, filePath, (err, res) => { + ctl.object.patch.appendData(key, filePath, {enc: 'base58'}, (err, res) => { expect(err).not.to.exist - expect(res).to.deep.equal(expectedResult) + expect(res.toJSON()).to.deep.equal(expectedResult) done() }) }) @@ -749,7 +743,7 @@ module.exports = (httpAPI) => { it('returns error for request without key', (done) => { const key = 'QmVLUHkjGg3duGb5w3dnwK5w2P9QWuJmtVNuDPLc9ZDjzk' - ctl.object.patch.setData(key, null, (err) => { + ctl.object.patch.setData(key, null, {enc: 'base58'}, (err) => { expect(err).to.exist done() }) @@ -768,13 +762,15 @@ module.exports = (httpAPI) => { const key = 'QmfY37rjbPCZRnhvvJuQ46htW3VCAWziVB991P79h6WSv6' const filePath = 'test/test-data/badnode.json' const expectedResult = { + Data: fs.readFileSync(filePath).toString(), Hash: 'QmfY37rjbPCZRnhvvJuQ46htW3VCAWziVB991P79h6WSv6', - Links: [] + Links: [], + Size: 19 } - ctl.object.patch.setData(key, filePath, (err, res) => { + ctl.object.patch.setData(key, filePath, {enc: 'base58'}, (err, res) => { expect(err).not.to.exist - expect(res).to.deep.equal(expectedResult) + expect(res.toJSON()).to.deep.equal(expectedResult) done() }) }) @@ -799,8 +795,8 @@ module.exports = (httpAPI) => { const root = 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn' const name = '' const ref = 'QmTz3oc4gdpRMKP2sdGUPZTAGRngqjsi99BPoztyP53JMM' - - ctl.object.patch.addLink(root, name, ref, (err) => { + const link = new DAGLink(name, 2, ref) + ctl.object.patch.addLink(root, link, {enc: 'base58'}, (err) => { expect(err).to.exist done() }) @@ -810,9 +806,10 @@ module.exports = (httpAPI) => { const root = 'QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n' const name = 'foo' const ref = 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn' - - ctl.object.patch.addLink(root, name, ref, (err, res) => { + const link = new DAGLink(name, 10, ref) + ctl.object.patch.addLink(root, link, {enc: 'base58'}, (err, result) => { expect(err).not.to.exist + const res = result.toJSON() expect(res.Hash).to.equal('QmdVHE8fUD6FLNLugtNxqDFyhaCgdob372hs6BYEe75VAK') expect(res.Links[0]).to.deep.equal({ Name: 'foo', @@ -851,11 +848,11 @@ module.exports = (httpAPI) => { it('updates value', (done) => { const root = 'QmdVHE8fUD6FLNLugtNxqDFyhaCgdob372hs6BYEe75VAK' - const link = 'foo' + const link = new DAGLink('foo') - ctl.object.patch.rmLink(root, link, (err, res) => { + ctl.object.patch.rmLink(root, link, {enc: 'base58'}, (err, res) => { expect(err).not.to.exist - expect(res.Hash).to.equal('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n') + expect(res.toJSON().Hash).to.equal('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n') done() }) })
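
Not part of the patch above — a minimal usage sketch of the rewritten core object API (src/core/ipfs/object.js): every method is wrapped with promisify-es6, so it takes a trailing callback or, when the callback is omitted, returns a promise, and keys may be passed as base58 strings via the {enc: 'base58'} option. The repo path is assumed to point at an already-initialised repo; the hash is the well-known empty-node multihash used in the tests above.

// usage sketch (assumptions noted above; not part of this patch)
const IPFS = require('../../src/core')   // same constructor the tests above use

const ipfs = new IPFS(repoPath)          // repoPath: an already-initialised repo (assumed)
ipfs.load(() => {
  // callback style: read the empty DAG node by its base58-encoded multihash
  ipfs.object.get('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n', {enc: 'base58'}, (err, node) => {
    if (err) {
      throw err
    }
    console.log(node.toJSON().Hash)
  })

  // promise style (no callback given): create an empty node and print its hash
  ipfs.object.new()
    .then((node) => console.log(node.toJSON().Hash))
    .catch((err) => {
      throw err
    })
})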