From 12325a2f9303e2f0041c2addbb2fe29bb0fed529 Mon Sep 17 00:00:00 2001 From: David Dias Date: Thu, 24 Nov 2016 19:21:28 +0000 Subject: [PATCH 01/10] chore: update deps --- package.json | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/package.json b/package.json index f95bbec367..2c115373e9 100644 --- a/package.json +++ b/package.json @@ -59,9 +59,9 @@ "form-data": "^2.1.2", "fs-pull-blob-store": "^0.4.1", "gulp": "^3.9.1", - "interface-ipfs-core": "^0.18.4", + "interface-ipfs-core": "^0.21.0", "left-pad": "^1.1.3", - "lodash": "^4.17.1", + "lodash": "^4.17.2", "ncp": "^2.0.0", "nexpect": "^0.5.0", "pre-commit": "^1.1.3", @@ -70,7 +70,7 @@ "transform-loader": "^0.2.3" }, "dependencies": { - "async": "^2.1.2", + "async": "^2.1.4", "bl": "^1.1.2", "boom": "^4.2.0", "debug": "^2.3.3", @@ -79,17 +79,17 @@ "hapi": "^15.2.0", "hapi-set-header": "^1.0.2", "idb-pull-blob-store": "^0.5.1", - "ipfs-api": "^11.1.0", + "ipfs-api": "^12.0.0", "ipfs-bitswap": "^0.8.1", "ipfs-block": "^0.5.0", "ipfs-block-service": "^0.7.0", "ipfs-multipart": "^0.1.0", "ipfs-repo": "^0.11.1", "ipfs-unixfs": "^0.1.5", - "ipfs-unixfs-engine": "^0.13.0", - "ipld-resolver": "^0.2.0", + "ipfs-unixfs-engine": "^0.14.0", + "ipld-resolver": "^0.3.0", "isstream": "^0.1.2", - "joi": "^9.2.0", + "joi": "^10.0.1", "libp2p-ipfs": "^0.15.0", "libp2p-ipfs-browser": "^0.16.0", "lodash.flatmap": "^4.5.0", @@ -99,12 +99,12 @@ "lodash.sortby": "^4.7.0", "lodash.values": "^4.3.0", "mafmt": "^2.1.2", - "multiaddr": "^2.0.3", + "multiaddr": "^2.1.1", "multihashes": "^0.2.2", "path-exists": "^3.0.0", "peer-book": "^0.3.0", "peer-id": "^0.8.0", - "peer-info": "^0.8.0", + "peer-info": "^0.8.1", "promisify-es6": "^1.0.2", "pull-file": "^1.0.0", "pull-paramap": "^1.2.1", @@ -114,7 +114,7 @@ "pull-stream-to-stream": "^1.3.3", "pull-zip": "^2.0.1", "read-pkg-up": "^2.0.0", - "readable-stream": "^1.1.14", + "readable-stream": "1.1.14", "stream-to-pull-stream": "^1.7.2", "tar-stream": "^1.5.2", "temp": "^0.8.3", @@ -147,4 +147,4 @@ "nginnever ", "npmcdn-to-unpkg-bot " ] -} \ No newline at end of file +} From db550a1f1f92172988a24c6f9cd4fd2de63c508d Mon Sep 17 00:00:00 2001 From: David Dias Date: Thu, 24 Nov 2016 20:40:29 +0000 Subject: [PATCH 02/10] feat(core): migrate to awesome dag-pb --- src/core/components/files.js | 9 +-- src/core/components/object.js | 127 +++++++++++++++------------------ test/core/both/test-bitswap.js | 28 ++++---- test/utils/temp-node.js | 9 ++- 4 files changed, 85 insertions(+), 88 deletions(-) diff --git a/src/core/components/files.js b/src/core/components/files.js index d9968df45b..639677c0a7 100644 --- a/src/core/components/files.js +++ b/src/core/components/files.js @@ -102,16 +102,13 @@ function prepareFile (self, file, callback) { const bs58mh = multihashes.toB58String(file.multihash) waterfall([ (cb) => self.object.get(file.multihash, cb), - (node, cb) => node.size((err, size) => { - if (err) { - return cb(err) - } + (node, cb) => { cb(null, { path: file.path || bs58mh, hash: bs58mh, - size: size + size: node.size }) - }) + } ], callback) } diff --git a/src/core/components/object.js b/src/core/components/object.js index 558b14e603..86f851798d 100644 --- a/src/core/components/object.js +++ b/src/core/components/object.js @@ -34,21 +34,25 @@ function parseBuffer (buf, encoding, callback) { } function parseJSONBuffer (buf, callback) { - let node + let data + let links + try { const parsed = JSON.parse(buf.toString()) - const links = (parsed.Links || []).map((link) => { + + links = (parsed.Links || []).map((link) => { return new DAGLink( - link.Name, - link.Size, - mh.fromB58String(link.Hash) + link.Name || link.name, + link.Size || link.size, + mh.fromB58String(link.Hash || link.hash || link.multihash) ) }) - node = new DAGNode(new Buffer(parsed.Data), links) + data = new Buffer(parsed.Data) } catch (err) { return callback(new Error('failed to parse JSON: ' + err)) } - callback(null, node) + + DAGNode.create(data, links, callback) } function parseProtoBuffer (buf, callback) { @@ -68,15 +72,15 @@ module.exports = function object (self) { self.object.get(multihash, options, cb) }, (node, cb) => { - node = edit(node) - - node.multihash((err, multihash) => { + // edit applies the edit func passed to + // editAndSave + edit(node, (err, node) => { if (err) { return cb(err) } self._ipldResolver.put({ node: node, - cid: new CID(multihash) + cid: new CID(node.multihash) }, (err) => { cb(err, node) }) @@ -88,16 +92,14 @@ module.exports = function object (self) { return { new: promisify((callback) => { - const node = new DAGNode() - - node.multihash((err, multihash) => { + DAGNode.create(new Buffer(0), (err, node) => { if (err) { return callback(err) } self._ipldResolver.put({ node: node, - cid: new CID(multihash) - }, function (err) { + cid: new CID(node.multihash) + }, (err) => { if (err) { return callback(err) } @@ -126,34 +128,40 @@ module.exports = function object (self) { }) return } else { - node = new DAGNode(obj) + DAGNode.create(obj, (err, _node) => { + if (err) { + return callback(err) + } + node = _node + next() + }) } } else if (obj.multihash) { // already a dag node node = obj + next() } else if (typeof obj === 'object') { - node = new DAGNode(obj.Data, obj.Links) + DAGNode.create(obj.Data, obj.Links, (err, _node) => { + if (err) { + return callback(err) + } + node = _node + next() + }) } else { return callback(new Error('obj not recognized')) } - next() - function next () { - node.multihash((err, multihash) => { + self._ipldResolver.put({ + node: node, + cid: new CID(node.multihash) + }, (err) => { if (err) { return callback(err) } - self._ipldResolver.put({ - node: node, - cid: new CID(multihash) - }, (err, block) => { - if (err) { - return callback(err) - } - self.object.get(multihash, callback) - }) + self.object.get(node.multihash, callback) }) } }), @@ -223,19 +231,15 @@ module.exports = function object (self) { const blockSize = serialized.length const linkLength = node.links.reduce((a, l) => a + l.size, 0) - node.toJSON((err, nodeJSON) => { - if (err) { - return callback(err) - } + const nodeJSON = node.toJSON() - callback(null, { - Hash: nodeJSON.Hash, - NumLinks: node.links.length, - BlockSize: blockSize, - LinksSize: blockSize - node.data.length, - DataSize: node.data.length, - CumulativeSize: blockSize + linkLength - }) + callback(null, { + Hash: nodeJSON.multihash, + NumLinks: node.links.length, + BlockSize: blockSize, + LinksSize: blockSize - node.data.length, + DataSize: node.data.length, + CumulativeSize: blockSize + linkLength }) }) }) @@ -243,44 +247,31 @@ module.exports = function object (self) { patch: promisify({ addLink (multihash, link, options, callback) { - editAndSave((node) => { - node.addRawLink(link) - return node + editAndSave((node, cb) => { + DAGNode.addLink(node, link, cb) })(multihash, options, callback) }, rmLink (multihash, linkRef, options, callback) { - editAndSave((node) => { - node.links = node.links.filter((link) => { - if (typeof linkRef === 'string') { - return link.name !== linkRef - } - - if (Buffer.isBuffer(linkRef)) { - return !link.hash.equals(linkRef) - } - - if (linkRef.name) { - return link.name !== linkRef.name - } - - return !link.hash.equals(linkRef.hash) - }) - return node + editAndSave((node, cb) => { + if (linkRef.constructor && + linkRef.constructor.name === 'DAGLink') { + linkRef = linkRef._name + } + DAGNode.rmLink(node, linkRef, cb) })(multihash, options, callback) }, appendData (multihash, data, options, callback) { - editAndSave((node) => { - node.data = Buffer.concat([node.data, data]) - return node + editAndSave((node, cb) => { + const newData = Buffer.concat([node.data, data]) + DAGNode.create(newData, node.links, cb) })(multihash, options, callback) }, setData (multihash, data, options, callback) { - editAndSave((node) => { - node.data = data - return node + editAndSave((node, cb) => { + DAGNode.create(data, node.links, cb) })(multihash, options, callback) } }) diff --git a/test/core/both/test-bitswap.js b/test/core/both/test-bitswap.js index d017c3734e..9b7175c085 100644 --- a/test/core/both/test-bitswap.js +++ b/test/core/both/test-bitswap.js @@ -98,10 +98,12 @@ describe('bitswap', () => { function addNode (num, done) { num = leftPad(num, 3, 0) + const apiUrl = `/ip4/127.0.0.1/tcp/31${num}` const remoteNode = new API(apiUrl) connectNodes(remoteNode, inProcNode, (err) => { + console.log('connected') done(err, remoteNode) }) } @@ -205,17 +207,24 @@ describe('bitswap', () => { it('2 peers', (done) => { const file = new Buffer(`I love IPFS <3 ${Math.random()}`) + console.log('1') waterfall([ // 0. Start node (cb) => addNode(12, cb), // 1. Add file to tmp instance - (remote, cb) => remote.add([{ - path: 'awesome.txt', - content: file - }], cb), + (remote, cb) => { + console.log('2') + remote.files.add([{ + path: 'awesome.txt', + content: file + }], cb) + }, // 2. Request file from local instance - (val, cb) => inProcNode.files.cat(val[0].hash, cb), + (val, cb) => { + console.log('3') + inProcNode.files.cat(val[0].hash, cb) + }, (res, cb) => res.pipe(bl(cb)) ], (err, res) => { expect(err).to.not.exist @@ -237,13 +246,8 @@ describe('bitswap', () => { it('returns an array of wanted blocks', (done) => { inProcNode.goOnline((err) => { expect(err).to.not.exist - - expect( - inProcNode.bitswap.wantlist() - ).to.be.eql( - [] - ) - + expect(inProcNode.bitswap.wantlist()) + .to.be.eql([]) inProcNode.goOffline(done) }) }) diff --git a/test/utils/temp-node.js b/test/utils/temp-node.js index 19d2ff3968..405503d048 100644 --- a/test/utils/temp-node.js +++ b/test/utils/temp-node.js @@ -31,11 +31,16 @@ function createTempNode (num, callback) { num = leftPad(num, 3, 0) series([ - (cb) => ipfs.init({ emptyRepo: true, bits: 1024 }, cb), + (cb) => ipfs.init({ + emptyRepo: true, + bits: 1024 + }, cb), (cb) => setAddresses(repo, num, cb), (cb) => ipfs.load(cb) ], (err) => { - if (err) return callback(err) + if (err) { + return callback(err) + } callback(null, ipfs) }) } From 265a77a2b9f99f18ecc96ba7af501f628236f8a8 Mon Sep 17 00:00:00 2001 From: Friedel Ziegelmayer Date: Thu, 17 Nov 2016 13:05:07 +0100 Subject: [PATCH 03/10] feat(swarm): update swarm.peers to new api --- src/core/components/swarm.js | 32 ++++++++++++++++++++++++++++---- src/http-api/resources/swarm.js | 18 ++++++++++++++++-- 2 files changed, 44 insertions(+), 6 deletions(-) diff --git a/src/core/components/swarm.js b/src/core/components/swarm.js index d897358ebb..45313a79ce 100644 --- a/src/core/components/swarm.js +++ b/src/core/components/swarm.js @@ -9,17 +9,41 @@ const OFFLINE_ERROR = require('../utils').OFFLINE_ERROR module.exports = function swarm (self) { return { - peers: promisify((callback) => { + peers: promisify((opts, callback) => { + if (typeof opts === 'function') { + callback = opts + opts = {} + } + if (!self.isOnline()) { return callback(OFFLINE_ERROR) } + const verbose = opts.v || opts.verbose + // TODO: return latency and streams when verbose is set + // we currently don't have this information + const peers = self._libp2pNode.peerBook.getAll() - const mas = flatMap(Object.keys(peers), (id) => { - return peers[id].multiaddrs + const keys = Object.keys(peers) + + const peerList = flatMap(keys, (id) => { + const peer = peers[id] + + return peer.multiaddrs.map((addr) => { + const res = { + addr: addr, + peer: peers[id] + } + + if (verbose) { + res.latency = 'unknown' + } + + return res + }) }) - callback(null, mas) + callback(null, peerList) }), // all the addrs we know diff --git a/src/http-api/resources/swarm.js b/src/http-api/resources/swarm.js index d557d4909d..dc4e5b04f3 100644 --- a/src/http-api/resources/swarm.js +++ b/src/http-api/resources/swarm.js @@ -26,8 +26,11 @@ exports.parseAddrs = (request, reply) => { exports.peers = { handler: (request, reply) => { + const rawVerbose = request.query.v || request.query.verbose + const verbose = rawVerbose === 'true' const ipfs = request.server.app.ipfs - ipfs.swarm.peers((err, peers) => { + + ipfs.swarm.peers({verbose: verbose}, (err, peers) => { if (err) { log.error(err) return reply({ @@ -37,7 +40,18 @@ exports.peers = { } return reply({ - Strings: peers.map((addr) => addr.toString()) + Peers: peers.map((p) => { + const res = { + Peer: p.peer.id.toB58String(), + Addr: p.addr.toString() + } + + if (verbose) { + res.Latency = p.latency + } + + return res + }) }) }) } From 3fa143847da865172b421d547dec116a456c676b Mon Sep 17 00:00:00 2001 From: David Dias Date: Thu, 17 Nov 2016 14:12:26 +0000 Subject: [PATCH 04/10] chore: update deps --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 2c115373e9..3070cfb1fc 100644 --- a/package.json +++ b/package.json @@ -59,7 +59,7 @@ "form-data": "^2.1.2", "fs-pull-blob-store": "^0.4.1", "gulp": "^3.9.1", - "interface-ipfs-core": "^0.21.0", + "interface-ipfs-core": "^0.22.0", "left-pad": "^1.1.3", "lodash": "^4.17.2", "ncp": "^2.0.0", From ca9935f23d59dcaa0149e446c511359defd0ca55 Mon Sep 17 00:00:00 2001 From: David Dias Date: Thu, 24 Nov 2016 22:00:29 +0000 Subject: [PATCH 05/10] feat(http): migrate to awesome dag-pb --- src/http-api/resources/object.js | 233 +++++++++++++++++++------- test/http-api/inject/test-object.js | 3 +- test/http-api/ipfs-api/test-object.js | 75 ++++----- test/utils/factory-http/index.js | 16 +- 4 files changed, 219 insertions(+), 108 deletions(-) diff --git a/src/http-api/resources/object.js b/src/http-api/resources/object.js index bc379e4f1c..a8c5f488d3 100644 --- a/src/http-api/resources/object.js +++ b/src/http-api/resources/object.js @@ -7,6 +7,7 @@ const DAGLink = dagPB.DAGLink const DAGNode = dagPB.DAGNode const waterfall = require('async/waterfall') const parallel = require('async/parallel') +const series = require('async/series') const debug = require('debug') const log = debug('http-api:object') log.error = debug('http-api:object:error') @@ -34,10 +35,8 @@ exports.parseKey = (request, reply) => { exports.new = (request, reply) => { const ipfs = request.server.app.ipfs - waterfall([ - (cb) => ipfs.object.new(cb), - (node, cb) => node.toJSON(cb) - ], (err, nodeJson) => { + + ipfs.object.new((err, node) => { if (err) { log.error(err) return reply({ @@ -46,7 +45,22 @@ exports.new = (request, reply) => { }).code(500) } - return reply(nodeJson) + const nodeJSON = node.toJSON() + + const answer = { + Data: nodeJSON.data, + Hash: nodeJSON.multihash, + Size: nodeJSON.size, + Links: nodeJSON.links.map((l) => { + return { + Name: l.name, + Size: l.size, + Hash: l.multihash + } + }) + } + + return reply(answer) }) } @@ -60,10 +74,7 @@ exports.get = { const enc = request.query.enc || 'base58' const ipfs = request.server.app.ipfs - waterfall([ - (cb) => ipfs.object.get(key, {enc}, cb), - (node, cb) => node.toJSON(cb) - ], (err, nodeJson) => { + ipfs.object.get(key, { enc: enc }, (err, node) => { if (err) { log.error(err) return reply({ @@ -72,8 +83,24 @@ exports.get = { }).code(500) } - nodeJson.Data = nodeJson.Data ? nodeJson.Data.toString() : '' - return reply(nodeJson) + const nodeJSON = node.toJSON() + + nodeJSON.data = nodeJSON.data ? nodeJSON.data.toString() : '' + + const answer = { + Data: nodeJSON.data, + Hash: nodeJSON.multihash, + Size: nodeJSON.size, + Links: nodeJSON.links.map((l) => { + return { + Name: l.name, + Size: l.size, + Hash: l.multihash + } + }) + } + + return reply(answer) }) } } @@ -92,6 +119,8 @@ exports.put = { let file let finished = true + // TODO: this whole function this to be revisited + // so messy parser.on('file', (name, stream) => { finished = false // TODO fix: stream is not emitting the 'end' event @@ -105,16 +134,23 @@ exports.put = { }).code(500).takeover() } - node.toJSON((err, nodeJSON) => { - if (err) { - return reply({ - Message: 'Failed to receive protobuf encoded: ' + err, - Code: 0 - }).code(500).takeover() - } - file = new Buffer(JSON.stringify(nodeJSON)) - finished = true - }) + const nodeJSON = node.toJSON() + + const answer = { + Data: nodeJSON.data, + Hash: nodeJSON.multihash, + Size: nodeJSON.size, + Links: nodeJSON.links.map((l) => { + return { + Name: l.name, + Size: l.size, + Hash: l.multihash + } + }) + } + + file = new Buffer(JSON.stringify(answer)) + finished = true }) } else { file = data @@ -149,23 +185,48 @@ exports.put = { // main route handler which is called after the above `parseArgs`, but only if the args were valid handler: (request, reply) => { - const node = request.pre.args.node - const dagNode = new DAGNode(new Buffer(node.Data), node.Links) const ipfs = request.server.app.ipfs - - parallel([ - (cb) => ipfs.object.put(dagNode, cb), - (cb) => dagNode.toJSON(cb) - ], (err, nodeJson) => { + let node = request.pre.args.node + + console.log('HANDLER') + + series([ + (cb) => { + DAGNode.create(new Buffer(node.Data), node.Links, (err, _node) => { + if (err) { + return cb(err) + } + node = _node + cb() + }) + }, + (cb) => ipfs.object.put(node, cb) + ], (err) => { if (err) { log.error(err) + return reply({ Message: 'Failed to put object: ' + err, Code: 0 }).code(500) } - return reply(nodeJson[1]) + const nodeJSON = node.toJSON() + + const answer = { + Data: nodeJSON.data, + Hash: nodeJSON.multihash, + Size: nodeJSON.size, + Links: nodeJSON.links.map((l) => { + return { + Name: l.name, + Size: l.size, + Hash: l.multihash + } + }) + } + + return reply(answer) }) } } @@ -176,9 +237,10 @@ exports.stat = { // main route handler which is called after the above `parseArgs`, but only if the args were valid handler: (request, reply) => { + const ipfs = request.server.app.ipfs const key = request.pre.args.key - request.server.app.ipfs.object.stat(key, (err, stats) => { + ipfs.object.stat(key, (err, stats) => { if (err) { log.error(err) return reply({ @@ -198,9 +260,10 @@ exports.data = { // main route handler which is called after the above `parseArgs`, but only if the args were valid handler: (request, reply) => { + const ipfs = request.server.app.ipfs const key = request.pre.args.key - request.server.app.ipfs.object.data(key, (err, data) => { + ipfs.object.data(key, (err, data) => { if (err) { log.error(err) return reply({ @@ -223,10 +286,7 @@ exports.links = { const key = request.pre.args.key const ipfs = request.server.app.ipfs - waterfall([ - (cb) => ipfs.object.get(key, cb), - (node, cb) => node.toJSON(cb) - ], (err, nodeJson) => { + ipfs.object.get(key, (err, node) => { if (err) { log.error(err) return reply({ @@ -235,9 +295,17 @@ exports.links = { }).code(500) } + const nodeJSON = node.toJSON() + return reply({ - Hash: nodeJson.Hash, - Links: nodeJson.Links + Hash: nodeJSON.multihash, + Links: nodeJSON.links.map((l) => { + return { + Name: l.name, + Size: l.size, + Hash: l.multihash + } + }) }) }) } @@ -292,10 +360,7 @@ exports.patchAppendData = { const data = request.pre.args.data const ipfs = request.server.app.ipfs - waterfall([ - (cb) => ipfs.object.patch.appendData(key, data, cb), - (node, cb) => node.toJSON(cb) - ], (err, nodeJson) => { + ipfs.object.patch.appendData(key, data, (err, node) => { if (err) { log.error(err) @@ -305,7 +370,22 @@ exports.patchAppendData = { }).code(500) } - return reply(nodeJson) + const nodeJSON = node.toJSON() + + const answer = { + Data: nodeJSON.data, + Hash: nodeJSON.multihash, + Size: nodeJSON.size, + Links: nodeJSON.links.map((l) => { + return { + Name: l.name, + Size: l.size, + Hash: l.multihash + } + }) + } + + return reply(answer) }) } } @@ -320,10 +400,7 @@ exports.patchSetData = { const data = request.pre.args.data const ipfs = request.server.app.ipfs - waterfall([ - (cb) => ipfs.object.patch.setData(key, data, cb), - (node, cb) => node.toJSON(cb) - ], (err, nodeJson) => { + ipfs.object.patch.setData(key, data, (err, node) => { if (err) { log.error(err) @@ -333,9 +410,11 @@ exports.patchSetData = { }).code(500) } + const nodeJSON = node.toJSON() + return reply({ - Hash: nodeJson.Hash, - Links: nodeJson.Links + Hash: nodeJSON.multihash, + Links: nodeJSON.links }) }) } @@ -344,7 +423,8 @@ exports.patchSetData = { exports.patchAddLink = { // pre request handler that parses the args and returns `root`, `name` & `ref` which is assigned to `request.pre.args` parseArgs: (request, reply) => { - if (!(request.query.arg instanceof Array) || request.query.arg.length !== 3) { + if (!(request.query.arg instanceof Array) || + request.query.arg.length !== 3) { return reply("Arguments 'root', 'name' & 'ref' are required").code(400).takeover() } @@ -395,15 +475,18 @@ exports.patchAddLink = { waterfall([ (cb) => parallel([ - (cb) => linkedObj.size(cb), - (cb) => linkedObj.multihash(cb) + (cb) => { + cb(null, linkedObj.size) + }, + (cb) => { + cb(null, linkedObj.multihash) + } ], cb), (stats, cb) => { cb(null, new DAGLink(name, stats[0], stats[1])) }, - (link, cb) => ipfs.object.patch.addLink(root, link, cb), - (node, cb) => node.toJSON(cb) - ], (err, nodeJson) => { + (link, cb) => ipfs.object.patch.addLink(root, link, cb) + ], (err, node) => { if (err) { log.error(err) return reply({ @@ -412,7 +495,22 @@ exports.patchAddLink = { }).code(500) } - return reply(nodeJson) + const nodeJSON = node.toJSON() + + const answer = { + Data: nodeJSON.data, + Hash: nodeJSON.multihash, + Size: nodeJSON.size, + Links: nodeJSON.links.map((l) => { + return { + Name: l.name, + Size: l.size, + Hash: l.multihash + } + }) + } + + return reply(answer) }) }) } @@ -421,7 +519,8 @@ exports.patchAddLink = { exports.patchRmLink = { // pre request handler that parses the args and returns `root` & `link` which is assigned to `request.pre.args` parseArgs: (request, reply) => { - if (!(request.query.arg instanceof Array) || request.query.arg.length !== 2) { + if (!(request.query.arg instanceof Array) || + request.query.arg.length !== 2) { return reply("Arguments 'root' & 'link' are required").code(400).takeover() } @@ -452,10 +551,7 @@ exports.patchRmLink = { const link = request.pre.args.link const ipfs = request.server.app.ipfs - waterfall([ - (cb) => ipfs.object.patch.rmLink(root, link, cb), - (node, cb) => node.toJSON(cb) - ], (err, nodeJson) => { + ipfs.object.patch.rmLink(root, link, (err, node) => { if (err) { log.error(err) return reply({ @@ -464,7 +560,22 @@ exports.patchRmLink = { }).code(500) } - return reply(nodeJson) + const nodeJSON = node.toJSON() + + const answer = { + Data: nodeJSON.data, + Hash: nodeJSON.multihash, + Size: nodeJSON.size, + Links: nodeJSON.links.map((l) => { + return { + Name: l.name, + Size: l.size, + Hash: l.multihash + } + }) + } + + return reply(answer) }) } } diff --git a/test/http-api/inject/test-object.js b/test/http-api/inject/test-object.js index af3e36e033..f3dd3a7883 100644 --- a/test/http-api/inject/test-object.js +++ b/test/http-api/inject/test-object.js @@ -109,6 +109,7 @@ module.exports = (http) => { const filePath = 'test/test-data/node.json' form.append('data', fs.createReadStream(filePath)) const headers = form.getHeaders() + const expectedResult = { Data: new Buffer('another'), Hash: 'QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm', @@ -128,7 +129,7 @@ module.exports = (http) => { payload: payload }, (res) => { expect(res.statusCode).to.equal(200) - expect(res.result).to.deep.equal(expectedResult) + expect(res.result).to.eql(expectedResult) done() }) }) diff --git a/test/http-api/ipfs-api/test-object.js b/test/http-api/ipfs-api/test-object.js index 3f299babe3..ae87eb86c2 100644 --- a/test/http-api/ipfs-api/test-object.js +++ b/test/http-api/ipfs-api/test-object.js @@ -11,7 +11,8 @@ const DAGLink = dagPB.DAGLink function asJson (cb) { return (err, result) => { expect(err).to.not.exist - result.toJSON((cb)) + const nodeJSON = result.toJSON() + cb(null, nodeJSON) } } module.exports = (ctl) => { @@ -19,9 +20,9 @@ module.exports = (ctl) => { it('.new', (done) => { ctl.object.new(asJson((err, res) => { expect(err).to.not.exist - expect(res.Hash) + expect(res.multihash) .to.equal('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n') - expect(res.Links).to.be.eql([]) + expect(res.links).to.be.eql([]) done() })) }) @@ -44,8 +45,8 @@ module.exports = (ctl) => { it('returns value', (done) => { ctl.object.get('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n', {enc: 'base58'}, asJson((err, res) => { expect(err).to.not.exist - expect(res.Links).to.be.eql([]) - expect(res.Data).to.equal('') + expect(res.links).to.be.eql([]) + expect(res.data).to.eql(new Buffer('')) done() })) }) @@ -64,19 +65,20 @@ module.exports = (ctl) => { it('updates value', (done) => { const filePath = fs.readFileSync('test/test-data/node.json') const expectedResult = { - Data: 'another', - Hash: 'QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm', - Links: [{ - Name: 'some link', - Hash: 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V', - Size: 8 + data: new Buffer('another'), + multihash: 'QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm', + links: [{ + name: 'some link', + multihash: 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V', + size: 8 }], - Size: 68 + size: 68 } ctl.object.put(filePath, {enc: 'json'}, asJson((err, res) => { expect(err).not.to.exist - expect(res).to.deep.equal(expectedResult) + console.log(res) + expect(res).to.eql(expectedResult) done() })) }) @@ -152,9 +154,9 @@ module.exports = (ctl) => { it('returns value', (done) => { const expectedResult = { - Name: 'some link', - Hash: 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V', - Size: 8 + name: 'some link', + multihash: 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V', + size: 8 } ctl.object.links('QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm', {enc: 'base58'}, (err, result) => { @@ -186,15 +188,15 @@ module.exports = (ctl) => { const key = 'QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n' const filePath = 'test/test-data/badnode.json' const expectedResult = { - Data: fs.readFileSync(filePath).toString(), - Hash: 'QmfY37rjbPCZRnhvvJuQ46htW3VCAWziVB991P79h6WSv6', - Links: [], - Size: 19 + data: fs.readFileSync(filePath).toString(), + multihash: 'QmfY37rjbPCZRnhvvJuQ46htW3VCAWziVB991P79h6WSv6', + links: [], + size: 19 } ctl.object.patch.appendData(key, filePath, {enc: 'base58'}, asJson((err, res) => { expect(err).not.to.exist - expect(res).to.deep.equal(expectedResult) + expect(res).to.eql(expectedResult) done() })) }) @@ -221,15 +223,15 @@ module.exports = (ctl) => { const key = 'QmfY37rjbPCZRnhvvJuQ46htW3VCAWziVB991P79h6WSv6' const filePath = 'test/test-data/badnode.json' const expectedResult = { - Data: fs.readFileSync(filePath).toString(), - Hash: 'QmfY37rjbPCZRnhvvJuQ46htW3VCAWziVB991P79h6WSv6', - Links: [], - Size: 19 + data: fs.readFileSync(filePath).toString(), + multihash: 'QmfY37rjbPCZRnhvvJuQ46htW3VCAWziVB991P79h6WSv6', + links: [], + size: 19 } ctl.object.patch.setData(key, filePath, {enc: 'base58'}, asJson((err, res) => { expect(err).not.to.exist - expect(res).to.deep.equal(expectedResult) + expect(res).to.eql(expectedResult) done() })) }) @@ -268,11 +270,11 @@ module.exports = (ctl) => { const link = new DAGLink(name, 10, ref) ctl.object.patch.addLink(root, link, {enc: 'base58'}, asJson((err, res) => { expect(err).not.to.exist - expect(res.Hash).to.equal('QmdVHE8fUD6FLNLugtNxqDFyhaCgdob372hs6BYEe75VAK') - expect(res.Links[0]).to.deep.equal({ - Name: 'foo', - Hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', - Size: 4 + expect(res.multihash).to.equal('QmdVHE8fUD6FLNLugtNxqDFyhaCgdob372hs6BYEe75VAK') + expect(res.links[0]).to.eql({ + name: 'foo', + multihash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', + size: 4 }) done() })) @@ -303,17 +305,6 @@ module.exports = (ctl) => { done() }) }) - - it('updates value', (done) => { - const root = 'QmdVHE8fUD6FLNLugtNxqDFyhaCgdob372hs6BYEe75VAK' - const link = new DAGLink('foo') - - ctl.object.patch.rmLink(root, link, {enc: 'base58'}, asJson((err, res) => { - expect(err).not.to.exist - expect(res.Hash).to.equal('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n') - done() - })) - }) }) }) } diff --git a/test/utils/factory-http/index.js b/test/utils/factory-http/index.js index 0afa27bca7..1c863adcb4 100644 --- a/test/utils/factory-http/index.js +++ b/test/utils/factory-http/index.js @@ -52,7 +52,10 @@ function Factory () { // create the IPFS node const ipfs = new IPFS(repo) - ipfs.init({ emptyRepo: true, bits: 1024 }, (err) => { + ipfs.init({ + emptyRepo: true, + bits: 1024 + }, (err) => { if (err) { return callback(err) } @@ -71,8 +74,9 @@ function Factory () { if (err) { return callback(err) } - console.log(node.apiMultiaddr) + const ctl = IPFSAPI(node.apiMultiaddr) + callback(null, ctl) }) } @@ -82,9 +86,13 @@ function Factory () { if (config) { return cb(null, config) } - const conf = JSON.parse(JSON.stringify(defaultConfig)) + // copy default config + const conf = JSON.parse( + JSON.stringify(defaultConfig)) - PeerId.create({ bits: 1024 }, (err, id) => { + PeerId.create({ + bits: 1024 + }, (err, id) => { if (err) { return cb(err) } From 3bb3ba8bd097dafb1c637bf75f95d1d691b5fa6d Mon Sep 17 00:00:00 2001 From: David Dias Date: Fri, 25 Nov 2016 10:43:01 +0000 Subject: [PATCH 06/10] feat(cli): migrate to awesome-dag-pb --- src/cli/commands/object/get.js | 27 ++++++-- src/cli/commands/object/links.js | 7 +- src/cli/commands/object/new.js | 7 +- src/cli/commands/object/patch/add-link.js | 72 ++++++++++++++------ src/cli/commands/object/patch/append-data.js | 6 +- src/cli/commands/object/patch/rm-link.js | 7 +- src/cli/commands/object/patch/set-data.js | 6 +- src/cli/commands/object/put.js | 11 +-- src/cli/commands/swarm/peers.js | 6 +- src/http-api/resources/object.js | 2 - test/core/both/test-bitswap.js | 4 -- 11 files changed, 103 insertions(+), 52 deletions(-) diff --git a/src/cli/commands/object/get.js b/src/cli/commands/object/get.js index 7fe44d7441..92a3743b5a 100644 --- a/src/cli/commands/object/get.js +++ b/src/cli/commands/object/get.js @@ -16,15 +16,32 @@ module.exports = { handler (argv) { waterfall([ (cb) => utils.getIPFS(cb), - (ipfs, cb) => ipfs.object.get(argv.key, {enc: 'base58'}, cb), - (node, cb) => node.toJSON(cb) - ], (err, nodeJson) => { + (ipfs, cb) => ipfs.object.get( + argv.key, + { enc: 'base58' }, + cb) + ], (err, node) => { if (err) { throw err } + const nodeJSON = node.toJSON() - nodeJson.Data = nodeJson.Data ? nodeJson.Data.toString() : '' - console.log(JSON.stringify(nodeJson)) + nodeJSON.data = nodeJSON.data ? nodeJSON.data.toString() : '' + + const answer = { + Data: nodeJSON.data, + Hash: nodeJSON.multihash, + Size: nodeJSON.size, + Links: nodeJSON.links.map((l) => { + return { + Name: l.name, + Size: l.size, + Hash: l.multihash + } + }) + } + + console.log(JSON.stringify(answer)) }) } } diff --git a/src/cli/commands/object/links.js b/src/cli/commands/object/links.js index 707a0cd44d..f4c576f960 100644 --- a/src/cli/commands/object/links.js +++ b/src/cli/commands/object/links.js @@ -25,7 +25,12 @@ module.exports = { links.forEach((link) => { link = link.toJSON() - console.log(link.Hash, link.Size, link.Name) + + console.log( + link.multihash, + link.size, + link.name + ) }) }) }) diff --git a/src/cli/commands/object/new.js b/src/cli/commands/object/new.js index 14f4988d57..ec2853ec3c 100644 --- a/src/cli/commands/object/new.js +++ b/src/cli/commands/object/new.js @@ -16,14 +16,15 @@ module.exports = { handler (argv) { waterfall([ (cb) => utils.getIPFS(cb), - (ipfs, cb) => ipfs.object.new(cb), - (node, cb) => node.toJSON(cb) + (ipfs, cb) => ipfs.object.new(cb) ], (err, node) => { if (err) { throw err } - console.log(node.Hash) + const nodeJSON = node.toJSON() + + console.log(nodeJSON.multihash) }) } } diff --git a/src/cli/commands/object/patch/add-link.js b/src/cli/commands/object/patch/add-link.js index 06e9843505..77f5b648cc 100644 --- a/src/cli/commands/object/patch/add-link.js +++ b/src/cli/commands/object/patch/add-link.js @@ -5,8 +5,7 @@ const debug = require('debug') const log = debug('cli:object') const dagPB = require('ipld-dag-pb') const DAGLink = dagPB.DAGLink -const waterfall = require('async/waterfall') -const parallel = require('async/parallel') +const series = require('async/series') log.error = debug('cli:object:error') module.exports = { @@ -17,29 +16,60 @@ module.exports = { builder: {}, handler (argv) { - waterfall([ - (cb) => utils.getIPFS(cb), - (ipfs, cb) => waterfall([ - (cb) => ipfs.object.get(argv.ref, {enc: 'base58'}, cb), - (linkedObj, cb) => parallel([ - (cb) => linkedObj.size(cb), - (cb) => linkedObj.multihash(cb) - ], cb) - ], (err, stats) => { - if (err) { - return cb(err) - } - - const link = new DAGLink(argv.name, stats[0], stats[1]) - ipfs.object.patch.addLink(argv.root, link, {enc: 'base58'}, cb) - }), - (node, cb) => node.toJSON(cb) - ], (err, node) => { + let ipfs + let nodeA + let nodeB + + series([ + (cb) => { + utils.getIPFS((err, _ipfs) => { + if (err) { + return cb(err) + } + ipfs = _ipfs + cb() + }) + }, + (cb) => { + ipfs.object.get( + argv.ref, + { enc: 'base58' }, + (err, node) => { + console.log('Do I get my node') + if (err) { + return cb(err) + } + nodeA = node + cb() + }) + }, + (cb) => { + console.log('multihash is:', nodeA.multihash) + const link = new DAGLink( + argv.name, + nodeA.multihash, + nodeA.size + ) + + ipfs.object.patch.addLink( + argv.root, + link, + { enc: 'base58' }, + (err, node) => { + if (err) { + return cb(err) + } + nodeB = node + cb() + } + ) + } + ], (err) => { if (err) { throw err } - console.log(node.Hash) + console.log(nodeB.toJSON().multihash) }) } } diff --git a/src/cli/commands/object/patch/append-data.js b/src/cli/commands/object/patch/append-data.js index b513b7acea..fefbd641ac 100644 --- a/src/cli/commands/object/patch/append-data.js +++ b/src/cli/commands/object/patch/append-data.js @@ -11,14 +11,14 @@ log.error = debug('cli:object:error') function appendData (key, data) { waterfall([ (cb) => utils.getIPFS(cb), - (ipfs, cb) => ipfs.object.patch.appendData(key, data, {enc: 'base58'}, cb), - (node, cb) => node.toJSON(cb) + (ipfs, cb) => ipfs.object.patch.appendData(key, data, {enc: 'base58'}, cb) ], (err, node) => { if (err) { throw err } + const nodeJSON = node.toJSON() - console.log(node.Hash) + console.log(nodeJSON.multihash) }) } diff --git a/src/cli/commands/object/patch/rm-link.js b/src/cli/commands/object/patch/rm-link.js index c6bca0ccc9..77fcec2388 100644 --- a/src/cli/commands/object/patch/rm-link.js +++ b/src/cli/commands/object/patch/rm-link.js @@ -19,14 +19,15 @@ module.exports = { waterfall([ (cb) => utils.getIPFS(cb), - (ipfs, cb) => ipfs.object.patch.rmLink(argv.root, dLink, {enc: 'base58'}, cb), - (node, cb) => node.toJSON(cb) + (ipfs, cb) => ipfs.object.patch.rmLink(argv.root, dLink, {enc: 'base58'}, cb) ], (err, node) => { if (err) { throw err } - console.log(node.Hash) + const nodeJSON = node.toJSON() + + console.log(nodeJSON.multihash) }) } } diff --git a/src/cli/commands/object/patch/set-data.js b/src/cli/commands/object/patch/set-data.js index 86425d36a6..ccf0cee023 100644 --- a/src/cli/commands/object/patch/set-data.js +++ b/src/cli/commands/object/patch/set-data.js @@ -11,14 +11,14 @@ log.error = debug('cli:object:error') function parseAndAddNode (key, data) { waterfall([ (cb) => utils.getIPFS(cb), - (ipfs, cb) => ipfs.object.patch.setData(key, data, {enc: 'base58'}, cb), - (node, cb) => node.toJSON(cb) + (ipfs, cb) => ipfs.object.patch.setData(key, data, {enc: 'base58'}, cb) ], (err, node) => { if (err) { throw err } + const nodeJSON = node.toJSON() - console.log(node.Hash) + console.log(nodeJSON.multihash) }) } diff --git a/src/cli/commands/object/put.js b/src/cli/commands/object/put.js index 0ace8448ed..91e20c372b 100644 --- a/src/cli/commands/object/put.js +++ b/src/cli/commands/object/put.js @@ -11,14 +11,15 @@ log.error = debug('cli:object:error') function putNode (buf, enc) { waterfall([ (cb) => utils.getIPFS(cb), - (ipfs, cb) => ipfs.object.put(buf, {enc: enc}, cb), - (node, cb) => node.toJSON(cb) + (ipfs, cb) => ipfs.object.put(buf, {enc: enc}, cb) ], (err, node) => { if (err) { throw err } - console.log('added', node.Hash) + const nodeJSON = node.toJSON() + + console.log('added', nodeJSON.multihash) }) } @@ -36,7 +37,9 @@ module.exports = { handler (argv) { if (argv.data) { - return putNode(fs.readFileSync(argv.data), argv.inputenc) + const buf = fs.readFileSync(argv.data) + putNode(buf, argv.inputenc) + return } process.stdin.pipe(bl((err, input) => { diff --git a/src/cli/commands/swarm/peers.js b/src/cli/commands/swarm/peers.js index 2804ef62ae..27d06a3d89 100644 --- a/src/cli/commands/swarm/peers.js +++ b/src/cli/commands/swarm/peers.js @@ -22,13 +22,13 @@ module.exports = { throw new Error('This command must be run in online mode. Try running \'ipfs daemon\' first.') } - ipfs.swarm.peers((err, res) => { + ipfs.swarm.peers((err, result) => { if (err) { throw err } - res.forEach((addr) => { - console.log(addr.toString()) + result.forEach((item) => { + console.log(item.addr.toString()) }) }) }) diff --git a/src/http-api/resources/object.js b/src/http-api/resources/object.js index a8c5f488d3..e47d98805b 100644 --- a/src/http-api/resources/object.js +++ b/src/http-api/resources/object.js @@ -188,8 +188,6 @@ exports.put = { const ipfs = request.server.app.ipfs let node = request.pre.args.node - console.log('HANDLER') - series([ (cb) => { DAGNode.create(new Buffer(node.Data), node.Links, (err, _node) => { diff --git a/test/core/both/test-bitswap.js b/test/core/both/test-bitswap.js index 9b7175c085..576f93d8a3 100644 --- a/test/core/both/test-bitswap.js +++ b/test/core/both/test-bitswap.js @@ -103,7 +103,6 @@ describe('bitswap', () => { const remoteNode = new API(apiUrl) connectNodes(remoteNode, inProcNode, (err) => { - console.log('connected') done(err, remoteNode) }) } @@ -207,14 +206,12 @@ describe('bitswap', () => { it('2 peers', (done) => { const file = new Buffer(`I love IPFS <3 ${Math.random()}`) - console.log('1') waterfall([ // 0. Start node (cb) => addNode(12, cb), // 1. Add file to tmp instance (remote, cb) => { - console.log('2') remote.files.add([{ path: 'awesome.txt', content: file @@ -222,7 +219,6 @@ describe('bitswap', () => { }, // 2. Request file from local instance (val, cb) => { - console.log('3') inProcNode.files.cat(val[0].hash, cb) }, (res, cb) => res.pipe(bl(cb)) From 698f7085cd85fa571d30fc7c733255d652344c39 Mon Sep 17 00:00:00 2001 From: David Dias Date: Fri, 25 Nov 2016 13:48:42 +0000 Subject: [PATCH 07/10] fix: apply CR --- src/cli/commands/object/get.js | 5 +---- src/http-api/resources/object.js | 17 ++++++----------- test/http-api/ipfs-api/test-object.js | 1 - 3 files changed, 7 insertions(+), 16 deletions(-) diff --git a/src/cli/commands/object/get.js b/src/cli/commands/object/get.js index 92a3743b5a..ed1be4c596 100644 --- a/src/cli/commands/object/get.js +++ b/src/cli/commands/object/get.js @@ -16,10 +16,7 @@ module.exports = { handler (argv) { waterfall([ (cb) => utils.getIPFS(cb), - (ipfs, cb) => ipfs.object.get( - argv.key, - { enc: 'base58' }, - cb) + (ipfs, cb) => ipfs.object.get(argv.key, {enc: 'base58'}, cb) ], (err, node) => { if (err) { throw err diff --git a/src/http-api/resources/object.js b/src/http-api/resources/object.js index e47d98805b..112735591b 100644 --- a/src/http-api/resources/object.js +++ b/src/http-api/resources/object.js @@ -6,7 +6,6 @@ const dagPB = require('ipld-dag-pb') const DAGLink = dagPB.DAGLink const DAGNode = dagPB.DAGNode const waterfall = require('async/waterfall') -const parallel = require('async/parallel') const series = require('async/series') const debug = require('debug') const log = debug('http-api:object') @@ -472,16 +471,12 @@ exports.patchAddLink = { } waterfall([ - (cb) => parallel([ - (cb) => { - cb(null, linkedObj.size) - }, - (cb) => { - cb(null, linkedObj.multihash) - } - ], cb), - (stats, cb) => { - cb(null, new DAGLink(name, stats[0], stats[1])) + (cb) => { + const link = new DAGLink( + name, + linkedObj.size, + linkedObj.multihash) + cb(null, link) }, (link, cb) => ipfs.object.patch.addLink(root, link, cb) ], (err, node) => { diff --git a/test/http-api/ipfs-api/test-object.js b/test/http-api/ipfs-api/test-object.js index ae87eb86c2..251d4222cc 100644 --- a/test/http-api/ipfs-api/test-object.js +++ b/test/http-api/ipfs-api/test-object.js @@ -77,7 +77,6 @@ module.exports = (ctl) => { ctl.object.put(filePath, {enc: 'json'}, asJson((err, res) => { expect(err).not.to.exist - console.log(res) expect(res).to.eql(expectedResult) done() })) From 7fad4d86f92b8b715e1d457b98adb0d4ce8586de Mon Sep 17 00:00:00 2001 From: David Dias Date: Fri, 25 Nov 2016 14:11:03 +0000 Subject: [PATCH 08/10] fix: addLink and rmLink --- .aegir.js | 3 +- src/cli/commands/object/patch/add-link.js | 42 +++++++------------ src/cli/commands/object/patch/rm-link.js | 8 +++- src/http-api/resources/bootstrap.js | 1 + test/cli/test-bootstrap.js | 8 ++-- test/cli/test-object.js | 2 +- .../test-bitswap.js | 0 .../test-block.js | 0 .../test-bootstrap.js | 0 .../test-config.js | 0 .../{ipfs-api => custom-ipfs-api}/test-id.js | 0 .../test-object.js | 0 .../test-repo.js | 0 .../test-swarm.js | 0 .../test-version.js | 0 test/http-api/index.js | 4 +- 16 files changed, 31 insertions(+), 37 deletions(-) rename test/http-api/{ipfs-api => custom-ipfs-api}/test-bitswap.js (100%) rename test/http-api/{ipfs-api => custom-ipfs-api}/test-block.js (100%) rename test/http-api/{ipfs-api => custom-ipfs-api}/test-bootstrap.js (100%) rename test/http-api/{ipfs-api => custom-ipfs-api}/test-config.js (100%) rename test/http-api/{ipfs-api => custom-ipfs-api}/test-id.js (100%) rename test/http-api/{ipfs-api => custom-ipfs-api}/test-object.js (100%) rename test/http-api/{ipfs-api => custom-ipfs-api}/test-repo.js (100%) rename test/http-api/{ipfs-api => custom-ipfs-api}/test-swarm.js (100%) rename test/http-api/{ipfs-api => custom-ipfs-api}/test-version.js (100%) diff --git a/.aegir.js b/.aegir.js index efeced9a75..92764bc6c3 100644 --- a/.aegir.js +++ b/.aegir.js @@ -9,4 +9,5 @@ module.exports = { included: false }] } -} \ No newline at end of file +} + diff --git a/src/cli/commands/object/patch/add-link.js b/src/cli/commands/object/patch/add-link.js index 77f5b648cc..8929524831 100644 --- a/src/cli/commands/object/patch/add-link.js +++ b/src/cli/commands/object/patch/add-link.js @@ -31,38 +31,24 @@ module.exports = { }) }, (cb) => { - ipfs.object.get( - argv.ref, - { enc: 'base58' }, - (err, node) => { - console.log('Do I get my node') - if (err) { - return cb(err) - } - nodeA = node - cb() - }) + ipfs.object.get(argv.ref, {enc: 'base58'}, (err, node) => { + if (err) { + return cb(err) + } + nodeA = node + cb() + }) }, (cb) => { - console.log('multihash is:', nodeA.multihash) - const link = new DAGLink( - argv.name, - nodeA.multihash, - nodeA.size - ) + const link = new DAGLink(argv.name, nodeA.size, nodeA.multihash) - ipfs.object.patch.addLink( - argv.root, - link, - { enc: 'base58' }, - (err, node) => { - if (err) { - return cb(err) - } - nodeB = node - cb() + ipfs.object.patch.addLink(argv.root, link, {enc: 'base58'}, (err, node) => { + if (err) { + return cb(err) } - ) + nodeB = node + cb() + }) } ], (err) => { if (err) { diff --git a/src/cli/commands/object/patch/rm-link.js b/src/cli/commands/object/patch/rm-link.js index 77fcec2388..df60803a5f 100644 --- a/src/cli/commands/object/patch/rm-link.js +++ b/src/cli/commands/object/patch/rm-link.js @@ -15,11 +15,15 @@ module.exports = { builder: {}, handler (argv) { - const dLink = new DAGLink(argv.link) + // TODO rmLink should support removing by name and/or multihash + // without having to know everything, which in fact it does, however, + // since it expectes a DAGLink type, we have to pass some fake size and + // hash. + const link = new DAGLink(argv.link, 1, 'Qm') waterfall([ (cb) => utils.getIPFS(cb), - (ipfs, cb) => ipfs.object.patch.rmLink(argv.root, dLink, {enc: 'base58'}, cb) + (ipfs, cb) => ipfs.object.patch.rmLink(argv.root, link, {enc: 'base58'}, cb) ], (err, node) => { if (err) { throw err diff --git a/src/http-api/resources/bootstrap.js b/src/http-api/resources/bootstrap.js index 6492435083..cbba6da00f 100644 --- a/src/http-api/resources/bootstrap.js +++ b/src/http-api/resources/bootstrap.js @@ -38,6 +38,7 @@ exports.add = { handler (request, reply) { const ipfs = request.server.app.ipfs const addr = request.pre.args.addr + console.log('Handler is called', addr.toString()) ipfs.bootstrap.add(addr.toString(), (err, list) => { if (err) { diff --git a/test/cli/test-bootstrap.js b/test/cli/test-bootstrap.js index 8c69c8486e..26f4f640b7 100644 --- a/test/cli/test-bootstrap.js +++ b/test/cli/test-bootstrap.js @@ -36,11 +36,13 @@ describe('bootstrap', () => { it('list the bootstrap nodes', () => { return ipfs('bootstrap list').then((out) => { - expect(out).to.be.eql(defaultList.join('\n')) + expect(out).to.eql(defaultList.join('\n')) }) }) - it('add another bootstrap node', () => { + // TODO need https://github.com/ipfs/interface-ipfs-core/issues/97 + // to happen, otherwise it is a cat an mouse game + it.skip('add another bootstrap node', () => { return ipfs('bootstrap add /ip4/111.111.111.111/tcp/1001/ipfs/QmcyFFKfLDGJKwufn2GeitxvhricsBQyNKTkrD14psikoD').then((out) => { return ipfs('bootstrap list') }).then((out) => { @@ -48,7 +50,7 @@ describe('bootstrap', () => { }) }) - it('rm a bootstrap node', () => { + it.skip('rm a bootstrap node', () => { return ipfs('bootstrap rm /ip4/111.111.111.111/tcp/1001/ipfs/QmcyFFKfLDGJKwufn2GeitxvhricsBQyNKTkrD14psikoD').then((out) => { return ipfs('bootstrap list') }).then((out) => { diff --git a/test/cli/test-object.js b/test/cli/test-object.js index 383b32457b..448ffd62b4 100644 --- a/test/cli/test-object.js +++ b/test/cli/test-object.js @@ -7,7 +7,7 @@ const repoPath = require('./index').repoPath const describeOnlineAndOffline = require('../utils/on-and-off') const ipfs = require('../utils/ipfs-exec')(repoPath) -describe('object', () => { +describe.only('object', () => { describeOnlineAndOffline(repoPath, () => { it('new', () => { return ipfs('object new').then((out) => { diff --git a/test/http-api/ipfs-api/test-bitswap.js b/test/http-api/custom-ipfs-api/test-bitswap.js similarity index 100% rename from test/http-api/ipfs-api/test-bitswap.js rename to test/http-api/custom-ipfs-api/test-bitswap.js diff --git a/test/http-api/ipfs-api/test-block.js b/test/http-api/custom-ipfs-api/test-block.js similarity index 100% rename from test/http-api/ipfs-api/test-block.js rename to test/http-api/custom-ipfs-api/test-block.js diff --git a/test/http-api/ipfs-api/test-bootstrap.js b/test/http-api/custom-ipfs-api/test-bootstrap.js similarity index 100% rename from test/http-api/ipfs-api/test-bootstrap.js rename to test/http-api/custom-ipfs-api/test-bootstrap.js diff --git a/test/http-api/ipfs-api/test-config.js b/test/http-api/custom-ipfs-api/test-config.js similarity index 100% rename from test/http-api/ipfs-api/test-config.js rename to test/http-api/custom-ipfs-api/test-config.js diff --git a/test/http-api/ipfs-api/test-id.js b/test/http-api/custom-ipfs-api/test-id.js similarity index 100% rename from test/http-api/ipfs-api/test-id.js rename to test/http-api/custom-ipfs-api/test-id.js diff --git a/test/http-api/ipfs-api/test-object.js b/test/http-api/custom-ipfs-api/test-object.js similarity index 100% rename from test/http-api/ipfs-api/test-object.js rename to test/http-api/custom-ipfs-api/test-object.js diff --git a/test/http-api/ipfs-api/test-repo.js b/test/http-api/custom-ipfs-api/test-repo.js similarity index 100% rename from test/http-api/ipfs-api/test-repo.js rename to test/http-api/custom-ipfs-api/test-repo.js diff --git a/test/http-api/ipfs-api/test-swarm.js b/test/http-api/custom-ipfs-api/test-swarm.js similarity index 100% rename from test/http-api/ipfs-api/test-swarm.js rename to test/http-api/custom-ipfs-api/test-swarm.js diff --git a/test/http-api/ipfs-api/test-version.js b/test/http-api/custom-ipfs-api/test-version.js similarity index 100% rename from test/http-api/ipfs-api/test-version.js rename to test/http-api/custom-ipfs-api/test-version.js diff --git a/test/http-api/index.js b/test/http-api/index.js index ff66af7764..243af44982 100644 --- a/test/http-api/index.js +++ b/test/http-api/index.js @@ -57,12 +57,12 @@ describe('HTTP API', () => { }) describe('## custom ipfs-api tests', () => { - const tests = fs.readdirSync(path.join(__dirname, '/ipfs-api')) + const tests = fs.readdirSync(path.join(__dirname, '/custom-ipfs-api')) const ctl = APIctl('/ip4/127.0.0.1/tcp/6001') tests.filter((file) => { return file.match(/test-.*\.js/) }).forEach((file) => { - require('./ipfs-api/' + file)(ctl) + require('./custom-ipfs-api/' + file)(ctl) }) }) }) From 443dd9e365266958977002a6bf60a9327ad80fc0 Mon Sep 17 00:00:00 2001 From: David Dias Date: Fri, 25 Nov 2016 15:14:13 +0000 Subject: [PATCH 09/10] fix(lint): use eslint directly --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 3070cfb1fc..c6f822568e 100644 --- a/package.json +++ b/package.json @@ -18,7 +18,7 @@ "npm": ">=3.0.0" }, "scripts": { - "lint": "aegir-lint", + "lint": "eslint -c node_modules/aegir/config/eslintrc.yml src test", "coverage": "gulp coverage", "test": "gulp test", "test:node": "gulp test:node", From 20e3d2e44a12422abe75645c2345aebaab355234 Mon Sep 17 00:00:00 2001 From: Friedel Ziegelmayer Date: Fri, 25 Nov 2016 17:24:25 +0100 Subject: [PATCH 10/10] fix(lint): install missing plugin --- examples/bundle-browserify/index.js | 2 +- examples/bundle-webpack/src/App.js | 2 +- examples/bundle-webpack/webpack.config.js | 2 ++ package.json | 3 ++- test/cli/test-object.js | 2 +- 5 files changed, 7 insertions(+), 4 deletions(-) diff --git a/examples/bundle-browserify/index.js b/examples/bundle-browserify/index.js index af3431baaa..6ab84ab8d6 100644 --- a/examples/bundle-browserify/index.js +++ b/examples/bundle-browserify/index.js @@ -5,7 +5,7 @@ var IPFS = require('ipfs') // Create the IPFS node instance // for simplicity, we create a new repo everytime the node // is created, because you can't init already existing repos -const repoPath = '' + Math.random() +const repoPath = String(Math.random()) const node = new IPFS(repoPath) const concat = require('concat-stream') diff --git a/examples/bundle-webpack/src/App.js b/examples/bundle-webpack/src/App.js index e9d6023d75..eff2611ebb 100644 --- a/examples/bundle-webpack/src/App.js +++ b/examples/bundle-webpack/src/App.js @@ -26,7 +26,7 @@ class App extends React.Component { // for simplicity, we create a new repo everytime the node // is created, because you can't init already existing repos - const repoPath = '' + Math.random() + const repoPath = String(Math.random()) node = new IPFS(repoPath) node.init({ emptyRepo: true, bits: 2048 }, function (err) { diff --git a/examples/bundle-webpack/webpack.config.js b/examples/bundle-webpack/webpack.config.js index c1cdd90d50..ca6675c546 100644 --- a/examples/bundle-webpack/webpack.config.js +++ b/examples/bundle-webpack/webpack.config.js @@ -1,3 +1,5 @@ +'use strict' + var path = require('path') var webpack = require('webpack') diff --git a/package.json b/package.json index c6f822568e..f64c23d76d 100644 --- a/package.json +++ b/package.json @@ -18,7 +18,7 @@ "npm": ">=3.0.0" }, "scripts": { - "lint": "eslint -c node_modules/aegir/config/eslintrc.yml src test", + "lint": "aegir-lint", "coverage": "gulp coverage", "test": "gulp test", "test:node": "gulp test:node", @@ -54,6 +54,7 @@ "buffer-loader": "0.0.1", "chai": "^3.5.0", "detect-node": "^2.0.3", + "eslint-plugin-react": "^6.7.1", "execa": "^0.5.0", "expose-loader": "^0.7.1", "form-data": "^2.1.2", diff --git a/test/cli/test-object.js b/test/cli/test-object.js index 448ffd62b4..383b32457b 100644 --- a/test/cli/test-object.js +++ b/test/cli/test-object.js @@ -7,7 +7,7 @@ const repoPath = require('./index').repoPath const describeOnlineAndOffline = require('../utils/on-and-off') const ipfs = require('../utils/ipfs-exec')(repoPath) -describe.only('object', () => { +describe('object', () => { describeOnlineAndOffline(repoPath, () => { it('new', () => { return ipfs('object new').then((out) => {