From b7fad99cbe48c71503701bdc0d1aa5d6d3cafcc1 Mon Sep 17 00:00:00 2001
From: Alan Shaw
Date: Wed, 12 Dec 2018 09:07:32 +0000
Subject: [PATCH] test: add CID version agnostic tests (#413)

License: MIT
Signed-off-by: Alan Shaw
---
 js/src/block/get.js         | 38 ++++++++++++++++++++++++-
 js/src/dag/get.js           | 42 +++++++++++++++++++++++++++
 js/src/dht/findprovs.js     |  3 +-
 js/src/files-regular/cat.js | 38 +++++++++++++++++++++++++
 js/src/files-regular/get.js | 39 +++++++++++++++++++++++++
 js/src/files-regular/ls.js  | 57 +++++++++++++++++++++++++++++++++++++
 js/src/object/get.js        |  4 +--
 package.json                |  4 +--
 8 files changed, 218 insertions(+), 7 deletions(-)

diff --git a/js/src/block/get.js b/js/src/block/get.js
index 49bac54fdb..3238e82334 100644
--- a/js/src/block/get.js
+++ b/js/src/block/get.js
@@ -70,6 +70,42 @@ module.exports = (createCommon, options) => {
       })
     })
 
-    // TODO it.skip('Promises support', (done) => {})
+    it('should get a block added as CIDv0 with a CIDv1', done => {
+      const input = Buffer.from(`TEST${Date.now()}`)
+
+      ipfs.block.put(input, { version: 0 }, (err, res) => {
+        expect(err).to.not.exist()
+
+        const cidv0 = res.cid
+        expect(cidv0.version).to.equal(0)
+
+        const cidv1 = cidv0.toV1()
+
+        ipfs.block.get(cidv1, (err, output) => {
+          expect(err).to.not.exist()
+          expect(output.data).to.eql(input)
+          done()
+        })
+      })
+    })
+
+    it('should get a block added as CIDv1 with a CIDv0', done => {
+      const input = Buffer.from(`TEST${Date.now()}`)
+
+      ipfs.block.put(input, { version: 1 }, (err, res) => {
+        expect(err).to.not.exist()
+
+        const cidv1 = res.cid
+        expect(cidv1.version).to.equal(1)
+
+        const cidv0 = cidv1.toV0()
+
+        ipfs.block.get(cidv0, (err, output) => {
+          expect(err).to.not.exist()
+          expect(output.data).to.eql(input)
+          done()
+        })
+      })
+    })
   })
 }
diff --git a/js/src/dag/get.js b/js/src/dag/get.js
index 35f211ac93..d5b7415e77 100644
--- a/js/src/dag/get.js
+++ b/js/src/dag/get.js
@@ -5,6 +5,8 @@ const { series, eachSeries } = require('async')
 const dagPB = require('ipld-dag-pb')
 const DAGNode = dagPB.DAGNode
 const dagCBOR = require('ipld-dag-cbor')
+const Unixfs = require('ipfs-unixfs')
+const CID = require('cids')
 const { spawnNodeWithId } = require('../utils/spawn')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
 
@@ -211,5 +213,45 @@ module.exports = (createCommon, options) => {
         done()
       })
     })
+
+    it('should get a node added as CIDv0 with a CIDv1', done => {
+      const input = Buffer.from(`TEST${Date.now()}`)
+
+      dagPB.DAGNode.create(input, (err, node) => {
+        expect(err).to.not.exist()
+
+        ipfs.dag.put(node, { format: 'dag-pb', hashAlg: 'sha2-256' }, (err, cid) => {
+          expect(err).to.not.exist()
+          expect(cid.version).to.equal(0)
+
+          const cidv1 = cid.toV1()
+
+          ipfs.dag.get(cidv1, (err, output) => {
+            expect(err).to.not.exist()
+            expect(output.value.data).to.eql(input)
+            done()
+          })
+        })
+      })
+    })
+
+    it('should get a node added as CIDv1 with a CIDv0', done => {
+      const input = Buffer.from(`TEST${Date.now()}`)
+
+      ipfs.add(input, { cidVersion: 1, rawLeaves: false }, (err, res) => {
+        expect(err).to.not.exist()
+
+        const cidv1 = new CID(res[0].hash)
+        expect(cidv1.version).to.equal(1)
+
+        const cidv0 = cidv1.toV0()
+
+        ipfs.dag.get(cidv0, (err, output) => {
+          expect(err).to.not.exist()
+          expect(Unixfs.unmarshal(output.value.data).data).to.eql(input)
+          done()
+        })
+      })
+    })
   })
 }
diff --git a/js/src/dht/findprovs.js b/js/src/dht/findprovs.js
index 2d5614ac03..3ee87cf29e 100644
--- a/js/src/dht/findprovs.js
+++ b/js/src/dht/findprovs.js
@@ -2,7 +2,6 @@
 'use strict'
 
 const multihashing = require('multihashing-async')
-const Crypto = require('crypto')
 const waterfall = require('async/waterfall')
 const CID = require('cids')
 const { spawnNodesWithId } = require('../utils/spawn')
@@ -10,7 +9,7 @@ const { getDescribe, getIt, expect } = require('../utils/mocha')
 const { connect } = require('../utils/swarm')
 
 function fakeCid (cb) {
-  const bytes = Crypto.randomBytes(Math.round(Math.random() * 1000))
+  const bytes = Buffer.from(`TEST${Date.now()}`)
   multihashing(bytes, 'sha2-256', (err, mh) => {
     if (err) {
       cb(err)
diff --git a/js/src/files-regular/cat.js b/js/src/files-regular/cat.js
index 6f1e9f464a..048d9a9fc6 100644
--- a/js/src/files-regular/cat.js
+++ b/js/src/files-regular/cat.js
@@ -76,6 +76,44 @@ module.exports = (createCommon, options) => {
       })
     })
 
+    it('should cat a file added as CIDv0 with a CIDv1', done => {
+      const input = Buffer.from(`TEST${Date.now()}`)
+
+      ipfs.add(input, { cidVersion: 0 }, (err, res) => {
+        expect(err).to.not.exist()
+
+        const cidv0 = new CID(res[0].hash)
+        expect(cidv0.version).to.equal(0)
+
+        const cidv1 = cidv0.toV1()
+
+        ipfs.cat(cidv1, (err, output) => {
+          expect(err).to.not.exist()
+          expect(output).to.eql(input)
+          done()
+        })
+      })
+    })
+
+    it('should cat a file added as CIDv1 with a CIDv0', done => {
+      const input = Buffer.from(`TEST${Date.now()}`)
+
+      ipfs.add(input, { cidVersion: 1, rawLeaves: false }, (err, res) => {
+        expect(err).to.not.exist()
+
+        const cidv1 = new CID(res[0].hash)
+        expect(cidv1.version).to.equal(1)
+
+        const cidv0 = cidv1.toV0()
+
+        ipfs.cat(cidv0, (err, output) => {
+          expect(err).to.not.exist()
+          expect(output).to.eql(input)
+          done()
+        })
+      })
+    })
+
     it('should cat a BIG file', (done) => {
       ipfs.cat(fixtures.bigFile.cid, (err, data) => {
         expect(err).to.not.exist()
diff --git a/js/src/files-regular/get.js b/js/src/files-regular/get.js
index 2fec9718c2..334a0c34a5 100644
--- a/js/src/files-regular/get.js
+++ b/js/src/files-regular/get.js
@@ -5,6 +5,7 @@ const { fixtures } = require('./utils')
 const bs58 = require('bs58')
 const parallel = require('async/parallel')
 const series = require('async/series')
+const CID = require('cids')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
 
 module.exports = (createCommon, options) => {
@@ -73,6 +74,44 @@ module.exports = (createCommon, options) => {
       })
     })
 
+    it('should get a file added as CIDv0 with a CIDv1', done => {
+      const input = Buffer.from(`TEST${Date.now()}`)
+
+      ipfs.add(input, { cidVersion: 0 }, (err, res) => {
+        expect(err).to.not.exist()
+
+        const cidv0 = new CID(res[0].hash)
+        expect(cidv0.version).to.equal(0)
+
+        const cidv1 = cidv0.toV1()
+
+        ipfs.get(cidv1, (err, output) => {
+          expect(err).to.not.exist()
+          expect(output[0].content).to.eql(input)
+          done()
+        })
+      })
+    })
+
+    it('should get a file added as CIDv1 with a CIDv0', done => {
+      const input = Buffer.from(`TEST${Date.now()}`)
+
+      ipfs.add(input, { cidVersion: 1, rawLeaves: false }, (err, res) => {
+        expect(err).to.not.exist()
+
+        const cidv1 = new CID(res[0].hash)
+        expect(cidv1.version).to.equal(1)
+
+        const cidv0 = cidv1.toV0()
+
+        ipfs.get(cidv0, (err, output) => {
+          expect(err).to.not.exist()
+          expect(output[0].content).to.eql(input)
+          done()
+        })
+      })
+    })
+
     it('should get a BIG file', (done) => {
       ipfs.get(fixtures.bigFile.cid, (err, files) => {
         expect(err).to.not.exist()
diff --git a/js/src/files-regular/ls.js b/js/src/files-regular/ls.js
index 7f441d8360..286d02bbe9 100644
--- a/js/src/files-regular/ls.js
+++ b/js/src/files-regular/ls.js
@@ -3,6 +3,7 @@
 
 const { fixtures } = require('./utils')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
+const CID = require('cids')
 
 module.exports = (createCommon, options) => {
   const describe = getDescribe(options)
@@ -104,6 +105,62 @@ module.exports = (createCommon, options) => {
       })
     })
 
+    it('should ls files added as CIDv0 with a CIDv1', done => {
+      const randomName = prefix => `${prefix}${Math.round(Math.random() * 1000)}`
+      const dir = randomName('DIR')
+
+      const input = [
+        { path: `${dir}/${randomName('F0')}`, content: Buffer.from(randomName('D0')) },
+        { path: `${dir}/${randomName('F1')}`, content: Buffer.from(randomName('D1')) }
+      ]
+
+      ipfs.add(input, { cidVersion: 0 }, (err, res) => {
+        expect(err).to.not.exist()
+
+        const cidv0 = new CID(res[res.length - 1].hash)
+        expect(cidv0.version).to.equal(0)
+
+        const cidv1 = cidv0.toV1()
+
+        ipfs.ls(cidv1, (err, output) => {
+          expect(err).to.not.exist()
+          expect(output.length).to.equal(input.length)
+          output.forEach(({ hash }) => {
+            expect(res.find(file => file.hash === hash)).to.exist()
+          })
+          done()
+        })
+      })
+    })
+
+    it('should ls files added as CIDv1 with a CIDv0', done => {
+      const randomName = prefix => `${prefix}${Math.round(Math.random() * 1000)}`
+      const dir = randomName('DIR')
+
+      const input = [
+        { path: `${dir}/${randomName('F0')}`, content: Buffer.from(randomName('D0')) },
+        { path: `${dir}/${randomName('F1')}`, content: Buffer.from(randomName('D1')) }
+      ]
+
+      ipfs.add(input, { cidVersion: 1, rawLeaves: false }, (err, res) => {
+        expect(err).to.not.exist()
+
+        const cidv1 = new CID(res[res.length - 1].hash)
+        expect(cidv1.version).to.equal(1)
+
+        const cidv0 = cidv1.toV0()
+
+        ipfs.ls(cidv0, (err, output) => {
+          expect(err).to.not.exist()
+          expect(output.length).to.equal(input.length)
+          output.forEach(({ hash }) => {
+            expect(res.find(file => file.hash === hash)).to.exist()
+          })
+          done()
+        })
+      })
+    })
+
     it('should correctly handle a non existing hash', (done) => {
       ipfs.ls('surelynotavalidhashheh?', (err, res) => {
         expect(err).to.exist()
diff --git a/js/src/object/get.js b/js/src/object/get.js
index 14087be5d5..438459ae27 100644
--- a/js/src/object/get.js
+++ b/js/src/object/get.js
@@ -7,7 +7,7 @@ const series = require('async/series')
 const hat = require('hat')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
 const UnixFs = require('ipfs-unixfs')
-const crypto = require('crypto')
+const randomBytes = require('randombytes')
 const { asDAGLink } = require('./utils')
 
 module.exports = (createCommon, options) => {
@@ -326,7 +326,7 @@ module.exports = (createCommon, options) => {
       let next = maxBytes
 
       while (data.length !== required) {
-        data = Buffer.concat([data, crypto.randomBytes(next)])
+        data = Buffer.concat([data, randomBytes(next)])
         next = maxBytes
 
         if (data.length + maxBytes > required) {
diff --git a/package.json b/package.json
index 9debfbdb53..0e5914f794 100644
--- a/package.json
+++ b/package.json
@@ -43,7 +43,6 @@
     "chai": "^4.2.0",
     "cids": "~0.5.5",
     "concat-stream": "^1.6.2",
-    "crypto": "^1.0.1",
     "dirty-chai": "^2.0.1",
     "es6-promisify": "^6.0.1",
     "hat": "0.0.3",
@@ -62,7 +61,8 @@
     "peer-id": "~0.12.0",
     "peer-info": "~0.15.0",
     "pull-stream": "^3.6.9",
-    "pump": "^3.0.0"
+    "pump": "^3.0.0",
+    "randombytes": "^2.0.6"
   },
   "devDependencies": {
     "aegir": "^17.0.1"
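
For context, a minimal sketch (not part of the patch itself) of the CIDv0/CIDv1 round-trip these tests exercise, assuming the `cids` module (~0.5.x) already pinned in package.json; the hardcoded hash is just an example value (the well-known empty unixfs directory CID):

  const CID = require('cids')

  // A CIDv0 is a base58btc-encoded sha2-256 multihash with an implicit dag-pb codec.
  const cidv0 = new CID('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn')
  console.log(cidv0.version) // 0

  // toV1() keeps the same multihash and codec, so both CIDs address the same block.
  const cidv1 = cidv0.toV1()
  console.log(cidv1.version) // 1

  // toV0() only works for dag-pb + sha2-256 CIDs; here it recovers the original CIDv0.
  console.log(cidv1.toV0().toBaseEncodedString() === cidv0.toBaseEncodedString()) // true

This is why the tests can add content with one CID version and read it back with the other: the conversion never changes the underlying multihash, only how it is encoded.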