This repository has been archived by the owner on Mar 10, 2020. It is now read-only.

Commit: fix tests
nginnever committed Jun 5, 2016
1 parent b86d017 · commit 4c337c2
Showing 1 changed file with 116 additions and 102 deletions.
src/files.js: 218 changes (116 additions, 102 deletions)
@@ -1,155 +1,163 @@
 /* eslint-env mocha */
 'use strict'
 
-var expect = require('chai').expect
-var bs58 = require('bs58')
-var Readable = require('readable-stream')
-var path = require('path')
-var isNode = require('detect-node')
-var fs = require('fs')
-var bl = require('bl')
-
-module.exports = function (common) {
-describe('.files', function () {
-var smallFile = void 0
-var bigFile = void 0
-var ipfs = void 0
-
-before(function (done) {
-smallFile = fs.readFileSync(path.join(__dirname, './data/testfile.txt'))
-bigFile = fs.readFileSync(path.join(__dirname, './data/15mb.random'))
-
-common.setup(function (err, _ipfs) {
+const expect = require('chai').expect
+const bs58 = require('bs58')
+const Readable = require('readable-stream')
+const path = require('path')
+const fs = require('fs')
+const isNode = require('detect-node')
+const bl = require('bl')
+
+module.exports = (common) => {
+describe('.files', () => {
+let smallFile
+let bigFile
+let ipfs
+
+before((done) => {
+smallFile = fs.readFileSync(path.join(__dirname, './data/testfile.txt')
+)
+bigFile = fs.readFileSync(path.join(__dirname, './data/15mb.random')
+)
+
+common.setup((err, _ipfs) => {
 expect(err).to.not.exist
 ipfs = _ipfs
 done()
 })
 })
 
-after(function (done) {
+after((done) => {
 common.teardown(done)
 })
 
-describe('.add', function () {
-it('stream', function (done) {
-var buffered = new Buffer('some data')
-var rs = new Readable()
+describe('.add', () => {
+it('stream', (done) => {
+const buffered = new Buffer('some data')
+const rs = new Readable()
 rs.push(buffered)
 rs.push(null)
 
-var arr = []
-var filePair = { path: 'data.txt', content: rs }
+const arr = []
+const filePair = {path: 'data.txt', content: rs}
 arr.push(filePair)
 
-ipfs.files.add(arr, function (err, res) {
+ipfs.files.add(arr, (err, res) => {
 expect(err).to.not.exist
 expect(res).to.be.length(1)
 expect(res[0].path).to.equal('data.txt')
 expect(res[0].node.size()).to.equal(17)
-var mh = 'QmVv4Wz46JaZJeH5PMV4LGbRiiMKEmszPYY3g6fjGnVXBS'
+const mh = 'QmVv4Wz46JaZJeH5PMV4LGbRiiMKEmszPYY3g6fjGnVXBS'
 expect(bs58.encode(res[0].node.multihash()).toString()).to.equal(mh)
 done()
 })
 })
 
-it('buffer as tuple', function (done) {
-if (!isNode) return done()
-
-var file = {
+it('buffer as tuple', (done) => {
+const file = {
 path: 'testfile.txt',
 content: smallFile
 }
 
-ipfs.files.add([file], function (err, res) {
+ipfs.files.add([file], (err, res) => {
 expect(err).to.not.exist
 
-var added = res[0] != null ? res[0] : res
-var mh = bs58.encode(added.node.multihash()).toString()
+const added = res[0] != null ? res[0] : res
+const mh = bs58.encode(added.node.multihash()).toString()
 expect(mh).to.equal('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP')
 expect(added.path).to.equal('testfile.txt')
 expect(added.node.links).to.have.length(0)
 done()
 })
 })
 
-it('buffer', function (done) {
-ipfs.files.add(smallFile, function (err, res) {
+it('buffer', (done) => {
+ipfs.files.add(smallFile, (err, res) => {
 expect(err).to.not.exist
 
 expect(res).to.have.length(1)
-var mh = bs58.encode(res[0].node.multihash()).toString()
+const mh = bs58.encode(res[0].node.multihash()).toString()
 expect(mh).to.equal('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP')
 expect(res[0].path).to.equal(mh)
 expect(res[0].node.links).to.have.length(0)
 done()
 })
 })
 
-it('BIG buffer', function (done) {
-ipfs.files.add(bigFile, function (err, res) {
+it('BIG buffer', (done) => {
+ipfs.files.add(bigFile, (err, res) => {
 expect(err).to.not.exist
 
 expect(res).to.have.length(1)
 expect(res[0].node.links).to.have.length(58)
-var mh = bs58.encode(res[0].node.multihash()).toString()
+const mh = bs58.encode(res[0].node.multihash()).toString()
 expect(res[0].path).to.equal(mh)
 expect(mh).to.equal('Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq')
 done()
 })
 })
 
-it('add a nested dir as array', function (done) {
+it('add a nested dir as array', (done) => {
 if (!isNode) {
 return done()
 // can't run this test cause browserify
 // can't shim readFileSync in runtime
 }
-var base = path.join(__dirname, 'data/test-folder')
-var content = function content (name) {
-return {
-path: 'test-folder/' + name,
-content: fs.readFileSync(path.join(base, name))
-}
-}
-var emptyDir = function emptyDir (name) {
-return {
-path: 'test-folder/' + name,
-dir: true
-}
-}
-var dirs = [content('pp.txt'), content('holmes.txt'), content('jungle.txt'), content('alice.txt'), emptyDir('empty-folder'), content('files/hello.txt'), content('files/ipfs.txt'), emptyDir('files/empty')]
+const base = path.join(__dirname, 'data/test-folder')
+const content = (name) => ({
+path: `test-folder/${name}`,
+content: fs.readFileSync(path.join(base, name))
+})
+const emptyDir = (name) => ({
+path: `test-folder/${name}`
+})
+const dirs = [
+content('pp.txt'),
+content('holmes.txt'),
+content('jungle.txt'),
+content('alice.txt'),
+emptyDir('empty-folder'),
+content('files/hello.txt'),
+content('files/ipfs.txt'),
+emptyDir('files/empty')
+]
 
-ipfs.files.add(dirs, function (err, res) {
+ipfs.files.add(dirs, (err, res) => {
 expect(err).to.not.exist
 
-var added = res[res.length - 1]
-var mh = bs58.encode(added.node.multihash()).toString()
+const added = res[res.length - 1]
+const mh = bs58.encode(added.node.multihash()).toString()
 expect(mh).to.equal('QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP')
 expect(added.path).to.equal('test-folder')
 expect(added.node.links).to.have.length(6)
 done()
 })
 })
 
-describe('promise', function () {
-it('buffer', function () {
-return ipfs.files.add(smallFile).then(function (res) {
-var added = res[0] != null ? res[0] : res
-var mh = bs58.encode(added.node.multihash()).toString()
-expect(mh).to.equal('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP')
-expect(added.path).to.equal(mh)
-expect(added.node.links).to.have.length(0)
-}).catch(function (err) {
-expect(err).to.not.exist
-})
+describe('promise', () => {
+it('buffer', () => {
+return ipfs.files.add(smallFile)
+.then((res) => {
+const added = res[0] != null ? res[0] : res
+const mh = bs58.encode(added.node.multihash()).toString()
+expect(mh).to.equal('Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP')
+expect(added.path).to.equal(mh)
+expect(added.node.links).to.have.length(0)
+})
+.catch((err) => {
+expect(err).to.not.exist
+})
 })
 })
 })
 
-describe('.cat', function () {
-it('returns file stream', function (done) {
-var hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'
-ipfs.cat(hash, function (err, file) {
+describe('.cat', () => {
+it('returns file stream', (done) => {
+const hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'
+ipfs.cat(hash, (err, file) => {
 expect(err).to.not.exist
-file.pipe(bl(function (err, bldata) {
+file.pipe(bl((err, bldata) => {
 expect(err).to.not.exist
 expect(bldata.toString()).to.contain('Check out some of the other files in this directory:')
 done()
@@ -158,11 +166,11 @@ module.exports = function (common) {
 })
 
 // This fails on js-ipfs-api
-it('takes a buffer input', function (done) {
-var mhBuf = new Buffer(bs58.decode('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'))
-ipfs.cat(mhBuf, function (err, file) {
+it('takes a buffer input', (done) => {
+const mhBuf = new Buffer(bs58.decode('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'))
+ipfs.cat(mhBuf, (err, file) => {
 expect(err).to.not.exist
-file.pipe(bl(function (err, bldata) {
+file.pipe(bl((err, bldata) => {
 expect(err).to.not.exist
 expect(bldata.toString()).to.contain('Check out some of the other files in this directory:')
 done()
@@ -171,23 +179,23 @@ module.exports = function (common) {
 })
 
 // You can add a large file to your ipfs repo and change the hash to the file after installing js-ipfs
-it('returns a large file', function (done) {
-var hash = 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq'
-ipfs.cat(hash, function (err, file) {
+it('returns a large file', (done) => {
+const hash = 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq'
+ipfs.cat(hash, (err, file) => {
 expect(err).to.not.exist
-file.pipe(bl(function (err, bldata) {
+file.pipe(bl((err, bldata) => {
 expect(err).to.not.exist
 expect(bldata).to.deep.equal(bigFile)
 done()
 }))
 })
 })
 
-it('returns error on invalid key', function (done) {
-var hash = 'somethingNotMultihash'
-ipfs.cat(hash, function (err, file) {
+it('returns error on invalid key', (done) => {
+const hash = 'somethingNotMultihash'
+ipfs.cat(hash, (err, file) => {
 expect(err).to.exist
-var errString = err.toString()
+const errString = err.toString()
 if (errString === 'Error: invalid ipfs ref path') {
 expect(err.toString()).to.contain('Error: invalid ipfs ref path')
 }
@@ -198,25 +206,29 @@ module.exports = function (common) {
 })
 })
 
-describe('promise', function () {
-it('files.cat', function (done) {
-var hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'
-ipfs.cat(hash).then(function (stream) {
-stream.pipe(bl(function (err, bldata) {
+describe('promise', () => {
+it('files.cat', (done) => {
+const hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'
+ipfs.cat(hash)
+.then((stream) => {
+stream.pipe(bl((err, bldata) => {
 expect(err).to.not.exist
 expect(bldata.toString()).to.contain('Check out some of the other files in this directory:')
 done()
 }))
-}).catch(function (err) {
+})
+.catch((err) => {
 expect(err).to.not.exist
 })
 })
 
-it('returns error on invalid key', function (done) {
-var hash = 'somethingNotMultihash'
-ipfs.cat(hash).then(function (stream) {}).catch(function (err) {
+it('returns error on invalid key', (done) => {
+const hash = 'somethingNotMultihash'
+ipfs.cat(hash)
+.then((stream) => {})
+.catch((err) => {
 expect(err).to.exist
-var errString = err.toString()
+const errString = err.toString()
 if (errString === 'Error: invalid ipfs ref path') {
 expect(err.toString()).to.contain('Error: invalid ipfs ref path')
 }
@@ -227,15 +239,17 @@ module.exports = function (common) {
 })
 })
 
-it('takes a buffer input', function (done) {
-var hash = new Buffer(bs58.decode('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'))
-ipfs.cat(hash).then(function (stream) {
-stream.pipe(bl(function (err, bldata) {
+it('takes a buffer input', (done) => {
+const hash = new Buffer(bs58.decode('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'))
+ipfs.cat(hash)
+.then((stream) => {
+stream.pipe(bl((err, bldata) => {
 expect(err).to.not.exist
 expect(bldata.toString()).to.contain('Check out some of the other files in this directory:')
 done()
 }))
-}).catch(function (err) {
+})
+.catch((err) => {
 expect(err).to.not.exist
 })
 })