This repository has been archived by the owner on Feb 12, 2024. It is now read-only.

Files core fix #204

Merged · 6 commits · May 9, 2016
Changes from 3 commits
package.json: 27 changes (15 additions, 12 deletions)
@@ -38,45 +38,48 @@
"homepage": "https://github.com/ipfs/js-ipfs#readme",
"devDependencies": {
"aegir": "^3.0.1",
"async": "^2.0.0-rc.3",
"async": "^2.0.0-rc.4",
"buffer-loader": "0.0.1",
"chai": "^3.5.0",
"expose-loader": "^0.7.1",
"form-data": "^1.0.0-rc3",
"idb-plus-blob-store": "^1.1.2",
"lodash": "^4.11.1",
"mocha": "^2.3.4",
"lodash": "^4.11.2",
"mocha": "^2.4.5",
"ncp": "^2.0.0",
"nexpect": "^0.5.0",
"pre-commit": "^1.1.2",
"rimraf": "^2.4.4",
"rimraf": "^2.5.2",
"stream-to-promise": "^1.1.0",
"transform-loader": "^0.2.3"
},
"dependencies": {
"babel-runtime": "^6.6.1",
"bl": "^1.1.2",
"boom": "^3.1.1",
"boom": "^3.1.2",
"bs58": "^3.0.0",
"debug": "^2.2.0",
"fs-blob-store": "^5.2.1",
"glob": "^7.0.3",
"hapi": "^13.3.0",
"ipfs-api": "^3.0.1",
"ipfs-api": "^3.0.2",
"ipfs-block": "^0.3.0",
"ipfs-block-service": "^0.3.0",
"ipfs-data-importing": "^0.3.3",
"ipfs-block-service": "^0.4.0",
"ipfs-merkle-dag": "^0.5.0",
"ipfs-multipart": "^0.1.0",
"ipfs-repo": "^0.8.0",
"joi": "^8.0.2",
"ipfs-unixfs-engine": "^0.6.1",
"joi": "^8.0.5",
"libp2p-ipfs": "^0.3.3",
"libp2p-swarm": "^0.12.5",
"lodash.get": "^4.2.1",
"lodash.set": "^4.0.0",
"multiaddr": "^1.3.0",
"lodash.set": "^4.1.0",
"multiaddr": "^1.4.1",
"path-exists": "^3.0.0",
"peer-book": "0.1.0",
"peer-id": "^0.6.6",
"peer-info": "^0.6.2",
"readable-stream": "1.1.13",
"ronin": "^0.3.11",
"temp": "^0.8.3"
},
@@ -112,4 +115,4 @@
"kumavis <kumavis@users.noreply.github.com>",
"nginnever <ginneversource@gmail.com>"
]
}
}
src/cli/commands/files/add.js: 86 changes (77 additions, 9 deletions)
@@ -1,11 +1,44 @@
'use strict'

const Command = require('ronin').Command
const IPFS = require('../../../core')
const utils = require('../../utils')
const debug = require('debug')
const log = debug('cli:version')
log.error = debug('cli:version:error')
const bs58 = require('bs58')
const fs = require('fs')
const async = require('async')
const path = require('path')
const glob = require('glob')

function checkPath (inPath, recursive) {
// This function is to check for the following possible inputs
// 1) "." add the cwd but throw error for no recursion flag
// 2) "." -r return the cwd
// 3) "/some/path" but throw error for no recursion
// 4) "/some/path" -r
// 5) No path, throw err
// 6) filename.type return the cwd + filename

if (!inPath) {
throw new Error('Error: Argument \'path\' is required')
}

var s = fs.statSync(inPath)

if (s.isDirectory() && recursive === false) {
throw new Error('Error: ' + process.cwd() + ' is a directory, use the \'-r\' flag to specify directories')
}
if (inPath === '.' && recursive === true) {
inPath = process.cwd()
} else if (inPath === '.' && recursive === false) {
s = fs.statSync(process.cwd())
if (s.isDirectory()) {
throw new Error('Error: ' + process.cwd() + ' is a directory, use the \'-r\' flag to specify directories')
}
Member commented: won't process.cwd() (current working directory) always be a directory?

Member (Author) replied: yeah

}
return inPath
}
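
As the review comment above points out, `process.cwd()` always stats as a directory, so the inner `isDirectory()` check can never fail. A minimal sketch of how `checkPath` could collapse under that observation (a hypothetical simplification, not part of this PR's diff):

```js
// Hypothetical simplification: resolve '.' to the cwd first, then a single
// stat covers every input case, since process.cwd() is always a directory.
function checkPath (inPath, recursive) {
  if (!inPath) {
    throw new Error("Error: Argument 'path' is required")
  }
  if (inPath === '.') {
    inPath = process.cwd()
  }
  if (fs.statSync(inPath).isDirectory() && !recursive) {
    throw new Error('Error: ' + inPath + " is a directory, use the '-r' flag to specify directories")
  }
  return inPath
}
```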

module.exports = Command.extend({
desc: 'Add a file to IPFS using the UnixFS data format',
@@ -18,16 +51,51 @@ module.exports = Command.extend({
}
},

run: (recursive, path) => {
var node = new IPFS()
path = process.cwd() + '/' + path
node.files.add(path, {
recursive: recursive
}, (err, stats) => {
run: (recursive, inPath) => {
let rs

inPath = checkPath(inPath, recursive)

glob(path.join(inPath, '/**/*'), (err, res) => {
if (err) {
return console.log(err)
throw err
}
console.log('added', bs58.encode(stats.Hash).toString(), stats.Name)
utils.getIPFS((err, ipfs) => {
if (err) {
throw err
}
const i = ipfs.files.add()
var filePair
i.on('data', (file) => {
console.log('added', bs58.encode(file.multihash).toString(), file.path)
})
i.once('end', () => {
return
})
if (res.length !== 0) {
const index = inPath.lastIndexOf('/')
async.eachLimit(res, 10, (element, callback) => {
if (!fs.statSync(element).isDirectory()) {
i.write({
path: element.substring(index + 1, element.length),
stream: fs.createReadStream(element)
})
}
callback()
}, (err) => {
if (err) {
throw err
}
i.end()
})
} else {
rs = fs.createReadStream(inPath)
inPath = inPath.substring(inPath.lastIndexOf('/') + 1, inPath.length)
filePair = {path: inPath, stream: rs}
i.write(filePair)
i.end()
}
})
})
}
})
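
The directory branch above depends on `glob()` listing every nested entry under `inPath`, and on the `lastIndexOf('/')` trim turning each absolute path into an importer path relative to the added directory's parent. A small worked example of that trim, with hypothetical directory names:

```js
// Worked example of the substring trim used in the directory branch above.
const inPath = '/home/user/site'                 // value after checkPath()
const index = inPath.lastIndexOf('/')            // 10: the slash before 'site'
const element = '/home/user/site/css/style.css'  // one entry returned by glob()
console.log(element.substring(index + 1, element.length))
// -> 'site/css/style.css', the path handed to the importer
```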
src/cli/commands/files/cat.js: 37 changes (37 additions, 0 deletions)
@@ -0,0 +1,37 @@
'use strict'

const Command = require('ronin').Command
const debug = require('debug')
const utils = require('../../utils')
const log = debug('cli:files')
log.error = debug('cli:files:error')

module.exports = Command.extend({
desc: 'Download IPFS objects',

options: {},

run: (path, options) => {
if (!path) {
throw new Error("Argument 'path' is required")
}
if (!options) {
options = {}
}
utils.getIPFS((err, ipfs) => {
if (err) {
throw err
}
ipfs.files.cat(path, (err, res) => {
if (err) {
throw (err)
}
if (res) {
res.on('file', (data) => {
data.stream.pipe(process.stdout)
})
}
})
})
}
})
src/cli/commands/files/get.js: 91 changes (91 additions, 0 deletions)
@@ -0,0 +1,91 @@
'use strict'

const Command = require('ronin').Command
const debug = require('debug')
const utils = require('../../utils')
const log = debug('cli:files')
log.error = debug('cli:files:error')
var fs = require('fs')
const path = require('path')
const pathExists = require('path-exists')

function checkArgs (hash, outPath) {
if (!hash) {
throw new Error("Argument 'path' is required")
}
// format the output directory
if (!outPath) {
var cwd = process.cwd()
return cwd
Member commented: can be reduced to just the return

} else {
if (!outPath.endsWith('/')) {
outPath += '/'
}
if (!outPath.startsWith('/')) {
outPath = path.join('/', outPath)
}
var directory = outPath
return directory
Member commented: same here

}
}
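
Both review comments above ask for the temporary variables to go away. A hypothetical reduced version of `checkArgs` applying that suggestion, with unchanged behaviour:

```js
// Hypothetical reduction suggested in review: return directly instead of
// assigning to a throwaway variable first.
function checkArgs (hash, outPath) {
  if (!hash) {
    throw new Error("Argument 'path' is required")
  }
  // format the output directory
  if (!outPath) {
    return process.cwd()
  }
  if (!outPath.endsWith('/')) {
    outPath += '/'
  }
  if (!outPath.startsWith('/')) {
    outPath = path.join('/', outPath)
  }
  return outPath
}
```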

function ensureDir (dir, cb) {
pathExists(dir)
.then((exists) => {
if (!exists) {
fs.mkdir(dir, cb)
} else {
cb()
}
})
.catch(cb)
}

function fileHandler (result, dir) {
return function onFile (file) {
// Check to see if the result is in a directory
if (file.path.lastIndexOf('/') === -1) {
const dirPath = path.join(dir, file.path)
// Check to see if the result is a directory
if (file.dir === false) {
file.stream.pipe(fs.createWriteStream(dirPath))
} else {
ensureDir(dirPath, (err) => {
if (err) {
throw err
}
})
}
} else {
const filePath = file.path.substring(0, file.path.lastIndexOf('/') + 1)
const dirPath = path.join(dir, filePath)
ensureDir(dirPath, (err) => {
if (err) {
throw err
}

file.stream.pipe(fs.createWriteStream(dirPath))
})
}
}
}

module.exports = Command.extend({
desc: 'Download IPFS objects',

run: (hash, outPath) => {
const dir = checkArgs(hash, outPath)

utils.getIPFS((err, ipfs) => {
if (err) {
throw err
}
ipfs.files.get(hash, (err, result) => {
if (err) {
throw err
}
result.on('file', fileHandler(result, dir))
})
})
}
})
src/core/ipfs/files.js: 56 changes (50 additions, 6 deletions)
@@ -1,13 +1,57 @@
'use strict'

const importer = require('ipfs-data-importing').import
const Importer = require('ipfs-unixfs-engine').importer
const Exporter = require('ipfs-unixfs-engine').exporter
const UnixFS = require('ipfs-unixfs')

module.exports = function libp2p (self) {
module.exports = function files (self) {
return {
add: (path, options, callback) => {
options.path = path
options.dagService = self._dagS
importer(options, callback)
add: (arr, callback) => {
if (typeof arr === 'function') {
callback = arr
arr = undefined
}
if (callback === undefined) {
callback = function noop () {}
}
if (arr === undefined) {
return new Importer(self._dagS)
}

const i = new Importer(self._dagS)
const res = []

i.on('data', (info) => {
res.push(info)
})

i.once('end', () => {
callback(null, res)
})

arr.forEach((tuple) => {
i.write(tuple)
})

i.end()
},
cat: (hash, callback) => {
self._dagS.get(hash, (err, fetchedNode) => {
if (err) {
return callback(err, null)
}
const data = UnixFS.unmarshal(fetchedNode.data)
if (data.type === 'directory') {
callback('This dag node is a directory', null)
} else {
const exportEvent = Exporter(hash, self._dagS)
callback(null, exportEvent)
}
})
},
get: (hash, callback) => {
var exportFile = Exporter(hash, self._dagS)
callback(null, exportFile)
}
}
}
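
For orientation, a usage sketch of the reworked core API; `node` stands in for an initialized IPFS instance and the hash and file names are hypothetical, while the `{path, stream}` tuple shape and the `'data'`/`'file'` events follow the CLI commands above:

```js
const fs = require('fs')

// Callback style: pass an array of {path, stream} tuples and collect the
// imported files once the importer emits 'end'.
node.files.add([
  { path: 'hello.txt', stream: fs.createReadStream('./hello.txt') }
], (err, res) => {
  if (err) throw err
  res.forEach((file) => console.log('added', file.path))
})

// Streaming style: calling add() with no arguments returns the Importer
// duplex stream itself, which is what the CLI add command writes to.
const adder = node.files.add()
adder.on('data', (file) => console.log('added', file.path))
adder.write({ path: 'goodbye.txt', stream: fs.createReadStream('./goodbye.txt') })
adder.end()

// cat: the callback yields an exporter event emitter; each 'file' event
// carries a readable stream of that file's contents.
node.files.cat(someHash, (err, exportEvent) => {
  if (err) throw err
  exportEvent.on('file', (file) => file.stream.pipe(process.stdout))
})
```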