
Commit

Merge branch 'master' into fix/dns-cache-and-http-throttling-in-browser
License: MIT
Signed-off-by: Marcin Rataj <lidel@lidel.org>
lidel committed Sep 9, 2019
2 parents ad65329 + 3878f0f commit 11ab304
Showing 22 changed files with 533 additions and 729 deletions.
48 changes: 34 additions & 14 deletions .aegir.js
@@ -3,19 +3,11 @@
const IPFSFactory = require('ipfsd-ctl')
const parallel = require('async/parallel')
const MockPreloadNode = require('./test/utils/mock-preload-node')
const EchoHttpServer = require('interface-ipfs-core/src/utils/echo-http-server')
const EchoServer = require('interface-ipfs-core/src/utils/echo-http-server')

const ipfsdServer = IPFSFactory.createServer()
const preloadNode = MockPreloadNode.createNode()
const httpEchoServer = EchoHttpServer.createServer() // used by addFromURL

const batch = (call, done, ...srvs) => parallel(srvs.map(srv => cb => {
if (srv === ipfsdServer) {
(srv[call]()).then(() => cb())
} else {
srv[call](cb)
}
}), done)
const echoServer = EchoServer.createServer()

module.exports = {
bundlesize: { maxSize: '692kB' },
@@ -36,12 +28,40 @@ module.exports = {
},
hooks: {
node: {
pre: (cb) => batch('start', cb, preloadNode, httpEchoServer),
post: (cb) => batch('stop', cb, preloadNode, httpEchoServer)
pre: (cb) => {
parallel([
(cb) => preloadNode.start(cb),
(cb) => echoServer.start(cb)
], cb)
},
post: (cb) => {
parallel([
(cb) => preloadNode.stop(cb),
(cb) => echoServer.stop(cb)
], cb)
}
},
browser: {
pre: (cb) => batch('start', cb, ipfsdServer, preloadNode, httpEchoServer),
post: (cb) => batch('stop', cb, ipfsdServer, preloadNode, httpEchoServer)
pre: (cb) => {
parallel([
(cb) => {
ipfsdServer.start()
cb()
},
(cb) => preloadNode.start(cb),
(cb) => echoServer.start(cb)
], cb)
},
post: (cb) => {
parallel([
(cb) => {
ipfsdServer.stop()
cb()
},
(cb) => preloadNode.stop(cb),
(cb) => echoServer.stop(cb)
], cb)
}
}
}
}
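
For context (not part of the diff): the rewritten hooks call the callback-style async/parallel directly instead of going through the removed batch() helper. A minimal sketch of the parallel API the hooks rely on:

const parallel = require('async/parallel')

// each task receives a callback; the final callback fires once all
// tasks have completed, or immediately on the first error
parallel([
  (cb) => setTimeout(() => cb(null, 'preload node started'), 10),
  (cb) => setTimeout(() => cb(null, 'echo server started'), 20)
], (err, results) => {
  if (err) throw err
  console.log(results) // ['preload node started', 'echo server started']
})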
12 changes: 7 additions & 5 deletions package.json
@@ -95,10 +95,10 @@
"ipfs-bitswap": "~0.25.1",
"ipfs-block": "~0.8.1",
"ipfs-block-service": "~0.15.2",
"ipfs-http-client": "^34.0.0",
"ipfs-http-client": "^35.1.0",
"ipfs-http-response": "~0.3.1",
"ipfs-mfs": "~0.12.0",
"ipfs-multipart": "~0.1.1",
"ipfs-mfs": "^0.12.2",
"ipfs-multipart": "^0.2.0",
"ipfs-repo": "~0.26.6",
"ipfs-unixfs": "~0.1.16",
"ipfs-unixfs-exporter": "~0.37.7",
@@ -118,6 +118,8 @@
"is-pull-stream": "~0.0.0",
"is-stream": "^2.0.0",
"iso-url": "~0.4.6",
"it-pipe": "^1.0.1",
"it-to-stream": "^0.1.1",
"just-safe-set": "^2.1.0",
"kind-of": "^6.0.2",
"ky": "~0.13.0",
@@ -127,7 +129,7 @@
"libp2p-crypto": "~0.16.0",
"libp2p-delegated-content-routing": "^0.2.4",
"libp2p-delegated-peer-routing": "^0.2.4",
"libp2p-floodsub": "^0.17.2",
"libp2p-floodsub": "^0.18.0",
"libp2p-gossipsub": "~0.0.5",
"libp2p-kad-dht": "~0.15.3",
"libp2p-keychain": "~0.4.2",
@@ -143,7 +145,7 @@
"merge-options": "^1.0.1",
"mime-types": "^2.1.21",
"mkdirp": "~0.5.1",
"mortice": "^1.2.2",
"mortice": "^2.0.0",
"multiaddr": "^6.1.0",
"multiaddr-to-uri": "^5.0.0",
"multibase": "~0.6.0",
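
For context (not part of the diff): several of the bumps above also switch from tilde (~) to caret (^) ranges. For 0.x packages the two allow the same patch-level updates, so the switch mainly raises the minimum version. An illustration using the semver package:

const semver = require('semver')

semver.satisfies('0.12.9', '~0.12.0') // true  - patch updates allowed
semver.satisfies('0.13.0', '~0.12.0') // false
semver.satisfies('0.13.0', '^0.12.2') // false - for 0.x, ^ also stops before the next minor
semver.satisfies('35.9.0', '^35.1.0') // true  - for >=1.0.0, ^ allows minor updates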
92 changes: 40 additions & 52 deletions src/cli/commands/add.js
@@ -1,65 +1,19 @@
'use strict'

const pull = require('pull-stream/pull')
const through = require('pull-stream/throughs/through')
const end = require('pull-stream/sinks/on-end')
const promisify = require('promisify-es6')
const getFolderSize = promisify(require('get-folder-size'))
const byteman = require('byteman')
const mh = require('multihashes')
const multibase = require('multibase')
const toPull = require('stream-to-pull-stream')
const { createProgressBar } = require('../utils')
const { cidToString } = require('../../utils/cid')
const globSource = require('../../utils/files/glob-source')
const globSource = require('ipfs-utils/src/files/glob-source')

async function getTotalBytes (paths) {
const sizes = await Promise.all(paths.map(p => getFolderSize(p)))
return sizes.reduce((total, size) => total + size, 0)
}

function addPipeline (source, addStream, options, log) {
let finalHash

return new Promise((resolve, reject) => {
pull(
source,
addStream,
through((file) => {
const cid = finalHash = cidToString(file.hash, { base: options.cidBase })

if (options.silent || options.quieter) {
return
}

let message = cid

if (!options.quiet) {
// print the hash twice if we are piping from stdin
message = `added ${cid} ${options.file ? file.path || '' : cid}`.trim()
}

log(message)
}),
end((err) => {
if (err) {
// Tweak the error message and add more relevant info for the CLI
if (err.code === 'ERR_DIR_NON_RECURSIVE') {
err.message = `'${err.path}' is a directory, use the '-r' flag to specify directories`
}
return reject(err)
}

if (options.quieter) {
log(finalHash)
}

resolve()
})
)
})
}

module.exports = {
command: 'add [file...]',

@@ -199,17 +153,51 @@ module.exports = {
}

const source = argv.file
? globSource(...argv.file, { recursive: argv.recursive })
: toPull.source(process.stdin) // Pipe directly to ipfs.add
? globSource(argv.file, { recursive: argv.recursive })
: process.stdin // Pipe directly to ipfs.add

const adder = ipfs.addPullStream(options)
let finalHash

try {
await addPipeline(source, adder, argv, log)
} finally {
for await (const file of ipfs._addAsyncIterator(source, options)) {
if (argv.silent) {
continue
}

if (argv.quieter) {
finalHash = file.hash
continue
}

const cid = cidToString(file.hash, { base: argv.cidBase })
let message = cid

if (!argv.quiet) {
// print the hash twice if we are piping from stdin
message = `added ${cid} ${argv.file ? file.path || '' : cid}`.trim()
}

log(message)
}
} catch (err) {
if (bar) {
bar.terminate()
}

// Tweak the error message and add more relevant info for the CLI
if (err.code === 'ERR_DIR_NON_RECURSIVE') {
err.message = `'${err.path}' is a directory, use the '-r' flag to specify directories`
}

throw err
}

if (bar) {
bar.terminate()
}

if (argv.quieter) {
log(cidToString(finalHash, { base: argv.cidBase }))
}
})())
}
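
For context (not part of the diff): the CLI now feeds either a glob source or stdin straight into the internal async iterator added by this commit. A minimal sketch of the same consumption pattern, assuming an initialised js-ipfs node from this branch:

const globSource = require('ipfs-utils/src/files/glob-source')

async function addAndPrint (ipfs, paths) {
  // globSource is assumed to yield { path, content } entries, the
  // shape the add pipeline normalises before importing
  const source = globSource(paths, { recursive: true })
  let finalHash

  for await (const file of ipfs._addAsyncIterator(source, {})) {
    finalHash = file.hash
    console.log(`added ${file.hash} ${file.path || ''}`.trim())
  }

  return finalHash // the root entry is yielded last
}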
148 changes: 148 additions & 0 deletions src/core/components/files-regular/add-async-iterator.js
@@ -0,0 +1,148 @@
'use strict'

const importer = require('ipfs-unixfs-importer')
const normaliseAddInput = require('ipfs-utils/src/files/normalise-input')
const { parseChunkerString } = require('./utils')
const pipe = require('it-pipe')
const log = require('debug')('ipfs:add')
log.error = require('debug')('ipfs:add:error')

function noop () {}

module.exports = function (self) {
// Internal add func that gets used by all add funcs
return async function * addAsyncIterator (source, options) {
options = options || {}

const chunkerOptions = parseChunkerString(options.chunker)

const opts = Object.assign({}, {
shardSplitThreshold: self._options.EXPERIMENTAL.sharding
? 1000
: Infinity
}, options, {
chunker: chunkerOptions.chunker,
chunkerOptions: chunkerOptions.chunkerOptions
})

// CID v0 is for multihashes encoded with sha2-256
if (opts.hashAlg && opts.cidVersion !== 1) {
opts.cidVersion = 1
}

let total = 0

const prog = opts.progress || noop
const progress = (bytes) => {
total += bytes
prog(total)
}

opts.progress = progress

const iterator = pipe(
normaliseAddInput(source),
doImport(self, opts),
transformFile(self, opts),
preloadFile(self, opts),
pinFile(self, opts)
)

const releaseLock = await self._gcLock.readLock()

try {
yield * iterator
} finally {
releaseLock()
}
}
}

function doImport (ipfs, opts) {
return async function * (source) { // eslint-disable-line require-await
yield * importer(source, ipfs._ipld, opts)
}
}

function transformFile (ipfs, opts) {
return async function * (source) {
for await (const file of source) {
let cid = file.cid
const hash = cid.toBaseEncodedString()
let path = file.path ? file.path : hash

if (opts.wrapWithDirectory && !file.path) {
path = ''
}

if (opts.onlyHash) {
yield {
path,
hash,
size: file.unixfs.fileSize()
}

return
}

const node = await ipfs.object.get(file.cid, Object.assign({}, opts, { preload: false }))

if (opts.cidVersion === 1) {
cid = cid.toV1()
}

let size = node.size

if (Buffer.isBuffer(node)) {
size = node.length
}

yield {
path,
hash,
size
}
}
}
}

function preloadFile (ipfs, opts) {
return async function * (source) {
for await (const file of source) {
const isRootFile = !file.path || opts.wrapWithDirectory
? file.path === ''
: !file.path.includes('/')

const shouldPreload = isRootFile && !opts.onlyHash && opts.preload !== false

if (shouldPreload) {
ipfs._preload(file.hash)
}

yield file
}
}
}

function pinFile (ipfs, opts) {
return async function * (source) {
for await (const file of source) {
// Pin a file if it is the root dir of a recursive add or the single file
// of a direct add.
const pin = 'pin' in opts ? opts.pin : true
const isRootDir = !file.path.includes('/')
const shouldPin = pin && isRootDir && !opts.onlyHash && !opts.hashAlg

if (shouldPin) {
// Note: addAsyncIterator() has already taken a GC lock, so tell
// pin.add() not to take a (second) GC lock
await ipfs.pin.add(file.hash, {
preload: false,
lock: false
})
}

yield file
}
}
}
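
For context (not part of the diff): the pipeline above is built on it-pipe, where every stage is a function from one async iterable to the next, so the import, transform, preload and pin stages compose without intermediate buffering. A minimal sketch of the same composition pattern:

const pipe = require('it-pipe')

// each stage receives the previous stage's (async) iterable
const double = async function * (source) {
  for await (const n of source) yield n * 2
}

const addOne = async function * (source) {
  for await (const n of source) yield n + 1
}

async function main () {
  // a plain iterable is accepted as the first stage
  for await (const n of pipe([1, 2, 3], double, addOne)) {
    console.log(n) // 3, 5, 7
  }
}

main()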
(The remaining 18 changed files are not shown.)
