diff --git a/lib/content/read.js b/lib/content/read.js index 47587e4..f41b539 100644 --- a/lib/content/read.js +++ b/lib/content/read.js @@ -1,6 +1,6 @@ 'use strict' -const fs = require('@npmcli/fs') +const fs = require('fs/promises') const fsm = require('fs-minipass') const ssri = require('ssri') const contentPath = require('./path') diff --git a/lib/content/rm.js b/lib/content/rm.js index f733305..ce58d67 100644 --- a/lib/content/rm.js +++ b/lib/content/rm.js @@ -1,10 +1,8 @@ 'use strict' -const util = require('util') - +const fs = require('fs/promises') const contentPath = require('./path') const { hasContent } = require('./read') -const rimraf = util.promisify(require('rimraf')) module.exports = rm @@ -12,7 +10,7 @@ async function rm (cache, integrity) { const content = await hasContent(cache, integrity) // ~pretty~ sure we can't end up with a content lacking sri, but be safe if (content && content.sri) { - await rimraf(contentPath(cache, content.sri)) + await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true }) return true } else { return false diff --git a/lib/content/write.js b/lib/content/write.js index 0e8c0f4..d799ae8 100644 --- a/lib/content/write.js +++ b/lib/content/write.js @@ -1,17 +1,14 @@ 'use strict' const events = require('events') -const util = require('util') const contentPath = require('./path') -const fixOwner = require('../util/fix-owner') -const fs = require('@npmcli/fs') +const fs = require('fs/promises') const moveFile = require('../util/move-file') const Minipass = require('minipass') const Pipeline = require('minipass-pipeline') const Flush = require('minipass-flush') const path = require('path') -const rimraf = util.promisify(require('rimraf')) const ssri = require('ssri') const uniqueFilename = require('unique-filename') const fsm = require('fs-minipass') @@ -40,7 +37,7 @@ async function write (cache, data, opts = {}) { return { integrity: sri, size: data.length } } finally { if (!tmp.moved) { - await 
rimraf(tmp.target) + await fs.rm(tmp.target, { recursive: true, force: true }) } } } @@ -111,7 +108,7 @@ async function handleContent (inputStream, cache, opts) { return res } finally { if (!tmp.moved) { - await rimraf(tmp.target) + await fs.rm(tmp.target, { recursive: true, force: true }) } } } @@ -152,7 +149,7 @@ async function pipeToTmp (inputStream, cache, tmpTarget, opts) { async function makeTmp (cache, opts) { const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) - await fixOwner.mkdirfix(cache, path.dirname(tmpTarget)) + await fs.mkdir(path.dirname(tmpTarget), { recursive: true }) return { target: tmpTarget, moved: false, @@ -163,10 +160,9 @@ async function moveToDestination (tmp, cache, sri, opts) { const destination = contentPath(cache, sri) const destDir = path.dirname(destination) - await fixOwner.mkdirfix(cache, destDir) + await fs.mkdir(destDir, { recursive: true }) await moveFile(tmp.target, destination) tmp.moved = true - await fixOwner.chownr(cache, destination) } function sizeError (expected, found) { diff --git a/lib/entry-index.js b/lib/entry-index.js index 96a5ef9..e81ef52 100644 --- a/lib/entry-index.js +++ b/lib/entry-index.js @@ -1,20 +1,23 @@ 'use strict' -const util = require('util') const crypto = require('crypto') -const fs = require('@npmcli/fs') +const { + appendFile, + mkdir, + readFile, + readdir, + rm, + writeFile, +} = require('fs/promises') const Minipass = require('minipass') const path = require('path') const ssri = require('ssri') const uniqueFilename = require('unique-filename') const contentPath = require('./content/path') -const fixOwner = require('./util/fix-owner') const hashToSegments = require('./util/hash-to-segments') const indexV = require('../package.json')['cache-version'].index const moveFile = require('@npmcli/move-file') -const _rimraf = require('rimraf') -const rimraf = util.promisify(_rimraf) module.exports.NotFoundError = class NotFoundError extends Error { constructor (cache, key) { @@ 
-65,7 +68,7 @@ async function compact (cache, key, matchFn, opts = {}) { const setup = async () => { const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) - await fixOwner.mkdirfix(cache, path.dirname(target)) + await mkdir(path.dirname(target), { recursive: true }) return { target, moved: false, @@ -74,24 +77,17 @@ async function compact (cache, key, matchFn, opts = {}) { const teardown = async (tmp) => { if (!tmp.moved) { - return rimraf(tmp.target) + return rm(tmp.target, { recursive: true, force: true }) } } const write = async (tmp) => { - await fs.writeFile(tmp.target, newIndex, { flag: 'wx' }) - await fixOwner.mkdirfix(cache, path.dirname(bucket)) + await writeFile(tmp.target, newIndex, { flag: 'wx' }) + await mkdir(path.dirname(bucket), { recursive: true }) // we use @npmcli/move-file directly here because we // want to overwrite the existing file await moveFile(tmp.target, bucket) tmp.moved = true - try { - await fixOwner.chownr(cache, bucket) - } catch (err) { - if (err.code !== 'ENOENT') { - throw err - } - } } // write the file atomically @@ -123,7 +119,7 @@ async function insert (cache, key, integrity, opts = {}) { metadata, } try { - await fixOwner.mkdirfix(cache, path.dirname(bucket)) + await mkdir(path.dirname(bucket), { recursive: true }) const stringified = JSON.stringify(entry) // NOTE - Cleverness ahoy! // @@ -133,19 +129,13 @@ async function insert (cache, key, integrity, opts = {}) { // // Thanks to @isaacs for the whiteboarding session that ended up with // this. - await fs.appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) - await fixOwner.chownr(cache, bucket) + await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) } catch (err) { if (err.code === 'ENOENT') { return undefined } throw err - // There's a class of race conditions that happen when things get deleted - // during fixOwner, or between the two mkdirfix/chownr calls. 
- // - // It's perfectly fine to just not bother in those cases and lie - // that the index entry was written. Because it's a cache. } return formatEntry(cache, entry) } @@ -180,7 +170,7 @@ function del (cache, key, opts = {}) { } const bucket = bucketPath(cache, key) - return rimraf(bucket) + return rm(bucket, { recursive: true, force: true }) } module.exports.lsStream = lsStream @@ -246,7 +236,7 @@ async function ls (cache) { module.exports.bucketEntries = bucketEntries async function bucketEntries (bucket, filter) { - const data = await fs.readFile(bucket, 'utf8') + const data = await readFile(bucket, 'utf8') return _bucketEntries(data, filter) } @@ -266,9 +256,8 @@ function _bucketEntries (data, filter) { let obj try { obj = JSON.parse(pieces[1]) - } catch (e) { - // Entry is corrupted! - return + } catch (_) { + // eslint-ignore-next-line no-empty-block } // coverage disabled here, no need to test with an entry that parses to something falsey // istanbul ignore else @@ -331,7 +320,7 @@ function formatEntry (cache, entry, keepAll) { } function readdirOrEmpty (dir) { - return fs.readdir(dir).catch((err) => { + return readdir(dir).catch((err) => { if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { return [] } diff --git a/lib/rm.js b/lib/rm.js index 5f00071..a94760c 100644 --- a/lib/rm.js +++ b/lib/rm.js @@ -1,11 +1,10 @@ 'use strict' -const util = require('util') - +const { rm } = require('fs/promises') +const glob = require('./util/glob.js') const index = require('./entry-index') const memo = require('./memoization') const path = require('path') -const rimraf = util.promisify(require('rimraf')) const rmContent = require('./content/rm') module.exports = entry @@ -25,7 +24,8 @@ function content (cache, integrity) { module.exports.all = all -function all (cache) { +async function all (cache) { memo.clearMemoized() - return rimraf(path.join(cache, '*(content-*|index-*)')) + const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, 
nosort: true }) + return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true }))) } diff --git a/lib/util/fix-owner.js b/lib/util/fix-owner.js deleted file mode 100644 index c6a5960..0000000 --- a/lib/util/fix-owner.js +++ /dev/null @@ -1,91 +0,0 @@ -'use strict' - -const util = require('util') - -const chownr = util.promisify(require('chownr')) -const mkdirp = require('mkdirp') -const inflight = require('promise-inflight') -const inferOwner = require('infer-owner') - -// Memoize getuid()/getgid() calls. -// patch process.setuid/setgid to invalidate cached value on change -const self = { uid: null, gid: null } -const getSelf = () => { - if (typeof self.uid !== 'number') { - self.uid = process.getuid() - const setuid = process.setuid - process.setuid = (uid) => { - self.uid = null - process.setuid = setuid - return process.setuid(uid) - } - } - if (typeof self.gid !== 'number') { - self.gid = process.getgid() - const setgid = process.setgid - process.setgid = (gid) => { - self.gid = null - process.setgid = setgid - return process.setgid(gid) - } - } -} - -module.exports.chownr = fixOwner - -async function fixOwner (cache, filepath) { - if (!process.getuid) { - // This platform doesn't need ownership fixing - return - } - - getSelf() - if (self.uid !== 0) { - // almost certainly can't chown anyway - return - } - - const { uid, gid } = await inferOwner(cache) - - // No need to override if it's already what we used. - if (self.uid === uid && self.gid === gid) { - return - } - - return inflight('fixOwner: fixing ownership on ' + filepath, () => - chownr( - filepath, - typeof uid === 'number' ? uid : self.uid, - typeof gid === 'number' ? 
gid : self.gid - ).catch((err) => { - if (err.code === 'ENOENT') { - return null - } - - throw err - }) - ) -} - -module.exports.mkdirfix = mkdirfix - -async function mkdirfix (cache, p, cb) { - // we have to infer the owner _before_ making the directory, even though - // we aren't going to use the results, since the cache itself might not - // exist yet. If we mkdirp it, then our current uid/gid will be assumed - // to be correct if it creates the cache folder in the process. - await inferOwner(cache) - try { - const made = await mkdirp(p) - if (made) { - await fixOwner(cache, made) - return made - } - } catch (err) { - if (err.code === 'EEXIST') { - await fixOwner(cache, p) - return null - } - throw err - } -} diff --git a/lib/util/glob.js b/lib/util/glob.js new file mode 100644 index 0000000..8908a3a --- /dev/null +++ b/lib/util/glob.js @@ -0,0 +1,7 @@ +'use strict' + +const { promisify } = require('util') +const glob = promisify(require('glob')) + +const globify = (pattern) => pattern.split('\\').join('/') +module.exports = (path, options) => glob(globify(path), options) diff --git a/lib/util/move-file.js b/lib/util/move-file.js index a0b4041..2d4de41 100644 --- a/lib/util/move-file.js +++ b/lib/util/move-file.js @@ -1,6 +1,6 @@ 'use strict' -const fs = require('@npmcli/fs') +const fs = require('fs/promises') const move = require('@npmcli/move-file') const pinflight = require('promise-inflight') diff --git a/lib/util/tmp.js b/lib/util/tmp.js index b4437cf..0bf5302 100644 --- a/lib/util/tmp.js +++ b/lib/util/tmp.js @@ -1,8 +1,7 @@ 'use strict' -const fs = require('@npmcli/fs') - -const fixOwner = require('./fix-owner') +const { withTempDir } = require('@npmcli/fs') +const fs = require('fs/promises') const path = require('path') module.exports.mkdir = mktmpdir @@ -23,11 +22,5 @@ function withTmp (cache, opts, cb) { cb = opts opts = {} } - return fs.withTempDir(path.join(cache, 'tmp'), cb, opts) -} - -module.exports.fix = fixtmpdir - -function fixtmpdir (cache) { 
- return fixOwner(cache, path.join(cache, 'tmp')) + return withTempDir(path.join(cache, 'tmp'), cb, opts) } diff --git a/lib/verify.js b/lib/verify.js index 1ee9a69..33f566c 100644 --- a/lib/verify.js +++ b/lib/verify.js @@ -1,20 +1,21 @@ 'use strict' -const util = require('util') - +const { + mkdir, + readFile, + rm, + stat, + truncate, + writeFile, +} = require('fs/promises') const pMap = require('p-map') const contentPath = require('./content/path') -const fixOwner = require('./util/fix-owner') -const fs = require('@npmcli/fs') const fsm = require('fs-minipass') -const glob = util.promisify(require('glob')) +const glob = require('./util/glob.js') const index = require('./entry-index') const path = require('path') -const rimraf = util.promisify(require('rimraf')) const ssri = require('ssri') -const globify = pattern => pattern.split('\\').join('/') - const hasOwnProperty = (obj, key) => Object.prototype.hasOwnProperty.call(obj, key) @@ -77,9 +78,7 @@ async function markEndTime (cache, opts) { async function fixPerms (cache, opts) { opts.log.silly('verify', 'fixing cache permissions') - await fixOwner.mkdirfix(cache, cache) - // TODO - fix file permissions too - await fixOwner.chownr(cache, cache) + await mkdir(cache, { recursive: true }) return null } @@ -90,7 +89,7 @@ async function fixPerms (cache, opts) { // 2. Mark each integrity value as "live" // 3. Read entire filesystem tree in `content-vX/` dir // 4. If content is live, verify its checksum and delete it if it fails -// 5. If content is not marked as live, rimraf it. +// 5. If content is not marked as live, rm it. 
// async function garbageCollect (cache, opts) { opts.log.silly('verify', 'garbage collecting content') @@ -107,7 +106,7 @@ async function garbageCollect (cache, opts) { indexStream.on('end', resolve).on('error', reject) }) const contentDir = contentPath.contentDir(cache) - const files = await glob(globify(path.join(contentDir, '**')), { + const files = await glob(path.join(contentDir, '**'), { follow: false, nodir: true, nosort: true, @@ -139,8 +138,8 @@ async function garbageCollect (cache, opts) { } else { // No entries refer to this content. We can delete. stats.reclaimedCount++ - const s = await fs.stat(f) - await rimraf(f) + const s = await stat(f) + await rm(f, { recursive: true, force: true }) stats.reclaimedSize += s.size } return stats @@ -153,7 +152,7 @@ async function garbageCollect (cache, opts) { async function verifyContent (filepath, sri) { const contentInfo = {} try { - const { size } = await fs.stat(filepath) + const { size } = await stat(filepath) contentInfo.size = size contentInfo.valid = true await ssri.checkStream(new fsm.ReadStream(filepath), sri) @@ -165,7 +164,7 @@ async function verifyContent (filepath, sri) { throw err } - await rimraf(filepath) + await rm(filepath, { recursive: true, force: true }) contentInfo.valid = false } return contentInfo @@ -211,13 +210,13 @@ async function rebuildIndex (cache, opts) { } async function rebuildBucket (cache, bucket, stats, opts) { - await fs.truncate(bucket._path) + await truncate(bucket._path) // This needs to be serialized because cacache explicitly // lets very racy bucket conflicts clobber each other. 
for (const entry of bucket) { const content = contentPath(cache, entry.integrity) try { - await fs.stat(content) + await stat(content) await index.insert(cache, entry.key, entry.integrity, { metadata: entry.metadata, size: entry.size, @@ -236,19 +235,18 @@ async function rebuildBucket (cache, bucket, stats, opts) { function cleanTmp (cache, opts) { opts.log.silly('verify', 'cleaning tmp directory') - return rimraf(path.join(cache, 'tmp')) + return rm(path.join(cache, 'tmp'), { recursive: true, force: true }) } async function writeVerifile (cache, opts) { const verifile = path.join(cache, '_lastverified') opts.log.silly('verify', 'writing verifile to ' + verifile) - await fs.writeFile(verifile, `${Date.now()}`) - return fixOwner.chownr(cache, verifile) + return writeFile(verifile, `${Date.now()}`) } module.exports.lastRun = lastRun async function lastRun (cache) { - const data = await fs.readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' }) + const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' }) return new Date(+data) } diff --git a/package.json b/package.json index b23eb7f..888bb8c 100644 --- a/package.json +++ b/package.json @@ -45,21 +45,17 @@ ], "license": "ISC", "dependencies": { - "@npmcli/fs": "^2.1.0", + "@npmcli/fs": "^3.0.0", "@npmcli/move-file": "^2.0.0", - "chownr": "^2.0.0", "fs-minipass": "^2.1.0", "glob": "^8.0.1", - "infer-owner": "^1.0.4", "lru-cache": "^7.7.1", "minipass": "^3.1.6", "minipass-collect": "^1.0.2", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", - "mkdirp": "^1.0.4", "p-map": "^4.0.0", "promise-inflight": "^1.0.1", - "rimraf": "^3.0.2", "ssri": "^9.0.0", "tar": "^6.1.11", "unique-filename": "^2.0.0" diff --git a/test/content/read.js b/test/content/read.js index 77b1d2d..7ba83f2 100644 --- a/test/content/read.js +++ b/test/content/read.js @@ -1,6 +1,6 @@ 'use strict' -const fs = require('@npmcli/fs') +const fs = require('fs') const path = require('path') const ssri = require('ssri') 
const t = require('tap') @@ -18,14 +18,18 @@ permissionError.code = 'EPERM' // helpers const getRead = (t, opts) => t.mock('../../lib/content/read', opts) const getReadStatFailure = (t, err) => getRead(t, { - '@npmcli/fs': Object.assign({}, require('@npmcli/fs'), { - async stat (path) { + fs: { + ...fs, + statSync: () => { throw err }, - statSync () { + }, + 'fs/promises': { + ...fs.promises, + stat: async (path) => { throw err }, - }), + }, }) t.test('read: returns a Promise with cache content data', async t => { @@ -215,11 +219,12 @@ t.test('read: returns only first result if other hashes fails', function (t) { t.test('read: opening large files', function (t) { const CACHE = t.testdir() const mockedRead = getRead(t, { - '@npmcli/fs': Object.assign({}, require('@npmcli/fs'), { - async stat (path) { + 'fs/promises': { + ...fs.promises, + stat: async (path) => { return { size: Number.MAX_SAFE_INTEGER } }, - }), + }, 'fs-minipass': { ReadStream: class { constructor (path, opts) { @@ -307,6 +312,6 @@ t.test('copy: copies content to a destination path', async t => { ) const DEST = path.join(CACHE, 'foobar-file') await read.copy(CACHE, INTEGRITY, DEST) - const data = await fs.readFile(DEST) + const data = await fs.promises.readFile(DEST) t.same(data, CONTENT, 'file successfully copied') }) diff --git a/test/content/rm.js b/test/content/rm.js index 5db1172..fc09f1d 100644 --- a/test/content/rm.js +++ b/test/content/rm.js @@ -1,7 +1,7 @@ 'use strict' const contentPath = require('../../lib/content/path') -const fs = require('@npmcli/fs') +const fs = require('fs/promises') const t = require('tap') const CacheContent = require('../fixtures/cache-content') diff --git a/test/content/write.chownr.js b/test/content/write.chownr.js deleted file mode 100644 index b998da8..0000000 --- a/test/content/write.chownr.js +++ /dev/null @@ -1,47 +0,0 @@ -'use strict' - -const NEWUID = process.getuid() + 1 -const NEWGID = process.getgid() + 1 - -process.getuid = () => 0 - -const path = 
require('path') - -const ssri = require('ssri') -const t = require('tap') - -const contentPath = require('../../lib/content/path') - -t.test('infers ownership from cache folder owner', async t => { - const CACHE = t.testdir({ cache: {} }) - const CONTENT = 'foobarbaz' - const INTEGRITY = ssri.fromData(CONTENT) - const updatedPaths = [] - const write = t.mock('../../lib/content/write', { - 'infer-owner': async function (c) { - return { uid: NEWUID, gid: NEWGID } - }, - chownr: function (p, uid, gid, cb) { - process.nextTick(function () { - const rel = path.relative(CACHE, p) - t.equal(uid, NEWUID, 'new uid set for ' + rel) - t.equal(gid, NEWGID, 'new gid set for ' + rel) - updatedPaths.push(p) - cb(null) - }) - }, - }) - t.plan(7) - await write.stream(CACHE, { hashAlgorithm: 'sha1' }).end(CONTENT).promise() - const cpath = contentPath(CACHE, INTEGRITY) - const expectedPaths = [ - path.join(CACHE, path.relative(CACHE, cpath).split(path.sep)[0]), - cpath, - path.join(CACHE, 'tmp'), - ] - t.same( - updatedPaths.sort(), - expectedPaths, - 'all paths that needed user stuff set got set' - ) -}) diff --git a/test/content/write.js b/test/content/write.js index e5abf50..e91050a 100644 --- a/test/content/write.js +++ b/test/content/write.js @@ -1,10 +1,9 @@ 'use strict' const events = require('events') -const fs = require('@npmcli/fs') +const fs = require('fs') const Minipass = require('minipass') const path = require('path') -const rimraf = require('rimraf') const ssri = require('ssri') const t = require('tap') @@ -211,7 +210,7 @@ t.test('exits normally if file already open', (t) => { .promise() t.same(integrity, INTEGRITY, 'returns a matching digest') fs.closeSync(fd) - rimraf.sync(contentPath(CACHE, INTEGRITY)) + fs.rmSync(contentPath(CACHE, INTEGRITY), { recursive: true, force: true }) t.end() }) }) diff --git a/test/entry-index.insert.js b/test/entry-index.insert.js index 79fa501..fb54820 100644 --- a/test/entry-index.insert.js +++ b/test/entry-index.insert.js @@ -2,7 
+2,7 @@ const CacheIndex = require('./fixtures/cache-index') const contentPath = require('../lib/content/path') -const fs = require('@npmcli/fs') +const fs = require('fs/promises') const t = require('tap') const index = require('../lib/entry-index') @@ -220,5 +220,34 @@ t.test('extremely long keys', async t => { ) }) +t.test('ENOENT from appendFile is ignored', async (t) => { + const cache = t.testdir() + + const indexMocked = t.mock('../lib/entry-index.js', { + 'fs/promises': { + ...fs, + appendFile: async () => { + throw Object.assign(new Error('fake enoent'), { code: 'ENOENT' }) + }, + }, + }) + + await t.resolves(() => indexMocked.insert(cache, key, integrity, { size })) +}) + +t.test('generic error from appendFile rejects', async (t) => { + const cache = t.testdir() + + const indexMocked = t.mock('../lib/entry-index.js', { + 'fs/promises': { + ...fs, + appendFile: async () => { + throw Object.assign(new Error('fake eperm'), { code: 'EPERM' }) + }, + }, + }) + + await t.rejects(() => indexMocked.insert(cache, key, integrity, { size }), { code: 'EPERM' }) +}) + t.test('concurrent writes') -t.test('correct ownership') diff --git a/test/entry-index.js b/test/entry-index.js index 4082eb0..24382a4 100644 --- a/test/entry-index.js +++ b/test/entry-index.js @@ -1,6 +1,6 @@ 'use strict' -const fs = require('@npmcli/fs') +const fs = require('fs') const path = require('path') const ssri = require('ssri') @@ -17,29 +17,20 @@ missingFileError.code = 'ENOENT' const getEntryIndex = (t, opts) => t.mock('../lib/entry-index', opts) const getEntryIndexReadFileFailure = (t, err) => getEntryIndex(t, { - '@npmcli/fs': Object.assign({}, fs, { + 'fs/promises': { + ...fs.promises, readFile: async (path, encode) => { throw err }, + }, + fs: { + ...fs, readFileSync: () => { throw genericError }, - }), + }, }) -const getEntryIndexFixOwnerFailure = (err) => { - const chownr = () => Promise.reject(err) - chownr.sync = () => { - throw err - } - return getEntryIndex(t, { - 
'../lib/util/fix-owner': { - mkdirfix: require('../lib/util/fix-owner').mkdirfix, - chownr, - }, - }) -} - // helpers const CONTENT = Buffer.from('foobarbaz', 'utf8') const INTEGRITY = ssri.fromData(CONTENT).toString() @@ -140,35 +131,6 @@ t.test('compact: validateEntry allows for keeping null integrity', async (t) => t.equal(newEntries.length, 1, 'bucket was deduplicated') }) -t.test('compact: ENOENT in chownr does not cause failure', async (t) => { - const cache = t.testdir(cacheContent) - await Promise.all([ - index.insert(cache, KEY, INTEGRITY, { metadata: { rev: 1 } }), - index.insert(cache, KEY, INTEGRITY, { metadata: { rev: 2 } }), - index.insert(cache, KEY, INTEGRITY, { metadata: { rev: 2 } }), - index.insert(cache, KEY, INTEGRITY, { metadata: { rev: 1 } }), - ]) - - const { compact } = getEntryIndexFixOwnerFailure(missingFileError) - const filter = (entryA, entryB) => entryA.metadata.rev === entryB.metadata.rev - const compacted = await compact(cache, KEY, filter) - t.equal(compacted.length, 2, 'deduplicated') -}) - -t.test('compact: generic error in chownr does cause failure', async (t) => { - const cache = t.testdir(cacheContent) - await Promise.all([ - index.insert(cache, KEY, INTEGRITY, { metadata: { rev: 1 } }), - index.insert(cache, KEY, INTEGRITY, { metadata: { rev: 2 } }), - index.insert(cache, KEY, INTEGRITY, { metadata: { rev: 2 } }), - index.insert(cache, KEY, INTEGRITY, { metadata: { rev: 1 } }), - ]) - - const { compact } = getEntryIndexFixOwnerFailure(genericError) - const filter = (entryA, entryB) => entryA.metadata.rev === entryB.metadata.rev - return t.rejects(compact(cache, KEY, filter), { code: 'ERR' }, 'promise rejected') -}) - t.test('compact: error in moveFile removes temp', async (t) => { const cache = t.testdir(cacheContent) await Promise.all([ @@ -241,33 +203,9 @@ t.test('find: unknown error on finding entries', (t) => { ) }) -t.test('insert: missing files on fixing ownership', (t) => { - const cache = t.testdir(cacheContent) - 
const { insert } = getEntryIndexFixOwnerFailure(missingFileError) - - t.plan(1) - t.resolves( - insert(cache, KEY, INTEGRITY), - 'should insert entry with no errors' - ) -}) - -t.test('insert: unknown errors on fixing ownership', (t) => { - const cache = t.testdir(cacheContent) - const { insert } = getEntryIndexFixOwnerFailure(genericError) - - t.plan(1) - t.rejects( - insert(cache, KEY, INTEGRITY), - genericError, - 'should throw the unknown error' - ) -}) - t.test('lsStream: unknown error reading files', async (t) => { const cache = t.testdir(cacheContent) await index.insert(cache, KEY, INTEGRITY) - const { lsStream } = getEntryIndexReadFileFailure(t, genericError) return new Promise((resolve) => { @@ -295,11 +233,12 @@ t.test('lsStream: missing files error', async (t) => { t.test('lsStream: unknown error reading dirs', (t) => { const cache = t.testdir(cacheContent) const { lsStream } = getEntryIndex(t, { - '@npmcli/fs': Object.assign({}, require('@npmcli/fs'), { + 'fs/promises': { + ...fs.promises, readdir: async (path) => { throw genericError }, - }), + }, }) lsStream(cache) diff --git a/test/get.js b/test/get.js index 9deb0ae..687f177 100644 --- a/test/get.js +++ b/test/get.js @@ -1,12 +1,9 @@ 'use strict' -const util = require('util') - -const fs = require('@npmcli/fs') +const fs = require('fs/promises') const index = require('../lib/entry-index') const memo = require('../lib/memoization') const path = require('path') -const rimraf = util.promisify(require('rimraf')) const t = require('tap') const ssri = require('ssri') @@ -326,13 +323,13 @@ t.test('get.copy with fs.copyfile', (t) => { }) .then((data) => { t.same(data, CONTENT, 'data copied by key matches') - return rimraf(DEST) + return fs.rm(DEST, { recursive: true, force: true }) }) .then(() => get.copy.byDigest(CACHE, INTEGRITY, DEST)) .then(() => fs.readFile(DEST)) .then((data) => { t.same(data, CONTENT, 'data copied by digest matches') - return rimraf(DEST) + return fs.rm(DEST, { recursive: true, force: 
true }) }) }) @@ -368,7 +365,7 @@ t.test('memoizes data on bulk read', (t) => { }, 'data inserted into memoization cache' ) - return rimraf(CACHE) + return fs.rm(CACHE, { recursive: true, force: true }) }) .then(() => { return get(CACHE, KEY) @@ -467,7 +464,7 @@ t.test('memoizes data on stream read', async t => { null, 'entry memoization filtered by cache' ) - await rimraf(CACHE) + await fs.rm(CACHE, { recursive: true, force: true }) await t.resolveMatch( streamGet(false, CACHE, KEY), { diff --git a/test/put.js b/test/put.js index 9e3a5c0..375e3dd 100644 --- a/test/put.js +++ b/test/put.js @@ -1,6 +1,6 @@ 'use strict' -const fs = require('@npmcli/fs') +const fs = require('fs/promises') const index = require('../lib/entry-index') const memo = require('../lib/memoization') const t = require('tap') diff --git a/test/rm.js b/test/rm.js index dd534a1..783bf83 100644 --- a/test/rm.js +++ b/test/rm.js @@ -1,6 +1,6 @@ 'use strict' -const fs = require('@npmcli/fs') +const fs = require('fs/promises') const index = require('../lib/entry-index') const path = require('path') const t = require('tap') diff --git a/test/util/fix-owner.js b/test/util/fix-owner.js deleted file mode 100644 index 71df9c3..0000000 --- a/test/util/fix-owner.js +++ /dev/null @@ -1,146 +0,0 @@ -'use strict' - -const os = require('os') - -const t = require('tap') -const uniqueFilename = require('unique-filename') - -// defines reusable errors -const genericError = new Error('ERR') -genericError.code = 'ERR' -const missingFileError = new Error('ENOENT') -missingFileError.code = 'ENOENT' -const pathExistsError = new Error('EEXIST') -pathExistsError.code = 'EEXIST' - -// helpers -const CACHE = t.testdir() -const filename = uniqueFilename(os.tmpdir()) -const getuid = process.getuid -const patchesGetuid = (t) => { - process.getuid = () => 0 - t.teardown(() => { - process.getuid = getuid - }) -} -const getFixOwner = (t, opts) => t.mock('../../lib/util/fix-owner', opts) - -// chownr error handling tests - 
-t.test('attempt to chownr existing path', async t => { - patchesGetuid(t) - const fixOwner = getFixOwner(t, { - chownr: function chownr (path, uid, gid, cb) { - cb(missingFileError) - }, - 'infer-owner': () => Promise.resolve({}), - }) - - await t.resolves(fixOwner.chownr(CACHE, filename), 'should not throw if path exists') -}) - -t.test('attempt to chownr unknown error', (t) => { - patchesGetuid(t) - const fixOwner = getFixOwner(t, { - chownr: function chownr (path, uid, gid, cb) { - cb(genericError) - }, - 'infer-owner': () => Promise.resolve({}), - }) - - t.plan(1) - t.rejects(() => fixOwner.chownr(CACHE, filename), 'should throw unknown errors') -}) - -t.test('attempt to chownr using same user', async t => { - patchesGetuid(t) - const fixOwner = getFixOwner(t, { - 'infer-owner': () => Promise.resolve({ - uid: process.getuid(), - gid: process.getgid(), - }), - }) - - await t.resolves(fixOwner.chownr(CACHE, filename), 'should not throw') -}) - -t.test('calls setuid setgid to replace user', async t => { - const setuid = process.setuid - const setgid = process.setgid - process.getuid = () => 0 - process.setuid = () => undefined - process.setgid = () => undefined - t.teardown(() => { - process.getuid = getuid - process.stuid = setuid - process.stgid = setgid - }) - const fixOwner = getFixOwner(t, { - 'infer-owner': () => { - process.setuid(process.getuid()) - process.setgid(process.getgid()) - return Promise.resolve({ - uid: process.getuid(), - gid: process.getgid(), - }) - }, - }) - - await t.resolves(fixOwner.chownr(CACHE, filename), 'should not throw') -}) - -t.test('attempt to chownr on platforms that do not need ownership fix', async t => { - process.getuid = undefined - t.teardown(() => { - process.getuid = getuid - }) - const fixOwner = require('../../lib/util/fix-owner') - - await t.resolves(fixOwner.chownr(CACHE, filename), 'should not throw') -}) - -t.test('uses infer-owner ids instead of process-retrieved if valid', async (t) => { - const getgid = 
process.getgid - process.getuid = () => 0 - process.getgid = () => 1 - t.teardown(() => { - process.getuid = getuid - process.getgid = getgid - }) - const fixOwner = getFixOwner(t, { - chownr: (path, uid, gid, cb) => { - t.equal(path, filename, 'should match filename') - t.equal(uid, 501, 'should match uid') - t.equal(gid, 20, 'should match gid') - return cb() - }, - 'infer-owner': () => { - return Promise.resolve({ - uid: 501, - gid: 20, - }) - }, - }) - - await fixOwner.chownr(CACHE, filename) -}) - -// mkdirfix error handling tests - -t.test('attempt to mkdirfix existing path', async t => { - const fixOwner = getFixOwner(t, { - mkdirp: () => Promise.reject(pathExistsError), - }) - - const res = await fixOwner.mkdirfix(CACHE, filename) - t.notOk(res, 'should not throw if path exists') -}) - -t.test('attempt to mkdirfix unknown error', (t) => { - const fixOwner = getFixOwner(t, { - mkdirp: () => Promise.reject(genericError), - }) - - t.plan(1) - t.rejects(() => fixOwner.mkdirfix(CACHE, filename), 'should throw unknown errors') -}) diff --git a/test/util/move-file.js b/test/util/move-file.js index e21e318..2c35b34 100644 --- a/test/util/move-file.js +++ b/test/util/move-file.js @@ -1,6 +1,13 @@ 'use strict' -const fs = require('@npmcli/fs') +const { + chmod, + open, + readFile, + readdir, + stat, +} = require('fs/promises') +const fs = require('fs') const path = require('path') const t = require('tap') @@ -12,11 +19,11 @@ t.test('move a file', function (t) { }) return moveFile(testDir + '/src', testDir + '/dest') .then(() => { - return fs.readFile(testDir + '/dest', 'utf8') + return readFile(testDir + '/dest', 'utf8') }) .then((data) => { t.equal(data, 'foo', 'file data correct') - return fs.stat(testDir + '/src').catch((err) => { + return stat(testDir + '/src').catch((err) => { t.ok(err, 'src read error') t.equal(err.code, 'ENOENT', 'src does not exist') }) @@ -30,11 +37,11 @@ t.test('does not clobber existing files', function (t) { }) return moveFile(testDir + 
'/src', testDir + '/dest') .then(() => { - return fs.readFile(testDir + '/dest', 'utf8') + return readFile(testDir + '/dest', 'utf8') }) .then((data) => { t.equal(data, 'bar', 'conflicting file left intact') - return fs.stat(testDir + '/src').catch((err) => { + return stat(testDir + '/src').catch((err) => { t.ok(err, 'src read error') t.equal(err.code, 'ENOENT', 'src file still deleted') }) @@ -47,7 +54,7 @@ t.test('does not move a file into an existing directory', async t => { dest: {}, }) await moveFile(testDir + '/src', testDir + '/dest') - const files = await fs.readdir(testDir + '/dest') + const files = await readdir(testDir + '/dest') t.equal(files.length, 0, 'directory remains empty') }) @@ -57,17 +64,17 @@ t.test('does not error if destination file is open', function (t) { dest: 'bar', }) - return fs.open(testDir + '/dest', 'r+').then((fd) => { + return open(testDir + '/dest', 'r+').then((fh) => { return moveFile(testDir + '/src', testDir + '/dest') .then(() => { - return fs.close(fd) + return fh.close() }) .then(() => { - return fs.readFile(testDir + '/dest', 'utf8') + return readFile(testDir + '/dest', 'utf8') }) .then((data) => { t.equal(data, 'bar', 'destination left intact') - return fs.stat(testDir + '/src').catch((err) => { + return stat(testDir + '/src').catch((err) => { t.ok(err, 'src read error') t.equal(err.code, 'ENOENT', 'src does not exist') }) @@ -83,7 +90,7 @@ t.test('fallback to renaming on missing files post-move', async t => { const missingFileError = new Error('ENOENT') missingFileError.code = 'ENOENT' const mockFS = { - ...fs, + ...fs.promises, async unlink (path) { throw missingFileError }, @@ -92,14 +99,14 @@ t.test('fallback to renaming on missing files post-move', async t => { }, } const mockedMoveFile = t.mock('../../lib/util/move-file', { - '@npmcli/fs': mockFS, + 'fs/promises': mockFS, }) await mockedMoveFile(testDir + '/src', testDir + '/dest') - const data = await fs.readFile(testDir + '/dest', 'utf8') + const data = await 
readFile(testDir + '/dest', 'utf8') t.equal(data, 'foo', 'file data correct') await t.rejects( - fs.stat(testDir + '/src'), + stat(testDir + '/src'), { code: 'ENOENT' }, './src does not exist' ) @@ -115,7 +122,7 @@ t.test('non ENOENT error on move fallback', async function (t) { const otherError = new Error('UNKNOWN') otherError.code = 'OTHER' const mockFS = { - ...fs, + ...fs.promises, async unlink (path) { throw missingFileError }, @@ -125,7 +132,7 @@ t.test('non ENOENT error on move fallback', async function (t) { } const mockedMoveFile = t.mock('../../lib/util/move-file', { - '@npmcli/fs': mockFS, + 'fs/promises': mockFS, }) await t.rejects( @@ -141,28 +148,32 @@ t.test('verify weird EPERM on Windows behavior', t => { t.teardown(() => { Object.defineProperty(process, 'platform', { value: processPlatform }) }) - const gfsLink = fs.link - const gfs = require('@npmcli/fs') - let calledMonkeypatch = false - gfs.link = async (src, dest) => { - calledMonkeypatch = true - gfs.link = gfsLink - throw Object.assign(new Error('yolo'), { - code: 'EPERM', - }) + + let calledMonkeyPatch = false + const mockFS = { + ...fs.promises, + link: async (src, dest) => { + calledMonkeyPatch = true + throw Object.assign(new Error('yolo'), { code: 'EPERM' }) + }, } + + const mockedMoveFile = t.mock('../../lib/util/move-file.js', { + 'fs/promises': mockFS, + }) + const testDir = t.testdir({ eperm: { src: 'epermmy', }, }) - return moveFile(testDir + '/eperm/src', testDir + '/eperm/dest') - .then(() => t.ok(calledMonkeypatch, 'called the patched fs.link fn')) - .then(() => t.rejects(fs.readFile('eperm/dest'), { + return mockedMoveFile(testDir + '/eperm/src', testDir + '/eperm/dest') + .then(() => t.ok(calledMonkeyPatch, 'called the patched fs.link fn')) + .then(() => t.rejects(readFile('eperm/dest'), { code: 'ENOENT', }, 'destination file did not get written')) - .then(() => t.rejects(fs.readFile('eperm/src'), { + .then(() => t.rejects(readFile('eperm/src'), { code: 'ENOENT', }, 'src file 
did get deleted')) }) @@ -178,14 +189,14 @@ t.test( dest: {}, }) - await fs.chmod(testDir + '/dest', parseInt('400', 8)) + await chmod(testDir + '/dest', parseInt('400', 8)) await t.rejects( moveFile(testDir + '/src', path.join(testDir + '/dest', 'file')), { code: 'EACCES' }, 'error is about permissions' ) - const data = await fs.readFile(testDir + '/src', 'utf8') + const data = await readFile(testDir + '/src', 'utf8') t.equal(data, 'foo', 'src contents left intact') } ) diff --git a/test/util/tmp.js b/test/util/tmp.js index f579e58..865bf81 100644 --- a/test/util/tmp.js +++ b/test/util/tmp.js @@ -1,17 +1,11 @@ 'use strict' -const fs = require('@npmcli/fs') +const fs = require('fs/promises') const path = require('path') const t = require('tap') const CACHE = t.testdir() - -const mockedFixOwner = () => Promise.resolve(1) -// temporarily points to original mkdirfix implementation -mockedFixOwner.mkdirfix = require('../../lib/util/fix-owner').mkdirfix -const tmp = t.mock('../../lib/util/tmp', { - '../../lib/util/fix-owner': mockedFixOwner, -}) +const tmp = require('../../lib/util/tmp.js') t.test('creates a unique tmpdir inside the cache', async t => { const dir = await tmp.mkdir(CACHE) @@ -53,8 +47,3 @@ t.test('withTmp should accept both opts and cb params', async t => { t.ok(dir, 'dir should contain a valid response') }) }) - -t.test('provides a function for fixing ownership in the tmp dir', async t => { - const res = await tmp.fix(CACHE) - t.ok(res, 'fixOwner is successfully called') -}) diff --git a/test/verify.js b/test/verify.js index 4a22c13..d0a64de 100644 --- a/test/verify.js +++ b/test/verify.js @@ -2,7 +2,7 @@ const contentPath = require('../lib/content/path') const index = require('../lib/entry-index') -const fs = require('@npmcli/fs') +const fs = require('fs/promises') const path = require('path') const t = require('tap') const ssri = require('ssri') @@ -211,13 +211,11 @@ t.test('writes a file with last verification time', async t => { 
t.equal(+fromLastRun, +fromFile, 'last verified was writen') }) -t.test('fixes permissions and users on cache contents') - t.test('missing file error when validating cache content', async t => { const missingFileError = new Error('ENOENT') missingFileError.code = 'ENOENT' const mockVerify = getVerify(t, { - '@npmcli/fs': Object.assign({}, fs, { + 'fs/promises': Object.assign({}, fs, { stat: async (path) => { throw missingFileError }, @@ -239,7 +237,7 @@ t.test('missing file error when validating cache content', async t => { t.test('unknown error when validating content', async t => { const mockVerify = getVerify(t, { - '@npmcli/fs': Object.assign({}, fs, { + 'fs/promises': Object.assign({}, fs, { stat: async (path) => { throw genericError }, @@ -275,7 +273,7 @@ t.test('unknown error when rebuilding bucket', async t => { // shouldFail controls the right time to mock the error let shouldFail = false const mockVerify = getVerify(t, { - '@npmcli/fs': Object.assign({}, fs, { + 'fs/promises': Object.assign({}, fs, { stat: async (path) => { if (shouldFail) { throw genericError