From 22384cd695664d35747bd1b33985cc6f7a2cf5ee Mon Sep 17 00:00:00 2001
From: nlf
Date: Tue, 11 Oct 2022 14:18:17 -0700
Subject: [PATCH] feat: remove sync methods

BREAKING CHANGE: this package is now async only; all synchronous methods have
been removed

---
 README.md              |   2 +-
 lib/content/read.js    |  75 ----------------------
 lib/entry-index.js     |  67 +------------------
 lib/get.js             |  55 ----------------
 lib/index.js           |   3 -
 lib/util/fix-owner.js  |  54 ----------------
 lib/verify.js          |   9 +--
 test/content/read.js   | 141 ----------------------------------------
 test/entry-index.js    | 142 +++++------------------------------------
 test/get.js            | 100 ++---------------------------
 test/put.js            |   2 +-
 test/util/fix-owner.js | 121 ++++------------------------------------
 12 files changed, 41 insertions(+), 730 deletions(-)
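
Note for reviewers (this region between the diffstat and the first hunk is
ignored by `git am`, so it does not affect application): a minimal sketch of
the caller-side migration, assuming the surviving promise-returning entry
points keep the signatures shown in the lib/index.js hunk below. The
`migrated` function is hypothetical; each removed `.sync` call simply becomes
an awaited call of the same name:

```javascript
const cacache = require('cacache')

// hypothetical helper, for illustration only
async function migrated (cache, key, integrity) {
  // was: const res = cacache.get.sync(cache, key)
  const res = await cacache.get(cache, key)

  // was: const data = cacache.get.sync.byDigest(cache, integrity)
  const data = await cacache.get.byDigest(cache, integrity)

  // was: const info = cacache.get.hasContent.sync(cache, integrity)
  // resolves to false, or to { sri, size, stat } when the content exists
  const info = await cacache.get.hasContent(cache, integrity)

  return { res, data, info }
}
```
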
diff --git a/README.md b/README.md
index cd39b37..0d91ee9 100644
--- a/README.md
+++ b/README.md
@@ -601,7 +601,7 @@ See: [options](#tmp-options)
 
 ```javascript
 cacache.tmp.withTmp(cache, dir => {
-  return fs.writeFileAsync(path.join(dir, 'blablabla'), Buffer#<1234>, ...)
+  return fs.writeFile(path.join(dir, 'blablabla'), Buffer#<1234>, ...)
 }).then(() => {
   // `dir` no longer exists
 })
diff --git a/lib/content/read.js b/lib/content/read.js
index 7c20c75..47587e4 100644
--- a/lib/content/read.js
+++ b/lib/content/read.js
@@ -46,24 +46,6 @@ const readPipeline = (cpath, size, sri, stream) => {
   return stream
 }
 
-module.exports.sync = readSync
-
-function readSync (cache, integrity, opts = {}) {
-  const { size } = opts
-  return withContentSriSync(cache, integrity, (cpath, sri) => {
-    const data = fs.readFileSync(cpath, { encoding: null })
-    if (typeof size === 'number' && size !== data.length) {
-      throw sizeError(size, data.length)
-    }
-
-    if (ssri.checkData(data, sri)) {
-      return data
-    }
-
-    throw integrityError(sri, cpath)
-  })
-}
-
 module.exports.stream = readStream
 module.exports.readStream = readStream
 
@@ -88,7 +70,6 @@ function readStream (cache, integrity, opts = {}) {
 }
 
 module.exports.copy = copy
-module.exports.copy.sync = copySync
 
 function copy (cache, integrity, dest) {
   return withContentSri(cache, integrity, (cpath, sri) => {
@@ -96,12 +77,6 @@ function copy (cache, integrity, dest) {
   })
 }
 
-function copySync (cache, integrity, dest) {
-  return withContentSriSync(cache, integrity, (cpath, sri) => {
-    return fs.copyFileSync(cpath, dest)
-  })
-}
-
 module.exports.hasContent = hasContent
 
 async function hasContent (cache, integrity) {
@@ -130,34 +105,6 @@ async function hasContent (cache, integrity) {
   }
 }
 
-module.exports.hasContent.sync = hasContentSync
-
-function hasContentSync (cache, integrity) {
-  if (!integrity) {
-    return false
-  }
-
-  return withContentSriSync(cache, integrity, (cpath, sri) => {
-    try {
-      const stat = fs.statSync(cpath)
-      return { size: stat.size, sri, stat }
-    } catch (err) {
-      if (err.code === 'ENOENT') {
-        return false
-      }
-
-      if (err.code === 'EPERM') {
-        /* istanbul ignore else */
-        if (process.platform !== 'win32') {
-          throw err
-        } else {
-          return false
-        }
-      }
-    }
-  })
-}
-
 async function withContentSri (cache, integrity, fn) {
   const sri = ssri.parse(integrity)
   // If `integrity` has multiple entries, pick the first digest
@@ -201,28 +148,6 @@ async function withContentSri (cache, integrity, fn) {
   }
 }
 
-function withContentSriSync (cache, integrity, fn) {
-  const sri = ssri.parse(integrity)
-  // If `integrity` has multiple entries, pick the first digest
-  // with available local data.
-  const algo = sri.pickAlgorithm()
-  const digests = sri[algo]
-  if (digests.length <= 1) {
-    const cpath = contentPath(cache, digests[0])
-    return fn(cpath, digests[0])
-  } else {
-    let lastErr = null
-    for (const meta of digests) {
-      try {
-        return withContentSriSync(cache, meta, fn)
-      } catch (err) {
-        lastErr = err
-      }
-    }
-    throw lastErr
-  }
-}
-
 function sizeError (expected, found) {
   /* eslint-disable-next-line max-len */
   const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
diff --git a/lib/entry-index.js b/lib/entry-index.js
index 1dc73a9..96a5ef9 100644
--- a/lib/entry-index.js
+++ b/lib/entry-index.js
@@ -15,7 +15,6 @@ const indexV = require('../package.json')['cache-version'].index
 const moveFile = require('@npmcli/move-file')
 const _rimraf = require('rimraf')
 const rimraf = util.promisify(_rimraf)
-rimraf.sync = _rimraf.sync
 
 module.exports.NotFoundError = class NotFoundError extends Error {
   constructor (cache, key) {
@@ -151,31 +150,6 @@ async function insert (cache, key, integrity, opts = {}) {
   return formatEntry(cache, entry)
 }
 
-module.exports.insert.sync = insertSync
-
-function insertSync (cache, key, integrity, opts = {}) {
-  const { metadata, size } = opts
-  const bucket = bucketPath(cache, key)
-  const entry = {
-    key,
-    integrity: integrity && ssri.stringify(integrity),
-    time: Date.now(),
-    size,
-    metadata,
-  }
-  fixOwner.mkdirfix.sync(cache, path.dirname(bucket))
-  const stringified = JSON.stringify(entry)
-  fs.appendFileSync(bucket, `\n${hashEntry(stringified)}\t${stringified}`)
-  try {
-    fixOwner.chownr.sync(cache, bucket)
-  } catch (err) {
-    if (err.code !== 'ENOENT') {
-      throw err
-    }
-  }
-  return formatEntry(cache, entry)
-}
-
 module.exports.find = find
 
 async function find (cache, key) {
@@ -198,27 +172,6 @@ async function find (cache, key) {
   }
 }
 
-module.exports.find.sync = findSync
-
-function findSync (cache, key) {
-  const bucket = bucketPath(cache, key)
-  try {
-    return bucketEntriesSync(bucket).reduce((latest, next) => {
-      if (next && next.key === key) {
-        return formatEntry(cache, next)
-      } else {
-        return latest
-      }
-    }, null)
-  } catch (err) {
-    if (err.code === 'ENOENT') {
-      return null
-    } else {
-      throw err
-    }
-  }
-}
-
 module.exports.delete = del
 
 function del (cache, key, opts = {}) {
@@ -230,17 +183,6 @@ function del (cache, key, opts = {}) {
   return rimraf(bucket)
 }
 
-module.exports.delete.sync = delSync
-
-function delSync (cache, key, opts = {}) {
-  if (!opts.removeFully) {
-    return insertSync(cache, key, null, opts)
-  }
-
-  const bucket = bucketPath(cache, key)
-  return rimraf.sync(bucket)
-}
-
 module.exports.lsStream = lsStream
 
 function lsStream (cache) {
@@ -308,13 +250,6 @@ async function bucketEntries (bucket, filter) {
   return _bucketEntries(data, filter)
 }
 
-module.exports.bucketEntries.sync = bucketEntriesSync
-
-function bucketEntriesSync (bucket, filter) {
-  const data = fs.readFileSync(bucket, 'utf8')
-  return _bucketEntries(data, filter)
-}
-
 function _bucketEntries (data, filter) {
   const entries = []
   data.split('\n').forEach((entry) => {
@@ -335,6 +270,8 @@ function _bucketEntries (data, filter) {
       // Entry is corrupted!
       return
     }
+    // coverage disabled here, no need to test with an entry that parses to something falsy
+    // istanbul ignore else
     if (obj) {
       entries.push(obj)
     }
diff --git a/lib/get.js b/lib/get.js
index 254b4ec..272ddb6 100644
--- a/lib/get.js
+++ b/lib/get.js
@@ -53,61 +53,6 @@ async function getDataByDigest (cache, key, opts = {}) {
 }
 module.exports.byDigest = getDataByDigest
 
-function getDataSync (cache, key, opts = {}) {
-  const { integrity, memoize, size } = opts
-  const memoized = memo.get(cache, key, opts)
-
-  if (memoized && memoize !== false) {
-    return {
-      metadata: memoized.entry.metadata,
-      data: memoized.data,
-      integrity: memoized.entry.integrity,
-      size: memoized.entry.size,
-    }
-  }
-  const entry = index.find.sync(cache, key, opts)
-  if (!entry) {
-    throw new index.NotFoundError(cache, key)
-  }
-  const data = read.sync(cache, entry.integrity, {
-    integrity: integrity,
-    size: size,
-  })
-  const res = {
-    metadata: entry.metadata,
-    data: data,
-    size: entry.size,
-    integrity: entry.integrity,
-  }
-  if (memoize) {
-    memo.put(cache, entry, res.data, opts)
-  }
-
-  return res
-}
-
-module.exports.sync = getDataSync
-
-function getDataByDigestSync (cache, digest, opts = {}) {
-  const { integrity, memoize, size } = opts
-  const memoized = memo.get.byDigest(cache, digest, opts)
-
-  if (memoized && memoize !== false) {
-    return memoized
-  }
-
-  const res = read.sync(cache, digest, {
-    integrity: integrity,
-    size: size,
-  })
-  if (memoize) {
-    memo.put.byDigest(cache, digest, res, opts)
-  }
-
-  return res
-}
-module.exports.sync.byDigest = getDataByDigestSync
-
 const getMemoizedStream = (memoized) => {
   const stream = new Minipass()
   stream.on('newListener', function (ev, cb) {
diff --git a/lib/index.js b/lib/index.js
index 1c56be6..c9b0da5 100644
--- a/lib/index.js
+++ b/lib/index.js
@@ -17,15 +17,12 @@ module.exports.ls.stream = index.lsStream
 
 module.exports.get = get
 module.exports.get.byDigest = get.byDigest
-module.exports.get.sync = get.sync
-module.exports.get.sync.byDigest = get.sync.byDigest
 module.exports.get.stream = get.stream
 module.exports.get.stream.byDigest = get.stream.byDigest
 module.exports.get.copy = get.copy
 module.exports.get.copy.byDigest = get.copy.byDigest
 module.exports.get.info = get.info
 module.exports.get.hasContent = get.hasContent
-module.exports.get.hasContent.sync = get.hasContent.sync
 
 module.exports.put = put
 module.exports.put.stream = put.stream
diff --git a/lib/util/fix-owner.js b/lib/util/fix-owner.js
index 182fcb0..c6a5960 100644
--- a/lib/util/fix-owner.js
+++ b/lib/util/fix-owner.js
@@ -67,40 +67,6 @@ async function fixOwner (cache, filepath) {
   )
 }
 
-module.exports.chownr.sync = fixOwnerSync
-
-function fixOwnerSync (cache, filepath) {
-  if (!process.getuid) {
-    // This platform doesn't need ownership fixing
-    return
-  }
-  const { uid, gid } = inferOwner.sync(cache)
-  getSelf()
-  if (self.uid !== 0) {
-    // almost certainly can't chown anyway
-    return
-  }
-
-  if (self.uid === uid && self.gid === gid) {
-    // No need to override if it's already what we used.
-    return
-  }
-  try {
-    chownr.sync(
-      filepath,
-      typeof uid === 'number' ? uid : self.uid,
-      typeof gid === 'number' ? gid : self.gid
-    )
-  } catch (err) {
-    // only catch ENOENT, any other error is a problem.
-    if (err.code === 'ENOENT') {
-      return null
-    }
-
-    throw err
-  }
-}
-
 module.exports.mkdirfix = mkdirfix
 
 async function mkdirfix (cache, p, cb) {
@@ -123,23 +89,3 @@ async function mkdirfix (cache, p, cb) {
     throw err
   }
 }
-
-module.exports.mkdirfix.sync = mkdirfixSync
-
-function mkdirfixSync (cache, p) {
-  try {
-    inferOwner.sync(cache)
-    const made = mkdirp.sync(p)
-    if (made) {
-      fixOwnerSync(cache, made)
-      return made
-    }
-  } catch (err) {
-    if (err.code === 'EEXIST') {
-      fixOwnerSync(cache, p)
-      return null
-    } else {
-      throw err
-    }
-  }
-}
diff --git a/lib/verify.js b/lib/verify.js
index 52692a0..1ee9a69 100644
--- a/lib/verify.js
+++ b/lib/verify.js
@@ -239,14 +239,11 @@ function cleanTmp (cache, opts) {
   return rimraf(path.join(cache, 'tmp'))
 }
 
-function writeVerifile (cache, opts) {
+async function writeVerifile (cache, opts) {
   const verifile = path.join(cache, '_lastverified')
   opts.log.silly('verify', 'writing verifile to ' + verifile)
-  try {
-    return fs.writeFile(verifile, `${Date.now()}`)
-  } finally {
-    fixOwner.chownr.sync(cache, verifile)
-  }
+  await fs.writeFile(verifile, `${Date.now()}`)
+  return fixOwner.chownr(cache, verifile)
 }
 
 module.exports.lastRun = lastRun
diff --git a/test/content/read.js b/test/content/read.js
index 7accce3..77b1d2d 100644
--- a/test/content/read.js
+++ b/test/content/read.js
@@ -40,19 +40,6 @@ t.test('read: returns a Promise with cache content data', async t => {
   t.same(data, CONTENT, 'cache contents read correctly')
 })
 
-t.test('read.sync: reads synchronously', (t) => {
-  const CONTENT = Buffer.from('foobarbaz')
-  const INTEGRITY = ssri.fromData(CONTENT)
-  const CACHE = t.testdir(
-    CacheContent({
-      [INTEGRITY]: CONTENT,
-    })
-  )
-  const data = read.sync(CACHE, INTEGRITY)
-  t.same(data, CONTENT, 'cache contents read correctly')
-  t.end()
-})
-
 t.test('read.stream: returns a stream with cache content data', async t => {
   const CONTENT = Buffer.from('foobarbaz')
   const INTEGRITY = ssri.fromData(CONTENT)
@@ -254,64 +241,6 @@ t.test('read: opening large files', function (t) {
   mockedRead(CACHE, 'sha1-deadbeef')
 })
 
-t.test('read.sync: unknown error parsing nested integrity data', (t) => {
-  const CACHE = t.testdir()
-  const INTEGRITY = 'sha1-deadbeef sha1-13371337'
-
-  // patches method in order to force a last error scenario
-  const mockedRead = getRead(t, {
-    ssri: {
-      parse (sri) {
-        if (sri !== INTEGRITY) {
-          throw genericError
-        }
-
-        return ssri.parse(sri)
-      },
-    },
-  })
-
-  t.throws(
-    () => mockedRead.sync(CACHE, INTEGRITY),
-    genericError,
-    'should throw last error found when parsing multiple hashes'
-  )
-  t.end()
-})
-
-t.test('read.sync: cache contains mismatching data', (t) => {
-  const CONTENT = Buffer.from('foobarbaz')
-  const INTEGRITY = ssri.fromData(CONTENT)
-  const CACHE = t.testdir(
-    CacheContent({
-      [INTEGRITY]: CONTENT.slice(3),
-    })
-  )
-  t.throws(
-    () => read.sync(CACHE, INTEGRITY),
-    { code: 'EINTEGRITY' },
-    'should throw integrity error'
-  )
-  t.end()
-})
-
-t.test('read.sync: content size value does not match option', (t) => {
-  const CONTENT = Buffer.from('foobarbaz')
-  const INTEGRITY = ssri.fromData(CONTENT)
-  const CACHE = t.testdir(
-    CacheContent({
-      [INTEGRITY]: CONTENT.slice(3),
-    })
-  )
-
-  t.throws(
-    () => read.sync(CACHE, INTEGRITY, { size: CONTENT.length }),
-    { code: 'EBADSIZE' },
-    'should throw size error'
-  )
-  t.end()
-})
-
 t.test('hasContent: tests content existence', async t => {
   const CACHE = t.testdir(
     CacheContent({
@@ -368,62 +297,6 @@ t.test('hasContent: no integrity provided', (t) => {
   t.end()
 })
 
-t.test('hasContent.sync: checks content existence synchronously', (t) => {
-  const CACHE = t.testdir(
-    CacheContent({
-      'sha1-deadbeef': '',
-    })
-  )
-  const content = read.hasContent.sync(CACHE, 'sha1-deadbeef')
-  t.ok(content.sri, 'returned sri for this content')
-  t.equal(content.size, 0, 'returned the right size for this content')
-  t.ok(content.stat.isFile(), 'returned actual stat object')
-  t.equal(
-    read.hasContent.sync(CACHE, 'sha1-not-there'),
-    false,
-    'returned false for missing content'
-  )
-  t.equal(
-    read.hasContent.sync(CACHE, 'sha1-not-here sha1-also-not-here'),
-    false,
-    'multi-content hash failures work ok'
-  )
-  t.end()
-})
-
-t.test('hasContent.sync: permission error', (t) => {
-  const CACHE = t.testdir()
-  const mockedRead = getReadStatFailure(t, permissionError)
-
-  t.throws(
-    () => mockedRead.hasContent.sync(CACHE, 'sha1-deadbeef sha1-13371337'),
-    permissionError,
-    'should throw on permission errors'
-  )
-  t.end()
-})
-
-t.test('hasContent.sync: generic error', (t) => {
-  const CACHE = t.testdir()
-  const mockedRead = getReadStatFailure(t, genericError)
-
-  t.notOk(
-    mockedRead.hasContent.sync(CACHE, 'sha1-deadbeef sha1-13371337'),
-    'should not throw on generic errors'
-  )
-  t.end()
-})
-
-t.test('hasContent.sync: no integrity provided', (t) => {
-  const CACHE = t.testdir()
-  t.equal(
-    read.hasContent.sync(CACHE, ''),
-    false,
-    'should returns false if no integrity provided'
-  )
-  t.end()
-})
-
 t.test('copy: copies content to a destination path', async t => {
   const CONTENT = Buffer.from('foobarbaz')
   const INTEGRITY = ssri.fromData(CONTENT)
@@ -437,17 +310,3 @@ t.test('copy: copies content to a destination path', async t => {
   const data = await fs.readFile(DEST)
   t.same(data, CONTENT, 'file successfully copied')
 })
-
-t.test('copy.sync: copies content to a destination path synchronously', (t) => {
-  const CONTENT = Buffer.from('foobarbaz')
-  const INTEGRITY = ssri.fromData(CONTENT)
-  const CACHE = t.testdir(
-    CacheContent({
-      [INTEGRITY]: CONTENT,
-    })
-  )
-  const DEST = path.join(CACHE, 'foobar-file')
-  read.copy.sync(CACHE, INTEGRITY, DEST)
-  t.same(fs.readFileSync(DEST), CONTENT, 'file successfully copied')
-  t.end()
-})
diff --git a/test/entry-index.js b/test/entry-index.js
index 84ffc66..4082eb0 100644
--- a/test/entry-index.js
+++ b/test/entry-index.js
@@ -188,42 +188,6 @@ t.test('compact: error in moveFile removes temp', async (t) => {
   t.equal(tmpFiles.length, 0, 'temp file is gone')
 })
 
-t.test('delete.sync: removes a cache entry', async t => {
-  const cache = t.testdir(cacheContent)
-  await index.insert(cache, KEY, INTEGRITY)
-  const lsResults = await index.ls(cache)
-  t.ok(lsResults[KEY], 'should have entry')
-  t.equal(
-    index.delete.sync(cache, KEY),
-    null,
-    'should return null on successful deletion'
-  )
-  const emptyResults = await index.ls(cache)
-  t.notOk(Object.keys(emptyResults).length, 'should have no entries')
-})
-
-t.test('delete.sync: removeFully deletes the index entirely', async (t) => {
-  const cache = t.testdir(cacheContent)
-  const bucket = index.bucketPath(cache, KEY)
-  await index.insert(cache, KEY, INTEGRITY)
-  const entries = await index.bucketEntries(bucket)
-  t.equal(entries.length, 1, 'has an entry')
-
-  // do a normal delete first, this appends a null integrity
-  index.delete.sync(cache, KEY)
-  const delEntries = await index.bucketEntries(bucket)
-  t.equal(delEntries.length, 2, 'should now have 2 entries')
-  t.equal(delEntries[1].integrity, null, 'has a null integrity last')
-
-  // then a full delete
-  index.delete.sync(cache, KEY, { removeFully: true })
-  await t.rejects(
-    index.bucketEntries(bucket),
-    { code: 'ENOENT' },
-    'rejects with ENOENT because file is gone'
-  )
-})
-
 t.test('delete: removeFully deletes the index entirely', async (t) => {
   const cache = t.testdir(cacheContent)
   const bucket = index.bucketPath(cache, KEY)
@@ -277,55 +241,6 @@ t.test('find: unknown error on finding entries', (t) => {
   )
 })
 
-t.test('find.sync: retrieve from bucket containing multiple entries', (t) => {
-  const cache = t.testdir(cacheContent)
-  const entries = [
-    '\na7eb00332fe51ff62b1bdb1564855f2624f16f34\t{"key":"foo", "integrity": "foo"}',
-    '\n46b1607f427665a99668c02d3a4cc52061afd83a\t{"key":"bar", "integrity": "bar"}',
-  ]
-  const { find } = getEntryIndex(t, {
-    '@npmcli/fs': Object.assign({}, require('@npmcli/fs'), {
-      readFileSync: (path, encode) => entries.join(''),
-    }),
-  })
-
-  t.match(
-    find.sync(cache, 'foo'),
-    { key: 'foo' },
-    'should retrieve entry using key'
-  )
-  t.end()
-})
-
-t.test('find.sync: unknown error on finding entries', (t) => {
-  const cache = t.testdir(cacheContent)
-  const { find } = getEntryIndexReadFileFailure(t, genericError)
-
-  t.throws(
-    () => find.sync(cache, KEY),
-    genericError,
-    'should throw the unknown error'
-  )
-  t.end()
-})
-
-t.test('find.sync: retrieve entry with invalid content', (t) => {
-  const cache = t.testdir(cacheContent)
-  const { find } = getEntryIndex(t, {
-    '@npmcli/fs': Object.assign({}, require('@npmcli/fs'), {
-      readFileSync: (path, encode) =>
-        '\nb6589fc6ab0dc82cf12099d1c2d40ab994e8410c\t0',
-    }),
-  })
-
-  t.match(
-    find.sync(cache, 'foo'),
-    null,
-    'should return null'
-  )
-  t.end()
-})
-
 t.test('insert: missing files on fixing ownership', (t) => {
   const cache = t.testdir(cacheContent)
   const { insert } = getEntryIndexFixOwnerFailure(missingFileError)
@@ -349,57 +264,32 @@ t.test('insert: unknown errors on fixing ownership', (t) => {
   )
 })
 
-t.test('insert.sync: missing files on fixing ownership', (t) => {
+t.test('lsStream: unknown error reading files', async (t) => {
   const cache = t.testdir(cacheContent)
-  const { insert } = getEntryIndexFixOwnerFailure(missingFileError)
-
-  t.plan(1)
-  t.doesNotThrow(
-    () => insert.sync(cache, KEY, INTEGRITY),
-    'should insert entry with no errors'
-  )
-})
-
-t.test('insert.sync: unknown errors on fixing ownership', (t) => {
-  const cache = t.testdir(cacheContent)
-  const { insert } = getEntryIndexFixOwnerFailure(genericError)
-
-  t.throws(
-    () => insert.sync(cache, KEY, INTEGRITY),
-    genericError,
-    'should throw the unknown error'
-  )
-  t.end()
-})
-
-t.test('lsStream: unknown error reading files', (t) => {
-  const cache = t.testdir(cacheContent)
-  index.insert.sync(cache, KEY, INTEGRITY)
+  await index.insert(cache, KEY, INTEGRITY)
   const { lsStream } = getEntryIndexReadFileFailure(t, genericError)
 
-  lsStream(cache)
-    .on('error', err => {
-      t.equal(err, genericError, 'should emit an error')
-      t.end()
-    })
+  return new Promise((resolve) => {
+    lsStream(cache)
+      .on('error', err => {
+        t.equal(err, genericError, 'should emit an error')
+        resolve()
+      })
+  })
 })
 
-t.test('lsStream: missing files error', (t) => {
+t.test('lsStream: missing files error', async (t) => {
   const cache = t.testdir(cacheContent)
-  index.insert.sync(cache, KEY, INTEGRITY)
+  await index.insert(cache, KEY, INTEGRITY)
   const { lsStream } = getEntryIndexReadFileFailure(t, missingFileError)
 
-  lsStream(cache)
-    .on('error', () => {
-      t.fail('should not error')
-      t.end()
-    })
-    .on('end', () => {
-      t.ok('should end successfully')
-      t.end()
-    })
+  return new Promise((resolve, reject) => {
+    lsStream(cache)
+      .on('error', reject)
+      .on('end', resolve)
+  })
 })
 
 t.test('lsStream: unknown error reading dirs', (t) => {
diff --git a/test/get.js b/test/get.js
index f6e88c9..a19763d 100644
--- a/test/get.js
+++ b/test/get.js
@@ -63,16 +63,6 @@ t.test('get.info index entry lookup', async t => {
   t.same(entry, indexInsert, 'get.info() returned the right entry')
 })
 
-t.test('get.sync will throw ENOENT if not found', (t) => {
-  try {
-    get.sync('foo', 'bar')
-  } catch (err) {
-    t.same(err.message, 'No cache entry for bar found in foo')
-    t.same(err.code, 'ENOENT')
-    t.end()
-  }
-})
-
 t.test('get will throw ENOENT if not found', (t) => {
   const CACHE = t.testdir()
   return get(CACHE, KEY)
@@ -114,95 +104,13 @@ t.test('basic bulk get', async t => {
   )
 })
 
-t.test('get.sync.byDigest without memoization', (t) => {
-  const CACHE = t.testdir(
-    CacheContent({
-      [INTEGRITY]: CONTENT,
-    })
-  )
-  index.insert.sync(CACHE, KEY, INTEGRITY, opts())
-  const res = get.sync(CACHE, KEY)
-  t.same(
-    res,
-    {
-      metadata: METADATA,
-      data: CONTENT,
-      integrity: INTEGRITY,
-      size: SIZE,
-    },
-    'bulk key get returned proper data'
-  )
-  const resByDig = get.sync.byDigest(CACHE, INTEGRITY)
-  t.same(resByDig, CONTENT, 'byDigest returned proper data')
-  t.end()
-})
-
-t.test('get.sync.byDigest with memoization', (t) => {
-  const CACHE = t.testdir(
-    CacheContent({
-      [INTEGRITY]: CONTENT,
-    })
-  )
-  index.insert.sync(CACHE, KEY, INTEGRITY, opts())
-  const res = get.sync(CACHE, KEY, { memoize: true })
-  t.same(
-    res,
-    {
-      metadata: METADATA,
-      data: CONTENT,
-      integrity: INTEGRITY,
-      size: SIZE,
-    },
-    'bulk key get returned proper data'
-  )
-  memo.clearMemoized()
-  t.same(memo.get.byDigest(CACHE, INTEGRITY), undefined)
-  const resByDig = get.sync.byDigest(CACHE, INTEGRITY, { memoize: true })
-  t.same(resByDig, CONTENT, 'byDigest returned proper data')
-  t.notSame(memo.get.byDigest(CACHE, INTEGRITY), undefined)
-  const resByDig2 = get.sync.byDigest(CACHE, INTEGRITY, { memoize: true })
-  t.same(resByDig2, CONTENT, 'byDigest returned proper data')
-  t.end()
-})
-
-t.test('get.sync with memoization', (t) => {
-  const CACHE = t.testdir(
-    CacheContent({
-      [INTEGRITY]: CONTENT,
-    })
-  )
-  index.insert.sync(CACHE, KEY, INTEGRITY, opts())
-  memo.clearMemoized()
-  t.same(memo.get(CACHE, KEY), undefined)
-  const res = get.sync(CACHE, KEY, { memoize: true })
-  t.same(
-    res,
-    {
-      metadata: METADATA,
-      data: CONTENT,
-      integrity: INTEGRITY,
-      size: SIZE,
-    },
-    'bulk key get returned proper data'
-  )
-  t.notSame(memo.get(CACHE, KEY), undefined)
-  const resByDig = get.sync(CACHE, KEY, { memoize: true })
-  t.same(resByDig, {
-    metadata: METADATA,
-    data: CONTENT,
-    integrity: INTEGRITY,
-    size: SIZE,
-  }, 'get returned proper data')
-  t.end()
-})
-
 t.test('get.byDigest without memoization', async t => {
   const CACHE = t.testdir(
     CacheContent({
       [INTEGRITY]: CONTENT,
     })
   )
-  index.insert.sync(CACHE, KEY, INTEGRITY, opts())
+  await index.insert(CACHE, KEY, INTEGRITY, opts())
   const res = await get(CACHE, KEY)
   t.same(
     res,
@@ -230,7 +138,7 @@ t.test('get.byDigest with memoization', async t => {
       [INTEGRITY]: CONTENT,
     })
   )
-  index.insert.sync(CACHE, KEY, INTEGRITY, opts())
+  await index.insert(CACHE, KEY, INTEGRITY, opts())
   const res = await get(CACHE, KEY)
   t.same(
     res,
@@ -258,7 +166,7 @@ t.test('get without memoization', async t => {
       [INTEGRITY]: CONTENT,
     })
   )
-  index.insert.sync(CACHE, KEY, INTEGRITY, opts())
+  await index.insert(CACHE, KEY, INTEGRITY, opts())
   const res = await get(CACHE, KEY)
   t.same(
     res,
@@ -296,7 +204,7 @@ t.test('get with memoization', async t => {
       [INTEGRITY]: CONTENT,
     })
   )
-  index.insert.sync(CACHE, KEY, INTEGRITY, opts())
+  await index.insert(CACHE, KEY, INTEGRITY, opts())
   const res = await get(CACHE, KEY)
   t.same(
     res,
diff --git a/test/put.js b/test/put.js
index 180fa91..9e3a5c0 100644
--- a/test/put.js
+++ b/test/put.js
@@ -108,7 +108,7 @@ t.test('errors if integrity errors', async t => {
   )
 })
 
-t.test('signals error if error writing to cache', { saveFixture: true }, async t => {
+t.test('signals error if error writing to cache', async t => {
   const CACHE = t.testdir()
   const [bulkErr, streamErr] = await Promise.all([
     put(CACHE, KEY, CONTENT, {
diff --git a/test/util/fix-owner.js b/test/util/fix-owner.js
index f3c3c14..71df9c3 100644
--- a/test/util/fix-owner.js
+++ b/test/util/fix-owner.js
@@ -25,7 +25,7 @@ const patchesGetuid = (t) => {
 }
 const getFixOwner = (t, opts) => t.mock('../../lib/util/fix-owner', opts)
 
-// chownr and chownr.fix error handling tests
+// chownr error handling tests
 
 t.test('attempt to chownr existing path', async t => {
   patchesGetuid(t)
@@ -89,7 +89,7 @@ t.test('calls setuid setgid to replace user', async t => {
   await t.resolves(fixOwner.chownr(CACHE, filename), 'should not throw')
 })
 
-t.test('attempt to chownr.sync on platforms that do not need ownership fix', async t => {
+t.test('attempt to chownr on platforms that do not need ownership fix', async t => {
   process.getuid = undefined
   t.teardown(() => {
     process.getuid = getuid
   })
@@ -99,63 +99,7 @@ t.test('attempt to chownr.sync on platforms that do not need ownership fix', asy
   await t.resolves(fixOwner.chownr(CACHE, filename), 'should not throw')
 })
 
-t.test('attempt to chownr.sync existing path', (t) => {
-  patchesGetuid(t)
-  function chownr () {}
-  chownr.sync = () => {
-    throw missingFileError
-  }
-  const fixOwner = getFixOwner(t, {
-    chownr,
-    'infer-owner': { sync: () => ({}) },
-  })
-
-  t.notOk(fixOwner.chownr.sync(CACHE, filename), 'should not throw if path exists')
-  t.end()
-})
-
-t.test('attempt to chownr.sync unknown error', (t) => {
-  patchesGetuid(t)
-  function chownr () {}
-  chownr.sync = () => {
-    throw genericError
-  }
-  const fixOwner = getFixOwner(t, {
-    chownr,
-    'infer-owner': { sync: () => ({}) },
-  })
-
-  t.throws(() => fixOwner.chownr.sync(CACHE, filename), genericError, 'should throw unknown errors')
-  t.end()
-})
-
-t.test('attempt to chownr.sync using same user', (t) => {
-  patchesGetuid(t)
-  const fixOwner = getFixOwner(t, {
-    'infer-owner': {
-      sync: () => ({
-        uid: process.getuid(),
-        gid: process.getgid(),
-      }),
-    },
-  })
-
-  t.notOk(fixOwner.chownr.sync(CACHE, filename), 'should not throw')
-  t.end()
-})
-
-t.test('attempt to chownr.sync on platforms that do not need ownership fix', (t) => {
-  process.getuid = undefined
-  t.teardown(() => {
-    process.getuid = getuid
-  })
-  const fixOwner = require('../../lib/util/fix-owner')
-
-  t.notOk(fixOwner.chownr.sync(CACHE, filename), 'should not throw')
-  t.end()
-})
-
-t.test('uses infer-owner ids instead of process-retrieved if valid', (t) => {
+t.test('uses infer-owner ids instead of process-retrieved if valid', async (t) => {
   const getgid = process.getgid
   process.getuid = () => 0
   process.getgid = () => 1
@@ -163,27 +107,25 @@ t.test('uses infer-owner ids instead of process-retrieved if valid', (t) => {
     process.getuid = getuid
     process.getgid = getgid
   })
-  t.plan(3)
-  function chownr () {}
-  chownr.sync = (path, uid, gid) => {
-    t.equal(path, filename, 'should match filename')
-    t.equal(uid, 501, 'should match uid')
-    t.equal(gid, 20, 'should match gid')
-  }
   const fixOwner = getFixOwner(t, {
-    chownr,
-    'infer-owner': {
-      sync: () => ({
+    chownr: (path, uid, gid, cb) => {
+      t.equal(path, filename, 'should match filename')
+      t.equal(uid, 501, 'should match uid')
+      t.equal(gid, 20, 'should match gid')
+      return cb()
+    },
+    'infer-owner': () => {
+      return Promise.resolve({
         uid: 501,
         gid: 20,
-      }),
+      })
     },
   })
-  fixOwner.chownr.sync(CACHE, filename)
+  await fixOwner.chownr(CACHE, filename)
 })
 
-// mkdirfix and mkdirfix.sync error handling tests
+// mkdirfix error handling tests
 
 t.test('attempt to mkdirfix existing path', async t => {
   const fixOwner = getFixOwner(t, {
@@ -202,38 +144,3 @@ t.test('attempt to mkdirfix unknown error', (t) => {
   t.plan(1)
   t.rejects(() => fixOwner.mkdirfix(CACHE, filename), 'should throw unknown errors')
 })
-
-t.test('attempt to mkdirfix.sync existing path', (t) => {
-  function mkdirp () {}
-  mkdirp.sync = () => {
-    throw pathExistsError
-  }
-  const fixOwner = getFixOwner(t, { mkdirp })
-
-  t.notOk(fixOwner.mkdirfix.sync(CACHE, filename), 'should not throw if path exists')
-  t.end()
-})
-
-t.test('attempt to mkdirfix.sync unknown error', (t) => {
-  function mkdirp () {}
-  mkdirp.sync = () => {
-    throw genericError
-  }
-  const fixOwner = getFixOwner(t, { mkdirp })
-
-  t.throws(
-    () => fixOwner.mkdirfix.sync(CACHE, filename),
-    genericError,
-    'should throw unknown errors'
-  )
-  t.end()
-})
-
-t.test('attempt to mkdirfix.sync but no dir created', (t) => {
-  function mkdirp () {}
-  mkdirp.sync = () => {}
-  const fixOwner = getFixOwner(t, { mkdirp })
-
-  t.notOk(fixOwner.mkdirfix.sync(CACHE, filename), 'should not throw')
-  t.end()
-})
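
Post-patch note (trailing commentary, not part of the diff): the tests above
now exercise only the promise-returning APIs. A hypothetical smoke test of the
surviving surface — it assumes tap and the published cacache API, and is not
included in this patch:

```javascript
const t = require('tap')
const cacache = require('cacache')

t.test('async-only surface', async t => {
  const cache = t.testdir()
  // put() resolves to the integrity of the stored content
  const integrity = await cacache.put(cache, 'key', Buffer.from('data'))
  // get() resolves to { data, metadata, integrity, size }
  const { data, size } = await cacache.get(cache, 'key')
  t.same(data, Buffer.from('data'), 'round-trips content')
  t.equal(size, 4, 'reports stored size')
  // hasContent() resolves to false or { sri, size, stat }
  t.ok(await cacache.get.hasContent(cache, integrity), 'content is present')
  // the sync entry points removed by this patch are gone entirely
  t.equal(cacache.get.sync, undefined, 'get.sync is no longer exported')
})
```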