feat(size): handle content size info (#49)
zkat committed Apr 20, 2017
1 parent 0be449e commit 91230af
Showing 12 changed files with 144 additions and 78 deletions.
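As a rough sketch of how the new `size` field surfaces to callers (the cache path and key below are made up; the API shape follows the README changes in this commit):

```js
const cacache = require('cacache')

const cachePath = '/tmp/my-cache' // hypothetical cache location

// After a put, the index entry (and therefore get.info) carries a byte count.
cacache.put(cachePath, 'my-key', Buffer.from('hello world'))
  .then(() => cacache.get.info(cachePath, 'my-key'))
  .then(info => {
    // info.size is the length of the cached content in bytes (11 here).
    console.log(info.integrity, info.size)
  })
```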
14 changes: 11 additions & 3 deletions README.md
@@ -121,6 +121,7 @@ cacache.ls(cachePath).then(console.log)
integrity: 'sha512-BaSe64/EnCoDED+HAsh=='
path: '.testcache/content/deadbeef', // joined with `cachePath`
time: 12345698490,
size: 4023948,
metadata: {
name: 'blah',
version: '1.2.3',
@@ -131,7 +132,8 @@ cacache.ls(cachePath).then(console.log)
key: 'other-thing',
integrity: 'sha1-ANothER+hasH=',
path: '.testcache/content/bada55',
time: 11992309289
time: 11992309289,
size: 111112
}
}
```
@@ -153,6 +155,7 @@ cacache.ls.stream(cachePath).on('data', console.log)
integrity: 'sha512-BaSe64HaSh',
path: '.testcache/content/deadbeef', // joined with `cachePath`
time: 12345698490,
size: 13423,
metadata: {
name: 'blah',
version: '1.2.3',
@@ -164,7 +167,8 @@ cacache.ls.stream(cachePath).on('data', console.log)
key: 'other-thing',
integrity: 'whirlpool-WoWSoMuchSupport',
path: '.testcache/content/bada55',
time: 11992309289
time: 11992309289,
size: 498023984029
}

{
@@ -208,7 +212,8 @@ cache.get(cachePath, 'my-thing').then(console.log)
thingName: 'my'
},
integrity: 'sha512-BaSe64HaSh',
data: Buffer#<deadbeef>
data: Buffer#<deadbeef>,
size: 9320
}

// Look up by digest
@@ -280,6 +285,7 @@ cacache.get.info(cachePath, 'my-thing').then(console.log)
integrity: 'sha256-MUSTVERIFY+ALL/THINGS=='
path: '.testcache/content/deadbeef',
time: 12345698490,
size: 849234,
metadata: {
name: 'blah',
version: '1.2.3',
@@ -357,6 +363,8 @@ for inserted data. Can use any algorithm listed in `crypto.getHashes()` or
`'omakase'`/`'お任せします'` to pick a random hash algorithm on each insertion. You
may also use any anagram of `'modnar'` to use this feature.

Has no effect if `opts.integrity` is present.
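A minimal usage sketch for this option (not part of the commit; cache path, key, and data are made up):

```js
const cacache = require('cacache')

const cachePath = '/tmp/my-cache'         // hypothetical
const data = Buffer.from('package bytes') // hypothetical

// Request a specific digest algorithm; ignored when opts.integrity is passed.
cacache.put(cachePath, 'registry:some-pkg', data, {
  algorithms: ['sha384']
}).then(integrity => {
  console.log('stored as', integrity) // e.g. 'sha384-...'
})
```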

##### `opts.uid`/`opts.gid`

If provided, cacache will do its best to make sure any new files added to the
8 changes: 7 additions & 1 deletion get.js
@@ -26,7 +26,8 @@ function getData (byDigest, cache, key, opts) {
return BB.resolve(byDigest ? memoized : {
metadata: memoized.entry.metadata,
data: memoized.data,
integrity: memoized.entry.integrity
integrity: memoized.entry.integrity,
size: memoized.entry.size
})
}
return (
@@ -41,6 +42,7 @@ function getData (byDigest, cache, key, opts) {
}).then(data => byDigest ? data : {
metadata: entry.metadata,
data: data,
size: entry.size,
integrity: entry.integrity
}).then(res => {
if (opts.memoize && byDigest) {
@@ -62,6 +64,7 @@ function getStream (cache, key, opts) {
stream.on('newListener', function (ev, cb) {
ev === 'metadata' && cb(memoized.entry.metadata)
ev === 'integrity' && cb(memoized.entry.integrity)
ev === 'size' && cb(memoized.entry.size)
})
stream.write(memoized.data, () => stream.end())
return stream
@@ -87,11 +90,14 @@ function getStream (cache, key, opts) {
} else {
memoStream = through()
}
opts.size = opts.size == null ? entry.size : opts.size
stream.emit('metadata', entry.metadata)
stream.emit('integrity', entry.integrity)
stream.emit('size', entry.size)
stream.on('newListener', function (ev, cb) {
ev === 'metadata' && cb(entry.metadata)
ev === 'integrity' && cb(entry.integrity)
ev === 'size' && cb(entry.size)
})
pipe(
read.readStream(cache, entry.integrity, opts),
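The streaming read path now reports the entry's size as a `'size'` event alongside `'metadata'` and `'integrity'`, replayed for listeners attached after emission. A rough consumer sketch (cache path and key are made up):

```js
const cacache = require('cacache')

const stream = cacache.get.stream('/tmp/my-cache', 'my-key') // hypothetical args
stream.on('metadata', meta => console.log('metadata:', meta))
stream.on('integrity', sri => console.log('integrity:', sri))
stream.on('size', size => console.log('size in bytes:', size)) // new in this commit
stream.on('data', chunk => { /* cached content bytes */ })
stream.on('error', err => console.error('lookup or read failed:', err))
```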
22 changes: 13 additions & 9 deletions lib/content/write.js
@@ -37,7 +37,7 @@ function write (cache, data, opts) {
).then(() => (
moveToDestination(tmp, cache, sri, opts)
))
)).then(() => sri)
)).then(() => ({integrity: sri, size: data.length}))
}

module.exports.stream = writeStream
@@ -62,8 +62,9 @@ function writeStream (cache, opts) {
e.code = 'ENODATA'
return ret.emit('error', e)
}
allDone.then(sri => {
sri && ret.emit('integrity', sri)
allDone.then(res => {
res.integrity && ret.emit('integrity', res.integrity)
res.size !== null && ret.emit('size', res.size)
cb()
}, e => {
ret.emit('error', e)
@@ -81,30 +82,33 @@ function handleContent (inputStream, cache, opts, errCheck) {
errCheck()
return pipeToTmp(
inputStream, cache, tmp.target, opts, errCheck
).then(sri => {
).then(res => {
return moveToDestination(
tmp, cache, sri, opts, errCheck
).then(() => sri)
tmp, cache, res.integrity, opts, errCheck
).then(() => res)
})
})
}

function pipeToTmp (inputStream, cache, tmpTarget, opts, errCheck) {
return BB.resolve().then(() => {
let sri
let integrity
let size
const hashStream = ssri.integrityStream({
integrity: opts.integrity,
algorithms: opts.algorithms,
size: opts.size
}).on('integrity', s => {
sri = s
integrity = s
}).on('size', s => {
size = s
})
const outStream = fs.createWriteStream(tmpTarget, {
flags: 'wx'
})
errCheck()
return pipe(inputStream, hashStream, outStream).then(() => {
return sri
return {integrity, size}
}, err => {
return rimraf(tmpTarget).then(() => { throw err })
})
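Internally, the content writer now resolves with both the integrity and the byte count instead of a bare `sri`, and its stream form emits a matching `'size'` event. A small sketch of consuming it (the require path is taken from the diff; this is not a public API):

```js
const write = require('./lib/content/write')

const cachePath = '/tmp/my-cache' // hypothetical

// Buffer form: resolves with { integrity, size } once the content is in place.
write(cachePath, Buffer.from('some data'), {}).then(res => {
  console.log(String(res.integrity)) // ssri Integrity of the written data
  console.log(res.size)              // 9 bytes for 'some data'
})

// Stream form: the same information arrives as events.
const ws = write.stream(cachePath, {})
ws.on('integrity', sri => console.log('integrity:', String(sri)))
ws.on('size', size => console.log('size:', size))
ws.end(Buffer.from('some data'))
```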
2 changes: 2 additions & 0 deletions lib/entry-index.js
@@ -36,6 +36,7 @@ function insert (cache, key, integrity, opts) {
key,
integrity: integrity && ssri.stringify(integrity),
time: Date.now(),
size: opts.size,
metadata: opts.metadata
}
return fixOwner.mkdirfix(
@@ -206,6 +207,7 @@ function formatEntry (cache, entry) {
key: entry.key,
integrity: entry.integrity,
path: contentPath(cache, entry.integrity),
size: entry.size,
time: entry.time,
metadata: entry.metadata
}
1 change: 0 additions & 1 deletion lib/verify.js
@@ -9,7 +9,6 @@ const fs = require('graceful-fs')
const glob = BB.promisify(require('glob'))
const index = require('./entry-index')
const path = require('path')
const pipe = BB.promisify(require('mississippi').pipe)
const rimraf = BB.promisify(require('rimraf'))
const ssri = require('ssri')

2 changes: 1 addition & 1 deletion package.json
@@ -3,7 +3,7 @@
"version": "7.0.5",
"cache-version": {
"content": "2",
"index": "4"
"index": "5"
},
"description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.",
"main": "index.js",
17 changes: 13 additions & 4 deletions put.js
@@ -8,12 +8,14 @@ const to = require('mississippi').to
module.exports = putData
function putData (cache, key, data, opts) {
opts = opts || {}
return write(cache, data, opts).then(integrity => {
return index.insert(cache, key, integrity, opts).then(entry => {
return write(cache, data, opts).then(res => {
// TODO - stop modifying opts
opts.size = res.size
return index.insert(cache, key, res.integrity, opts).then(entry => {
if (opts.memoize) {
memo.put(cache, entry, data)
}
return integrity
return res.integrity
})
})
}
@@ -22,8 +24,13 @@ module.exports.stream = putStream
function putStream (cache, key, opts) {
opts = opts || {}
let integrity
const contentStream = write.stream(cache, opts).on('integrity', int => {
let size
const contentStream = write.stream(
cache, opts
).on('integrity', int => {
integrity = int
}).on('size', s => {
size = s
})
let memoData
let memoTotal = 0
@@ -38,6 +45,8 @@ function putStream (cache, key, opts) {
})
}, cb => {
contentStream.end(() => {
// TODO - stop modifying `opts`
opts.size = size
index.insert(cache, key, integrity, opts).then(entry => {
if (opts.memoize) {
memo.put(cache, entry, Buffer.concat(memoData, memoTotal))
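Putting it together for the streaming write path: the content writer's `'size'` event is captured and forwarded into the index entry through `opts.size`, so the byte count later shows up in `get.info`. A sketch with made-up file and cache paths:

```js
const fs = require('fs')
const cacache = require('cacache')

const cachePath = '/tmp/my-cache' // hypothetical

fs.createReadStream('./some-tarball.tgz') // hypothetical input file
  .pipe(cacache.put.stream(cachePath, 'my-key'))
  .on('error', err => console.error(err))
  .on('finish', () => {
    // Once the put stream finishes, the index entry has been written,
    // including the size observed while hashing the content.
    cacache.get.info(cachePath, 'my-key').then(info => {
      console.log('indexed size:', info.size)
    })
  })
```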