Skip to content

Commit

Permalink
feat(copy): added cacache.get.copy api for fast copies (#107)
Browse files Browse the repository at this point in the history
  • Loading branch information
zkat committed Oct 7, 2017
1 parent cc0fd2f commit 067b5f6
Show file tree
Hide file tree
Showing 9 changed files with 135 additions and 2 deletions.
2 changes: 1 addition & 1 deletion .travis.yml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
language: node_js
sudo: false
node_js:
- "7"
- "8"
- "6"
- "4"
2 changes: 1 addition & 1 deletion appveyor.yml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
environment:
matrix:
- nodejs_version: "7"
- nodejs_version: "8"
- nodejs_version: "6"
- nodejs_version: "4"

Expand Down
36 changes: 36 additions & 0 deletions get.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

const BB = require('bluebird')

const fs = require('fs')
const index = require('./lib/entry-index')
const memo = require('./lib/memoization')
const pipe = require('mississippi').pipe
Expand Down Expand Up @@ -152,3 +153,38 @@ function info (cache, key, opts) {
}

module.exports.hasContent = read.hasContent

// Public entry points for the copy API. Both delegate to the shared
// `copy()` implementation below; the only difference is whether the
// second argument is an index key or a content integrity digest.
module.exports.copy = cp
module.exports.copy.byDigest = cpDigest

// Copy the content indexed under `key` out of the cache into `dest`.
function cp (cache, key, dest, opts) {
  return copy(false, cache, key, dest, opts)
}

// Like `cp`, but addresses the content directly by its integrity digest,
// skipping the index lookup.
function cpDigest (cache, digest, dest, opts) {
  return copy(true, cache, digest, dest, opts)
}
// Shared implementation for cacache.get.copy and cacache.get.copy.byDigest.
//
// When `byDigest` is true, `key` is actually a content integrity digest and
// the index is skipped; the resolved value is the digest itself. Otherwise
// the index entry is looked up and `{metadata, size, integrity}` is resolved.
// Throws index.NotFoundError when a keyed lookup finds no entry.
function copy (byDigest, cache, key, dest, opts) {
  opts = opts || {}
  if (read.copy) {
    // Fast path: read.copy exists only when the runtime supports
    // fs.copyFile, letting us copy straight out of the content store
    // without buffering the data in memory.
    return (
      byDigest ? BB.resolve(null) : index.find(cache, key, opts)
    ).then(entry => {
      if (!entry && !byDigest) {
        throw new index.NotFoundError(cache, key)
      }
      return read.copy(
        cache, byDigest ? key : entry.integrity, dest, opts
      ).then(() => byDigest ? key : {
        metadata: entry.metadata,
        size: entry.size,
        integrity: entry.integrity
      })
    })
  } else {
    // Slow path for older Node: read the whole blob, then write it out.
    // FIX: `fs` here is the plain `require('fs')` module, which has no
    // `writeFileAsync` method -- promisify writeFile explicitly instead
    // of calling the nonexistent fs.writeFileAsync.
    const writeFile = BB.promisify(fs.writeFile)
    return getData(byDigest, cache, key, opts).then(res => {
      return writeFile(dest, byDigest ? res : res.data)
        .then(() => byDigest ? key : {
          metadata: res.metadata,
          size: res.size,
          integrity: res.integrity
        })
    })
  }
}
12 changes: 12 additions & 0 deletions lib/content/read.js
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,18 @@ function readStream (cache, integrity, opts) {
return stream
}

// content.read.copy is only exported when the runtime provides
// fs.copyFile (Node >= 8.5); callers feature-detect via `read.copy`.
if (fs.copyFile) {
  module.exports.copy = copy
}
// Copy the verified content blob for `integrity` to `dest`, resolving
// with the content's size. `opts` is accepted for interface symmetry
// with the other read functions.
function copy (cache, integrity, dest, opts) {
  opts = opts || {}
  return pickContentSri(cache, integrity).then(content => {
    const src = contentPath(cache, content.sri)
    return fs.copyFileAsync(src, dest).then(() => content.size)
  })
}

module.exports.hasContent = hasContent
function hasContent (cache, integrity) {
if (!integrity) { return BB.resolve(false) }
Expand Down
2 changes: 2 additions & 0 deletions locales/en.js
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,8 @@ x.get = (cache, key, opts) => get(cache, key, opts)
x.get.byDigest = (cache, hash, opts) => get.byDigest(cache, hash, opts)
x.get.stream = (cache, key, opts) => get.stream(cache, key, opts)
x.get.stream.byDigest = (cache, hash, opts) => get.stream.byDigest(cache, hash, opts)
// English-locale aliases for the new copy API (cacache.get.copy and
// cacache.get.copy.byDigest).
x.get.copy = (cache, key, dest, opts) => get.copy(cache, key, dest, opts)
x.get.copy.byDigest = (cache, hash, dest, opts) => get.copy.byDigest(cache, hash, dest, opts)
x.get.info = (cache, key) => get.info(cache, key)
x.get.hasContent = (cache, hash) => get.hasContent(cache, hash)

Expand Down
2 changes: 2 additions & 0 deletions locales/es.js
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,8 @@ x.saca = (cache, clave, ops) => get(cache, clave, ops)
x.saca.porHacheo = (cache, hacheo, ops) => get.byDigest(cache, hacheo, ops)
x.saca.flujo = (cache, clave, ops) => get.stream(cache, clave, ops)
x.saca.flujo.porHacheo = (cache, hacheo, ops) => get.stream.byDigest(cache, hacheo, ops)
// Spanish-locale aliases for the new copy API.
// FIX: `x.sava` was a typo for `x.saca` (the object every other alias in
// this file hangs off of, e.g. L121-L123/L126-L127); assigning a property
// on the undefined `x.sava` would throw a TypeError when this module loads.
x.saca.copia = (cache, clave, destino, opts) => get.copy(cache, clave, destino, opts)
x.saca.copia.porHacheo = (cache, hacheo, destino, opts) => get.copy.byDigest(cache, hacheo, destino, opts)
x.saca.info = (cache, clave) => get.info(cache, clave)
x.saca.tieneDatos = (cache, hacheo) => get.hasContent(cache, hacheo)

Expand Down
30 changes: 30 additions & 0 deletions test/benchmarks/content.read.js
Original file line number Diff line number Diff line change
@@ -1,6 +1,10 @@
'use strict'

const BB = require('bluebird')

const CacheContent = require('../util/cache-content')
const fs = BB.promisifyAll(require('fs'))
const path = require('path')
const Tacks = require('tacks')
const ssri = require('ssri')

Expand Down Expand Up @@ -58,6 +62,32 @@ module.exports = (suite, CACHE) => {
}
})

// Benchmark for content.read.copy(), falling back to a read+writeFile
// round-trip on platforms where fs.copyFile (and thus read.copy) is
// unavailable, so the two strategies can be compared.
suite.add('content.read.copy()', {
  defer: true,
  setup () {
    new Tacks(CacheContent({
      [BIGINTEGRITY]: BIGCONTENT
    })).create(CACHE)
  },
  fn (deferred) {
    const dest = path.join(CACHE, 'bigdata')
    // Pick the fast path when available, otherwise emulate it.
    const done = read.copy
      ? read.copy(CACHE, BIGINTEGRITY, dest)
      : read(CACHE, BIGINTEGRITY)
        .then(data => fs.writeFileAsync(dest, data))
    done.then(
      () => deferred.resolve(),
      err => deferred.reject(err)
    )
  }
})

suite.add('content.read.stream() small data', {
defer: true,
setup () {
Expand Down
20 changes: 20 additions & 0 deletions test/content.read.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,12 +4,15 @@ const Buffer = require('safe-buffer').Buffer
const BB = require('bluebird')

const finished = BB.promisify(require('mississippi').finished)
const fs = require('fs')
const path = require('path')
const ssri = require('ssri')
const Tacks = require('tacks')
const test = require('tap').test
const testDir = require('./util/test-dir')(__filename)

BB.promisifyAll(fs)

const CACHE = path.join(testDir, 'cache')
const CacheContent = require('./util/cache-content')

Expand Down Expand Up @@ -147,3 +150,20 @@ test('hasContent: returns { sri, size } when a cache file exists', function (t)
})
)
})

// read.copy only exists when fs.copyFile does, so skip this test on
// Node versions that lack it.
test('copy: copies content to a destination path', {
  skip: !fs.copyFile && 'Not supported on node versions without fs.copyFile'
}, t => {
  const CONTENT = Buffer.from('foobarbaz')
  const INTEGRITY = ssri.fromData(CONTENT)
  const DEST = path.join(CACHE, 'foobar-file')
  // Seed the cache's content store with the blob under its digest.
  new Tacks(CacheContent({
    [INTEGRITY]: CONTENT
  })).create(CACHE)
  return read.copy(CACHE, INTEGRITY, DEST)
    .then(() => fs.readFileAsync(DEST))
    .then(data => {
      t.deepEqual(data, CONTENT, 'file successfully copied')
    })
})
31 changes: 31 additions & 0 deletions test/get.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ const Buffer = require('safe-buffer').Buffer
const BB = require('bluebird')

const finished = BB.promisify(require('mississippi').finished)
const fs = require('fs')
const index = require('../lib/entry-index')
const memo = require('../lib/memoization')
const path = require('path')
Expand All @@ -13,6 +14,8 @@ const test = require('tap').test
const testDir = require('./util/test-dir')(__filename)
const ssri = require('ssri')

BB.promisifyAll(fs)

const CacheContent = require('./util/cache-content')

const CACHE = path.join(testDir, 'cache')
Expand Down Expand Up @@ -104,6 +107,34 @@ test('basic stream get', t => {
})
})

// Exercises both get.copy (keyed lookup, resolves entry metadata) and
// get.copy.byDigest (direct content copy) against the same fixture.
test('get.copy', t => {
  const DEST = path.join(CACHE, 'copymehere')
  const fixture = new Tacks(CacheContent({
    [INTEGRITY]: CONTENT
  }))
  fixture.create(CACHE)
  const readDest = () => fs.readFileAsync(DEST)
  return index.insert(CACHE, KEY, INTEGRITY, opts())
    .then(() => get.copy(CACHE, KEY, DEST))
    .then(res => {
      t.deepEqual(res, {
        metadata: METADATA,
        integrity: INTEGRITY,
        size: SIZE
      }, 'copy operation returns basic metadata')
      return readDest()
    })
    .then(data => {
      t.deepEqual(data, CONTENT, 'data copied by key matches')
      // Remove the destination so the byDigest variant writes fresh.
      return rimraf(DEST)
    })
    .then(() => get.copy.byDigest(CACHE, INTEGRITY, DEST))
    .then(readDest)
    .then(data => {
      t.deepEqual(data, CONTENT, 'data copied by digest matches')
      return rimraf(DEST)
    })
})

test('ENOENT if not found', t => {
return get(CACHE, KEY).then(() => {
throw new Error('lookup should fail')
Expand Down

0 comments on commit 067b5f6

Please sign in to comment.