diff --git a/.travis.yml b/.travis.yml index 6767eb7a..48ca09dd 100644 --- a/.travis.yml +++ b/.travis.yml @@ -19,5 +19,4 @@ notifications: - matteo.collina@gmail.com - pedro.teixeira@gmail.com - mail@substack.net -script: npm run-script alltests - +script: npm test diff --git a/README.md b/README.md index 833d42f0..7baf8106 100644 --- a/README.md +++ b/README.md @@ -118,7 +118,6 @@ db.put('name', 'LevelUP', function (err) { * db.createReadStream() * db.createKeyStream() * db.createValueStream() - * db.createWriteStream() ### Special operations exposed by LevelDOWN @@ -127,6 +126,9 @@ db.put('name', 'LevelUP', function (err) { * leveldown.destroy() * leveldown.repair() +### Special Notes + * What happened to db.createWriteStream() + -------------------------------------------------------- @@ -189,7 +191,7 @@ var db = levelup(memdown) * `'compression'` *(boolean, default: `true`)*: If `true`, all *compressible* data will be run through the Snappy compression algorithm before being stored. Snappy is very fast and shouldn't gain much speed by disabling so leave this on unless you have good reason to turn it off. -* `'cacheSize'` *(number, default: `8 * 1024 * 1024`)*: The size (in bytes) of the in-memory [LRU](http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used) cache with frequently used uncompressed block contents. +* `'cacheSize'` *(number, default: `8 * 1024 * 1024`)*: The size (in bytes) of the in-memory [LRU](http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used) cache with frequently used uncompressed block contents. * `'keyEncoding'` and `'valueEncoding'` *(string, default: `'utf8'`)*: The encoding of the keys and values passed through Node.js' `Buffer` implementation (see [Buffer#toString()](http://nodejs.org/docs/latest/api/buffer.html#buffer_buf_tostring_encoding_start_end)).

'utf8' is the default encoding for both keys and values so you can simply pass in strings and expect strings from your get() operations. You can also pass Buffer objects as keys and/or values and conversion will be performed.

@@ -251,7 +253,7 @@ db.get('foo', function (err, value) { Encoding of the `key` object will adhere to the `'keyEncoding'` option provided to levelup(), although you can provide alternative encoding settings in the options for `get()` (it's recommended that you stay consistent in your encoding of keys and values in a single store). -LevelDB will by default fill the in-memory LRU Cache with data from a call to get. Disabling this is done by setting `fillCache` to `false`. +LevelDB will by default fill the in-memory LRU Cache with data from a call to get. Disabling this is done by setting `fillCache` to `false`. -------------------------------------------------------- @@ -449,104 +451,13 @@ db.createReadStream({ keys: false, values: true }) ``` -------------------------------------------------------- - -### db.createWriteStream([options]) - -A **WriteStream** can be obtained by calling the `createWriteStream()` method. The resulting stream is a complete Node.js-style [Writable Stream](http://nodejs.org/docs/latest/api/stream.html#stream_writable_stream) which accepts objects with `'key'` and `'value'` pairs on its `write()` method. - -The WriteStream will buffer writes and submit them as a `batch()` operations where writes occur *within the same tick*. - -```js -var ws = db.createWriteStream() - -ws.on('error', function (err) { - console.log('Oh my!', err) -}) -ws.on('close', function () { - console.log('Stream closed') -}) - -ws.write({ key: 'name', value: 'Yuri Irsenovich Kim' }) -ws.write({ key: 'dob', value: '16 February 1941' }) -ws.write({ key: 'spouse', value: 'Kim Young-sook' }) -ws.write({ key: 'occupation', value: 'Clown' }) -ws.end() -``` - -The standard `write()`, `end()`, `destroy()` and `destroySoon()` methods are implemented on the WriteStream. `'drain'`, `'error'`, `'close'` and `'pipe'` events are emitted. - -You can specify encodings both for the whole stream and individual entries: + +#### What happened to `db.createWriteStream`? 
-To set the encoding for the whole stream, provide an options object as the first parameter to `createWriteStream()` with `'keyEncoding'` and/or `'valueEncoding'`. - -To set the encoding for an individual entry: - -```js -writeStream.write({ - key : new Buffer([1, 2, 3]) - , value : { some: 'json' } - , keyEncoding : 'binary' - , valueEncoding : 'json' -}) -``` - -#### write({ type: 'put' }) - -If individual `write()` operations are performed with a `'type'` property of `'del'`, they will be passed on as `'del'` operations to the batch. - -```js -var ws = db.createWriteStream() - -ws.on('error', function (err) { - console.log('Oh my!', err) -}) -ws.on('close', function () { - console.log('Stream closed') -}) - -ws.write({ type: 'del', key: 'name' }) -ws.write({ type: 'del', key: 'dob' }) -ws.write({ type: 'put', key: 'spouse' }) -ws.write({ type: 'del', key: 'occupation' }) -ws.end() -``` - -#### db.createWriteStream({ type: 'del' }) - -If the *WriteStream* is created with a `'type'` option of `'del'`, all `write()` operations will be interpreted as `'del'`, unless explicitly specified as `'put'`. - -```js -var ws = db.createWriteStream({ type: 'del' }) - -ws.on('error', function (err) { - console.log('Oh my!', err) -}) -ws.on('close', function () { - console.log('Stream closed') -}) - -ws.write({ key: 'name' }) -ws.write({ key: 'dob' }) -// but it can be overridden -ws.write({ type: 'put', key: 'spouse', value: 'Ri Sol-ju' }) -ws.write({ key: 'occupation' }) -ws.end() -``` - -#### Pipes and Node Stream compatibility - -A ReadStream can be piped directly to a WriteStream, allowing for easy copying of an entire database. 
A simple `copy()` operation is included in LevelUP that performs exactly this on two open databases: - -```js -function copy (srcdb, dstdb, callback) { - srcdb.createReadStream().pipe(dstdb.createWriteStream()).on('close', callback) -} -``` - -The ReadStream is also [fstream](https://github.com/isaacs/fstream)-compatible which means you should be able to pipe to and from fstreams. So you can serialize and deserialize an entire database to a directory where keys are filenames and values are their contents, or even into a *tar* file using [node-tar](https://github.com/isaacs/node-tar). See the [fstream functional test](https://github.com/rvagg/node-levelup/blob/master/test/functional/fstream-test.js) for an example. *(Note: I'm not really sure there's a great use-case for this but it's a fun example and it helps to harden the stream implementations.)* - -KeyStreams and ValueStreams can be treated like standard streams of raw data. If `'keyEncoding'` or `'valueEncoding'` is set to `'binary'` the `'data'` events will simply be standard Node `Buffer` objects straight out of the data store. +Yes we have removed `db.createWriteStream` but not to worry, there is good reason for this. *Disclaimer*: if you are not in a stage where you are worried about performance but want a streaming interface +into your database, please check out [`level-ws`][level-ws]. +TODO: talk about performance and multiple writeStream implementations -------------------------------------------------------- @@ -608,7 +519,7 @@ require('leveldown').destroy('./huge.db', function (err) { console.log('done!') > If a DB cannot be opened, you may attempt to call this method to resurrect as much of the contents of the database as possible. Some data may be lost, so be careful when calling this function on a database that contains important information. -You will find information on the *repair* operation in the *LOG* file inside the store directory. 
+You will find information on the *repair* operation in the *LOG* file inside the store directory. A `repair()` can also be used to perform a compaction of the LevelDB log into table files. @@ -730,3 +641,5 @@ LevelUP is licensed under the MIT license. All rights not explicitly granted in ======= *LevelUP builds on the excellent work of the LevelDB and Snappy teams from Google and additional contributors. LevelDB and Snappy are both issued under the [New BSD Licence](http://opensource.org/licenses/BSD-3-Clause).* + +[level-ws]: https://github.com/level/level-ws diff --git a/lib/levelup.js b/lib/levelup.js index 3f0a4ffa..d7dd2ff1 100644 --- a/lib/levelup.js +++ b/lib/levelup.js @@ -18,7 +18,6 @@ var EventEmitter = require('events').EventEmitter , InitializationError = require('./errors').InitializationError , ReadStream = require('./read-stream') - , WriteStream = require('./write-stream') , util = require('./util') , Batch = require('./batch') , codec = require('./codec') @@ -440,12 +439,6 @@ LevelUP.prototype.createValueStream = function (options) { return this.createReadStream(extend(options, { keys: false, values: true })) } -LevelUP.prototype.writeStream = -LevelUP.prototype.createWriteStream = function (options) { - //XXX is extend needed here? - return new WriteStream(extend(options), this) -} - LevelUP.prototype.toString = function () { return 'LevelUP' } @@ -457,7 +450,6 @@ function utilStatic (name) { } module.exports = LevelUP -module.exports.copy = util.copy // DEPRECATED: prefer accessing LevelDOWN for this: require('leveldown').destroy() module.exports.destroy = utilStatic('destroy') // DEPRECATED: prefer accessing LevelDOWN for this: require('leveldown').repair() diff --git a/lib/util.js b/lib/util.js index 99a62baa..2e74f89f 100644 --- a/lib/util.js +++ b/lib/util.js @@ -23,13 +23,6 @@ var extend = require('xtend') return eo }()) -function copy (srcdb, dstdb, callback) { - srcdb.readStream() - .pipe(dstdb.writeStream()) - .on('close', callback ? 
callback : function () {}) - .on('error', callback ? callback : function (err) { throw err }) -} - function getOptions (levelup, options) { var s = typeof options == 'string' // just an encoding if (!s && options && options.encoding && !options.valueEncoding) @@ -84,7 +77,6 @@ function isDefined (v) { module.exports = { defaultOptions : defaultOptions - , copy : copy , getOptions : getOptions , getLevelDOWN : getLevelDOWN , dispatchError : dispatchError diff --git a/lib/write-stream.js b/lib/write-stream.js deleted file mode 100644 index 6b50b988..00000000 --- a/lib/write-stream.js +++ /dev/null @@ -1,178 +0,0 @@ -/* Copyright (c) 2012-2014 LevelUP contributors - * See list at - * MIT License - * - */ - -var Stream = require('stream').Stream - , inherits = require('util').inherits - , extend = require('xtend') - , bl = require('bl') - - , setImmediate = global.setImmediate || process.nextTick - - , getOptions = require('./util').getOptions - - , defaultOptions = { type: 'put' } - -function WriteStream (options, db) { - if (!(this instanceof WriteStream)) - return new WriteStream(options, db) - - Stream.call(this) - this._options = extend(defaultOptions, getOptions(db, options)) - this._db = db - this._buffer = [] - this._status = 'init' - this._end = false - this.writable = true - this.readable = false - - var self = this - , ready = function () { - if (!self.writable) - return - self._status = 'ready' - self.emit('ready') - self._process() - } - - if (db.isOpen()) - setImmediate(ready) - else - db.once('ready', ready) -} - -inherits(WriteStream, Stream) - -WriteStream.prototype.write = function (data) { - if (!this.writable) - return false - this._buffer.push(data) - if (this._status != 'init') - this._processDelayed() - if (this._options.maxBufferLength && - this._buffer.length > this._options.maxBufferLength) { - this._writeBlock = true - return false - } - return true -} - -WriteStream.prototype.end = function (data) { - var self = this - if (data) - 
this.write(data) - setImmediate(function () { - self._end = true - self._process() - }) -} - -WriteStream.prototype.destroy = function () { - this.writable = false - this.end() -} - -WriteStream.prototype.destroySoon = function () { - this.end() -} - -WriteStream.prototype.add = function (entry) { - if (!entry.props) - return - if (entry.props.Directory) - entry.pipe(this._db.writeStream(this._options)) - else if (entry.props.File || entry.File || entry.type == 'File') - this._write(entry) - return true -} - -WriteStream.prototype._processDelayed = function () { - var self = this - setImmediate(function () { - self._process() - }) -} - -WriteStream.prototype._process = function () { - var buffer - , self = this - - , cb = function (err) { - if (!self.writable) - return - if (self._status != 'closed') - self._status = 'ready' - if (err) { - self.writable = false - return self.emit('error', err) - } - self._process() - } - - if (self._status != 'ready' && self.writable) { - if (self._buffer.length && self._status != 'closed') - self._processDelayed() - return - } - - if (self._buffer.length && self.writable) { - self._status = 'writing' - buffer = self._buffer - self._buffer = [] - - self._db.batch(buffer.map(function (d) { - return { - type : d.type || self._options.type - , key : d.key - , value : d.value - , keyEncoding : d.keyEncoding || self._options.keyEncoding - , valueEncoding : d.valueEncoding - || d.encoding - || self._options.valueEncoding - } - }), cb) - - if (self._writeBlock) { - self._writeBlock = false - self.emit('drain') - } - - // don't allow close until callback has returned - return - } - - if (self._end && self._status != 'closed') { - self._status = 'closed' - self.writable = false - self.emit('close') - } -} - -WriteStream.prototype._write = function (entry) { - var key = entry.path || entry.props.path - , self = this - - if (!key) - return - - entry.pipe(bl(function (err, data) { - if (err) { - self.writable = false - return 
self.emit('error', err) - } - - if (self._options.fstreamRoot && - key.indexOf(self._options.fstreamRoot) > -1) - key = key.substr(self._options.fstreamRoot.length + 1) - - self.write({ key: key, value: data.slice(0) }) - })) -} - -WriteStream.prototype.toString = function () { - return 'LevelUP.WriteStream' -} - -module.exports = WriteStream diff --git a/package.json b/package.json index db35b3bf..a653c0e3 100644 --- a/package.json +++ b/package.json @@ -65,9 +65,7 @@ "semver": false }, "scripts": { - "test": "tap test/*-test.js --stderr", - "functionaltests": "node ./test/functional/fstream-test.js && node ./test/functional/binary-data-test.js && node ./test/functional/compat-test.js", - "alltests": "npm test && npm run-script functionaltests" + "test": "tap test/*-test.js --stderr" }, "license": "MIT" } diff --git a/test/copy-test.js b/test/copy-test.js deleted file mode 100644 index c87e60d0..00000000 --- a/test/copy-test.js +++ /dev/null @@ -1,68 +0,0 @@ -/* Copyright (c) 2012-2014 LevelUP contributors - * See list at - * MIT License - */ - -var levelup = require('../lib/levelup.js') - , async = require('async') - , common = require('./common') - - , assert = require('referee').assert - , refute = require('referee').refute - , buster = require('bustermove') - -buster.testCase('Copy', { - 'setUp': common.commonSetUp - , 'tearDown': common.commonTearDown - - , 'copy full database': function (done) { - var sourceData = [] - - for (var i = 0; i < 100; i++) { - sourceData.push({ - type : 'put' - , key : i - , value : Math.random() - }) - } - - var opensrc = function (callback) { - this.openTestDatabase(function (db) { - db.batch(sourceData.slice(), function (err) { - callback(err, db) - }) - }) - }.bind(this) - - , opendst = function (callback) { - this.openTestDatabase(function (db) { - callback(null, db) - }) - }.bind(this) - - , verify = function (dstdb) { - async.forEach( - sourceData - , function (data, callback) { - dstdb.get(data.key, function (err, value) { 
- refute(err) - assert.equals(+value.toString(), data.value, 'Destination data #' + data.key + ' has correct value') - callback() - }) - } - , done - ) - }.bind(this) - - async.parallel( - { src: opensrc, dst: opendst } - , function (err, dbs) { - refute(err) - levelup.copy(dbs.src, dbs.dst, function (err) { - refute(err) - verify(dbs.dst) - }) - } - ) - } -}) diff --git a/test/encoding-test.js b/test/encoding-test.js index 601c8131..b28150d2 100644 --- a/test/encoding-test.js +++ b/test/encoding-test.js @@ -78,51 +78,6 @@ buster.testCase('Encoding', { }) }) } - , 'test write-stream encoding': function (done) { - this.openTestDatabase({ encoding: 'json' }, function (db) { - var ws = db.createWriteStream({ - keyEncoding : 'utf8', - valueEncoding : 'binary' - }) - ws.on('close', function () { - db.get('foo', { - keyEncoding : 'utf8', - valueEncoding : 'binary' - }, function (err, val) { - refute(err) - assert.equals(val.toString(), '\u0001\u0002\u0003') - db.close(done) - }) - }) - ws.write({ key : 'foo', value : new Buffer([1, 2, 3]) }) - ws.end() - }) - } - , 'test write-stream chunk encoding': function (done) { - this.openTestDatabase({ encoding: 'json' }, function (db) { - var ws = db.createWriteStream({ - keyEncoding : 'utf8', - valueEncoding : 'binary' - }) - ws.on('close', function () { - db.get(new Buffer([1, 2, 3]), { - keyEncoding : 'binary', - valueEncoding : 'json' - }, function (err, val) { - refute(err) - assert.equals(val.some, 'json') - db.close(done) - }) - }) - ws.write({ - key : new Buffer([1, 2, 3]), - value : { some : 'json' }, - keyEncoding : 'binary', - valueEncoding : 'json' - }) - ws.end() - }) - } , 'test batch op encoding': function (done) { this.openTestDatabase({ encoding: 'json' }, function (db) { db.batch([ diff --git a/test/functional/binary-data-test.js b/test/functional/binary-data-test.js deleted file mode 100644 index ae5b9fb5..00000000 --- a/test/functional/binary-data-test.js +++ /dev/null @@ -1,43 +0,0 @@ -/* Copyright (c) 
2012-2014 LevelUP contributors - * See list at - * MIT License - */ - -/* - * This test unpacks a tar file, pushes that data into a - * database then compares the database data with the files - * on the filesystem. - * The different types of data are useful for testing, particularly - * the binary files. - */ - -var async = require('async') - , rimraf = require('rimraf') - , tarcommon = require('./tarcommon') - -console.log('***************************************************') -console.log('RUNNING BINARY-DATA-TEST...') - -async.series([ - // pre-clean - rimraf.bind(null, tarcommon.dblocation) - , rimraf.bind(null, tarcommon.datadir) - // extract data for comparison - , tarcommon.extract.bind(null, tarcommon.datatar, tarcommon.datadir) - // open database - , tarcommon.opendb.bind(null, tarcommon.dblocation) - // push the data into a database - , tarcommon.fstreamWrite - // run a sync put & del to force an fs sync - , tarcommon.sync - // verify database entries are the same as the files - , tarcommon.verify - // clean up - , rimraf.bind(null, tarcommon.dblocation) - , rimraf.bind(null, tarcommon.datadir) -], function (err) { - if (err) console.error('Error', err) - else console.log('No errors? All good then!') - console.log('***************************************************') - process.exit(err ? -1 : 0) -}) diff --git a/test/functional/compat-test.js b/test/functional/compat-test.js deleted file mode 100644 index 672b8c16..00000000 --- a/test/functional/compat-test.js +++ /dev/null @@ -1,50 +0,0 @@ -/* Copyright (c) 2012-2014 LevelUP contributors - * See list at - * MIT License - */ - -/* - * This test verifies that an existing database contains the - * correct data, by comparing it to the original data contained - * in a tar file. - * Useful for comparing across LevelDB versions. 
- */ - -var async = require('async') - , rimraf = require('rimraf') - , path = require('path') - , tarcommon = require('./tarcommon') - - , dbtar = path.join(__dirname, 'test-data.db.tar') - , dblocation = path.join(__dirname, 'levelup_test_compat.db') - -function runTest (dbtar, callback) { - async.series([ - // pre-clean - rimraf.bind(null, tarcommon.dblocation) - , rimraf.bind(null, dblocation) - , rimraf.bind(null, tarcommon.datadir) - // extract existing database - , tarcommon.extract.bind(null, dbtar, __dirname) - // extract data for comparison - , tarcommon.extract.bind(null, tarcommon.datatar, tarcommon.datadir) - // open database - , tarcommon.opendb.bind(null, dblocation) - // verify database entries are the same as the files - , tarcommon.verify - // clean up - , rimraf.bind(null, tarcommon.dblocation) - , rimraf.bind(null, dblocation) - , rimraf.bind(null, tarcommon.datadir) - ], callback) -} - -console.log('***************************************************') -console.log('RUNNING COMPAT-DATA-TEST...') - -runTest(dbtar, function (err) { - if (err) throw err - console.log('No errors? All good then!') - console.log('***************************************************') - process.exit(err ? 
-1 : 0) -}) diff --git a/test/functional/fstream-test.js b/test/functional/fstream-test.js deleted file mode 100644 index e1b56c21..00000000 --- a/test/functional/fstream-test.js +++ /dev/null @@ -1,102 +0,0 @@ -/* Copyright (c) 2012-2014 LevelUP contributors - * See list at - * MIT License - */ - -var assert = require('referee').assert - , refute = require('referee').refute - , fstream = require('fstream') - , async = require('async') - , mkfiletree = require('mkfiletree') - , readfiletree = require('readfiletree') - , rimraf = require('rimraf') - , bogan = require('boganipsum') - , levelup = require('../../lib/levelup') - - , fixtureFiles = { - 'foo': 'FOO!\n' - , 'a directory': { - 'bogantastic.txt': bogan() - , 'subdir': { - 'boganmeup.dat': bogan() - , 'sub sub dir': { - 'bar': 'BAR!\n' - , 'maaaaaaaate': bogan() - } - , 'bang': 'POW' - } - , 'boo': 'W00t' - } - } - , dblocation = 'levelup_test_fstream.db' - - , opendb = function (dir, callback) { - levelup(dblocation, { createIfMissing: true , errorIfExists: false }, function (err, db) { - refute(err) - callback(null, dir, db) - }) - } - - , fstreamWrite = function (dir, db, callback) { - fstream.Reader(dir) - .pipe(db.writeStream({ fstreamRoot: dir }) - .on('close', function () { - db.close(function (err) { - refute(err) - callback(null, dir) - }) - })) - } - - , fstreamRead = function (dir, db, callback) { - db.readStream({ type: 'fstream' }) - .pipe(new fstream.Writer({ path: dir + '.out', type: 'Directory' }) - .on('close', function () { - db.close(function (err) { - refute(err) - callback(null, dir) - }) - }) - ) - } - - , verify = function (dir, obj, callback) { - assert.equals(obj, fixtureFiles) - console.log('Guess what?? 
It worked!!') - callback(null, dir) - } - - , cleanUp = function (dir, callback) { - async.parallel([ - rimraf.bind(null, dir + '.out') - , rimraf.bind(null, dblocation) - , mkfiletree.cleanUp - ], callback) - } - -process.on('uncaughtException', function (err) { - refute(err) -}) - -console.log('***************************************************') -console.log('RUNNING FSTREAM-TEST...') - -async.waterfall([ - rimraf.bind(null, dblocation) - , mkfiletree.makeTemp.bind(null, 'levelup_test_fstream', fixtureFiles) - , opendb - , fstreamWrite - , opendb - , fstreamRead - , function (dir, callback) { - readfiletree(dir, function (err, obj) { - refute(err) - callback(err, dir, obj) - }) - } - , verify - , cleanUp - , function () { - console.log('***************************************************') - } -]) diff --git a/test/functional/tarcommon.js b/test/functional/tarcommon.js deleted file mode 100644 index a9b42731..00000000 --- a/test/functional/tarcommon.js +++ /dev/null @@ -1,97 +0,0 @@ -/* Copyright (c) 2012-2014 LevelUP contributors - * See list at - * MIT License - */ - -var assert = require('referee').assert - , fs = require('fs') - , path = require('path') - , fstream = require('fstream') - , tar = require('tar') - , crypto = require('crypto') - , levelup = require('../../lib/levelup') - - , dblocation = path.join(__dirname, 'levelup_test_binary.db') - , datatar = path.join(__dirname, 'test-data.tar') - , datadir = path.join(__dirname, 'test-data') - , db - , expectedEntries - -module.exports.dblocation = dblocation -module.exports.datatar = datatar -module.exports.datadir = datadir - -module.exports.opendb = function (dblocation, callback) { - levelup( - dblocation - , { createIfMissing: true , errorIfExists: false, keyEncoding: 'utf8', valueEncoding: 'binary' } - , function (err, _db) { - db = _db - console.log('Opened database...') - callback(err) - } - ) -} - -module.exports.extract = function (tarfile, dir, callback) { - expectedEntries = 0 - 
fs.createReadStream(tarfile) - .pipe(tar.Extract({ path: dir })) - .on('entry', function (entry) { - if (entry.props.File || entry.File || entry.type == 'File') - expectedEntries++ - }) - .on('end', function () { - console.log('Extracted tar file...') - callback() - }) -} - -module.exports.fstreamWrite = function (callback) { - fstream.Reader(datadir) - .pipe(db.writeStream({ fstreamRoot: path.resolve(__dirname) }) - .on('close', function () { - console.log('Piped data to database...') - callback() - })) - .on('error', callback) -} - -// using sync:true will force a flush to the fs, otherwise the readStream() is too -// quick and won't get the full data -module.exports.sync = function (callback) { - db.put('__', '__', { sync: true }, function (err) { - if (err) return callback(err) - db.del('__', { sync: true }, callback) - }) -} - -module.exports.verify = function (callback) { - var entries = 0 - db.readStream() - .on('data', function (data) { - var md5sum = crypto.createHash('md5') - , dbmd5sum - - md5sum.update(data.value) - dbmd5sum = md5sum.digest('hex') - md5sum = crypto.createHash('md5') - entries++ - fs.createReadStream(path.join(__dirname, data.key)) - .on('data', function (d) { md5sum.update(d) }) - .on('end', function () { - var fsmd5sum = md5sum.digest('hex') - assert.equals( - dbmd5sum - , fsmd5sum - , 'MD5 sum compare of ' + data.key + ' failed (' + dbmd5sum + ' != ' + fsmd5sum + ')' - ) - }) - }) - .on('end', function () { - assert.equals(entries, expectedEntries, 'correct number of entries in the database') - console.log('Finished comparing database entries...') - console.log('Cleaning up...') - callback() - }) -} diff --git a/test/functional/test-data.db.tar b/test/functional/test-data.db.tar deleted file mode 100644 index 062f46d9..00000000 Binary files a/test/functional/test-data.db.tar and /dev/null differ diff --git a/test/functional/test-data.tar b/test/functional/test-data.tar deleted file mode 100644 index 2a69b32c..00000000 Binary files 
a/test/functional/test-data.tar and /dev/null differ diff --git a/test/write-stream-test.js b/test/write-stream-test.js deleted file mode 100644 index a36e86be..00000000 --- a/test/write-stream-test.js +++ /dev/null @@ -1,467 +0,0 @@ -/* Copyright (c) 2012-2014 LevelUP contributors - * See list at - * MIT License - */ - -var async = require('async') - , common = require('./common') - - , assert = require('referee').assert - , refute = require('referee').refute - , buster = require('bustermove') - -buster.testCase('WriteStream', { - 'setUp': function (done) { - common.commonSetUp.call(this, function () { - this.timeout = 1000 - - this.sourceData = [] - - for (var i = 0; i < 10; i++) { - this.sourceData.push({ - type : 'put' - , key : i - , value : Math.random() - }) - } - - this.verify = function (ws, db, done, data) { - if (!data) data = this.sourceData // can pass alternative data array for verification - async.forEach( - data - , function (data, callback) { - db.get(data.key, function (err, value) { - refute(err) - assert.equals(+value, +data.value, 'WriteStream data #' + data.key + ' has correct value') - callback() - }) - } - , done - ) - } - - done() - }.bind(this)) - } - - , 'tearDown': common.commonTearDown - - //TODO: test various encodings - - , 'test simple WriteStream': function (done) { - this.openTestDatabase(function (db) { - var ws = db.createWriteStream() - ws.on('error', function (err) { - refute(err) - }) - ws.on('close', this.verify.bind(this, ws, db, done)) - this.sourceData.forEach(function (d) { - ws.write(d) - }) - ws.once('ready', ws.end) // end after it's ready, nextTick makes this work OK - }.bind(this)) - } - - , 'test WriteStream with async writes': function (done) { - this.openTestDatabase(function (db) { - var ws = db.createWriteStream() - - ws.on('error', function (err) { - refute(err) - }) - ws.on('close', this.verify.bind(this, ws, db, done)) - async.forEachSeries( - this.sourceData - , function (d, callback) { - // some should 
batch() and some should put() - if (d.key % 3) { - setTimeout(function () { - ws.write(d) - callback() - }, 10) - } else { - ws.write(d) - callback() - } - } - , function () { - ws.end() - } - ) - }.bind(this)) - } - - /* - // exactly the same as previous but should avoid batch() writes - , 'test WriteStream with async writes and useBatch=false': function (done) { - this.openTestDatabase(function (db) { - db.batch = function () { - Array.prototype.slice.call(arguments).forEach(function (a) { - if (typeof a == 'function') a('Should not call batch()') - }) - } - - var ws = db.createWriteStream({ useBatch: false }) - - ws.on('error', function (err) { - refute(err) - }) - ws.on('close', this.verify.bind(this, ws, db, done)) - async.forEachSeries( - this.sourceData - , function (d, callback) { - if (d.key % 3) { - setTimeout(function () { - ws.write(d) - callback() - }, 10) - } else { - ws.write(d) - callback() - } - } - , function () { - ws.end() - } - ) - }.bind(this)) - } - */ - - , 'test end accepts data': function (done) { - this.openTestDatabase(function (db) { - var ws = db.createWriteStream() - ws.on('error', function (err) { - refute(err) - }) - ws.on('close', this.verify.bind(this, ws, db, done)) - var i = 0 - this.sourceData.forEach(function (d) { - i ++ - if (i < this.sourceData.length) { - ws.write(d) - } else { - ws.end(d) - } - }.bind(this)) - }.bind(this)) - } - - // at the moment, destroySoon() is basically just end() - , 'test destroySoon()': function (done) { - this.openTestDatabase(function (db) { - var ws = db.createWriteStream() - ws.on('error', function (err) { - refute(err) - }) - ws.on('close', this.verify.bind(this, ws, db, done)) - this.sourceData.forEach(function (d) { - ws.write(d) - }) - ws.once('ready', ws.destroySoon) // end after it's ready, nextTick makes this work OK - }.bind(this)) - } - - , 'test destroy()': function (done) { - var verify = function (ws, db) { - async.forEach( - this.sourceData - , function (data, callback) { - 
db.get(data.key, function (err, value) { - // none of them should exist - assert(err) - refute(value) - callback() - }) - } - , done - ) - } - - this.openTestDatabase(function (db) { - var ws = db.createWriteStream() - ws.on('error', function (err) { - refute(err) - }) - ws.on('close', verify.bind(this, ws, db)) - this.sourceData.forEach(function (d) { - ws.write(d) - }) - ws.once('ready', ws.destroy) - }.bind(this)) - } - - , 'test json encoding': function (done) { - var options = { createIfMissing: true, errorIfExists: true, keyEncoding: 'utf8', valueEncoding: 'json' } - , data = [ - { type: 'put', key: 'aa', value: { a: 'complex', obj: 100 } } - , { type: 'put', key: 'ab', value: { b: 'foo', bar: [ 1, 2, 3 ] } } - , { type: 'put', key: 'ac', value: { c: 'w00t', d: { e: [ 0, 10, 20, 30 ], f: 1, g: 'wow' } } } - , { type: 'put', key: 'ba', value: { a: 'complex', obj: 100 } } - , { type: 'put', key: 'bb', value: { b: 'foo', bar: [ 1, 2, 3 ] } } - , { type: 'put', key: 'bc', value: { c: 'w00t', d: { e: [ 0, 10, 20, 30 ], f: 1, g: 'wow' } } } - , { type: 'put', key: 'ca', value: { a: 'complex', obj: 100 } } - , { type: 'put', key: 'cb', value: { b: 'foo', bar: [ 1, 2, 3 ] } } - , { type: 'put', key: 'cc', value: { c: 'w00t', d: { e: [ 0, 10, 20, 30 ], f: 1, g: 'wow' } } } - ] - - this.openTestDatabase(options, function (db) { - var ws = db.createWriteStream() - ws.on('error', function (err) { - refute(err) - }) - ws.on('close', this.verify.bind(this, ws, db, done, data)) - data.forEach(function (d) { - ws.write(d) - }) - ws.once('ready', ws.end) // end after it's ready, nextTick makes this work OK - }.bind(this)) - } - - , 'test del capabilities for each key/value': function (done) { - - var options = { createIfMissing: true, errorIfExists: true, keyEncoding: 'utf8', valueEncoding: 'json' } - , data = [ - { type: 'put', key: 'aa', value: { a: 'complex', obj: 100 } } - , { type: 'put', key: 'ab', value: { b: 'foo', bar: [ 1, 2, 3 ] } } - , { type: 'put', key: 'ac', 
value: { c: 'w00t', d: { e: [ 0, 10, 20, 30 ], f: 1, g: 'wow' } } } - , { type: 'put', key: 'ba', value: { a: 'complex', obj: 100 } } - , { type: 'put', key: 'bb', value: { b: 'foo', bar: [ 1, 2, 3 ] } } - , { type: 'put', key: 'bc', value: { c: 'w00t', d: { e: [ 0, 10, 20, 30 ], f: 1, g: 'wow' } } } - , { type: 'put', key: 'ca', value: { a: 'complex', obj: 100 } } - , { type: 'put', key: 'cb', value: { b: 'foo', bar: [ 1, 2, 3 ] } } - , { type: 'put', key: 'cc', value: { c: 'w00t', d: { e: [ 0, 10, 20, 30 ], f: 1, g: 'wow' } } } - ] - - async.waterfall([ - function (cb) { - this.openTestDatabase(options, function (db) { - cb(null, db); - }); - }.bind(this), - function (db, cb) { - var ws = db.createWriteStream() - ws.on('error', function (err) { - refute(err) - }) - ws.on('close', function () { - cb(null, db); - }) - data.forEach(function (d) { - ws.write(d) - }) - - // end after it's ready, nextTick makes this work OK - ws.once('ready', ws.end) - }, - function (db, cb) { - var delStream = db.createWriteStream() - delStream.on('error', function (err) { - refute(err) - }) - delStream.on('close', function () { - cb(null, db); - }) - data.forEach(function (d) { - d.type = 'del' - delStream.write(d) - }) - - // end after it's ready, nextTick makes this work OK - delStream.once('ready', delStream.end) - }, - function (db, cb) { - async.forEach( - data - , function (data, callback) { - db.get(data.key, function (err, value) { - // none of them should exist - assert(err) - refute(value) - callback() - }) - } - , cb - ) - } - ], done) - } - - , 'test del capabilities as constructor option': function (done) { - - var options = { createIfMissing: true, errorIfExists: true, keyEncoding: 'utf8', valueEncoding: 'json' } - , data = [ - { key: 'aa', value: { a: 'complex', obj: 100 } } - , { key: 'ab', value: { b: 'foo', bar: [ 1, 2, 3 ] } } - , { key: 'ac', value: { c: 'w00t', d: { e: [ 0, 10, 20, 30 ], f: 1, g: 'wow' } } } - , { key: 'ba', value: { a: 'complex', obj: 100 } } - 
, { key: 'bb', value: { b: 'foo', bar: [ 1, 2, 3 ] } } - , { key: 'bc', value: { c: 'w00t', d: { e: [ 0, 10, 20, 30 ], f: 1, g: 'wow' } } } - , { key: 'ca', value: { a: 'complex', obj: 100 } } - , { key: 'cb', value: { b: 'foo', bar: [ 1, 2, 3 ] } } - , { key: 'cc', value: { c: 'w00t', d: { e: [ 0, 10, 20, 30 ], f: 1, g: 'wow' } } } - ] - - async.waterfall([ - function (cb) { - this.openTestDatabase(options, function (db) { - cb(null, db); - }); - }.bind(this), - function (db, cb) { - var ws = db.createWriteStream() - ws.on('error', function (err) { - refute(err) - }) - ws.on('close', function () { - cb(null, db); - }) - data.forEach(function (d) { - ws.write(d) - }) - - // end after it's ready, nextTick makes this work OK - ws.once('ready', ws.end) - }, - function (db, cb) { - var delStream = db.createWriteStream({ type: 'del' }) - delStream.on('error', function (err) { - refute(err) - }) - delStream.on('close', function () { - cb(null, db); - }) - data.forEach(function (d) { - delStream.write(d) - }) - - // end after it's ready, nextTick makes this work OK - delStream.once('ready', delStream.end) - }, - function (db, cb) { - async.forEach( - data - , function (data, callback) { - db.get(data.key, function (err, value) { - // none of them should exist - assert(err) - refute(value) - callback() - }) - } - , cb - ) - } - ], done) - } - - , 'test type at key/value level must take precedence on the constructor': function (done) { - - var options = { createIfMissing: true, errorIfExists: true, keyEncoding: 'utf8', valueEncoding: 'json' } - , data = [ - { key: 'aa', value: { a: 'complex', obj: 100 } } - , { key: 'ab', value: { b: 'foo', bar: [ 1, 2, 3 ] } } - , { key: 'ac', value: { c: 'w00t', d: { e: [ 0, 10, 20, 30 ], f: 1, g: 'wow' } } } - , { key: 'ba', value: { a: 'complex', obj: 100 } } - , { key: 'bb', value: { b: 'foo', bar: [ 1, 2, 3 ] } } - , { key: 'bc', value: { c: 'w00t', d: { e: [ 0, 10, 20, 30 ], f: 1, g: 'wow' } } } - , { key: 'ca', value: { a: 
'complex', obj: 100 } } - , { key: 'cb', value: { b: 'foo', bar: [ 1, 2, 3 ] } } - , { key: 'cc', value: { c: 'w00t', d: { e: [ 0, 10, 20, 30 ], f: 1, g: 'wow' } } } - ] - , exception = data[0] - - exception['type'] = 'put' - - async.waterfall([ - function (cb) { - this.openTestDatabase(options, function (db) { - cb(null, db); - }); - }.bind(this), - function (db, cb) { - var ws = db.createWriteStream() - ws.on('error', function (err) { - refute(err) - }) - ws.on('close', function () { - cb(null, db); - }) - data.forEach(function (d) { - ws.write(d) - }) - - // end after it's ready, nextTick makes this work OK - ws.once('ready', ws.end) - }, - function (db, cb) { - var delStream = db.createWriteStream({ type: 'del' }) - delStream.on('error', function (err) { - refute(err) - }) - delStream.on('close', function () { - cb(null, db); - }) - data.forEach(function (d) { - delStream.write(d) - }) - - // end after it's ready, nextTick makes this work OK - delStream.once('ready', delStream.end) - }, - function (db, cb) { - async.forEach( - data - , function (data, callback) { - db.get(data.key, function (err, value) { - if (data.type === 'put') { - assert(value) - callback() - } else { - assert(err) - refute(value) - callback() - } - }) - } - , cb - ) - } - ], done) - } - - , 'test ignoring pairs with the wrong type': function (done) { - - async.waterfall([ - function (cb) { - this.openTestDatabase(cb.bind(null, null)) - }.bind(this), - function (db, cb) { - var ws = db.createWriteStream() - ws.on('error', function (err) { - refute(err) - }) - ws.on('close', cb.bind(null, db)) - this.sourceData.forEach(function (d) { - d.type = "x" + Math.random() - ws.write(d) - }) - ws.once('ready', ws.end) // end after it's ready, nextTick makes this work OK - }.bind(this), - function (db, cb) { - async.forEach( - this.sourceData - , function (data, callback) { - db.get(data.key, function (err, value) { - assert(err) - refute(value) - callback() - }) - } - , cb - ) - - }.bind(this) - ], 
done) - } -})