diff --git a/example.js b/example.js
index 8239c353..9f26bdf8 100644
--- a/example.js
+++ b/example.js
@@ -3,16 +3,6 @@
 const Repo = require('ipfs-repo')
 const repo = new Repo('/Users/awesome/.jsipfs')
 
-repo.init({ my: 'config' }, (err) => {
-  if (err) {
-    throw err
-  }
-
-  repo.open((err) => {
-    if (err) {
-      throw err
-    }
-
-    console.log('repo is ready')
-  })
-})
+repo.init({ my: 'config' })
+  .then(() => repo.open())
+  .then(() => console.log('repo is ready'))
diff --git a/package.json b/package.json
index 62fe79a4..e612e3c7 100644
--- a/package.json
+++ b/package.json
@@ -49,15 +49,14 @@
     "rimraf": "^2.6.2"
   },
   "dependencies": {
-    "async": "^2.6.1",
     "base32.js": "~0.1.0",
     "bignumber.js": "^8.0.2",
     "cids": "~0.5.7",
     "datastore-core": "~0.6.0",
     "datastore-fs": "~0.7.0",
-    "datastore-level": "~0.10.0",
+    "datastore-level": "git://github.com/ipfs/js-datastore-level.git#refactor/async-iterators",
     "debug": "^4.1.0",
-    "interface-datastore": "~0.6.0",
+    "interface-datastore": "git://github.com/ipfs/interface-datastore.git#refactor/async-iterators",
     "ipfs-block": "~0.8.0",
     "lodash.get": "^4.4.2",
     "lodash.has": "^4.5.2",
diff --git a/src/api-addr.js b/src/api-addr.js
index f5ba9b58..8b85a0d6 100644
--- a/src/api-addr.js
+++ b/src/api-addr.js
@@ -10,30 +10,28 @@ module.exports = (store) => {
     /**
      * Get the current configuration from the repo.
      *
-     * @param {function(Error, Object)} callback
-     * @returns {void}
+     * @returns {Promise}
      */
-    get (callback) {
-      store.get(apiFile, (err, value) => callback(err, value && value.toString()))
+    async get () {
+      const value = await store.get(apiFile)
+      return value && value.toString()
    },

    /**
     * Set the current configuration for this repo.
* * @param {Object} value - the api address to be written - * @param {function(Error)} callback - * @returns {void} + * @returns {Promise} */ - set (value, callback) { - store.put(apiFile, Buffer.from(value.toString()), callback) + async set (value) { + return store.put(apiFile, Buffer.from(value.toString())) }, /** * Deletes api file * - * @param {function(Error, bool)} callback - * @returns {void} + * @returns {Promise} */ - delete (callback) { - store.delete(apiFile, callback) + async delete () { + return store.delete(apiFile) } } } diff --git a/src/blockstore.js b/src/blockstore.js index e38c386c..689b2332 100644 --- a/src/blockstore.js +++ b/src/blockstore.js @@ -5,10 +5,7 @@ const ShardingStore = core.ShardingDatastore const Key = require('interface-datastore').Key const base32 = require('base32.js') const Block = require('ipfs-block') -const setImmediate = require('async/setImmediate') -const reject = require('async/reject') const CID = require('cids') -const pull = require('pull-stream') /** * Transform a raw buffer to a base32 encoded key. @@ -31,21 +28,17 @@ const cidToDsKey = (cid) => { return keyFromBuffer(cid.buffer) } -module.exports = (filestore, options, callback) => { - maybeWithSharding(filestore, options, (err, store) => { - if (err) { return callback(err) } - - callback(null, createBaseStore(store)) - }) +module.exports = async (filestore, options) => { + const store = await maybeWithSharding(filestore, options) + return createBaseStore(store) } -function maybeWithSharding (filestore, options, callback) { +function maybeWithSharding (filestore, options) { if (options.sharding) { const shard = new core.shard.NextToLast(2) - ShardingStore.createOrOpen(filestore, shard, callback) - } else { - setImmediate(() => callback(null, filestore)) + return ShardingStore.createOrOpen(filestore, shard) } + return filestore } function createBaseStore (store) { @@ -54,142 +47,114 @@ function createBaseStore (store) { * Query the store. 
* * @param {object} query - * @param {function(Error, Array)} callback - * @return {void} + * @return {Iterable} */ - query (query, callback) { - pull( - store.query(query), - pull.collect(callback) - ) + query (query) { + return store.query(query) }, /** * Get a single block by CID. * * @param {CID} cid - * @param {function(Error, Block)} callback - * @returns {void} + * @returns {Promise} */ - get (cid, callback) { + async get (cid) { if (!CID.isCID(cid)) { - return setImmediate(() => { - callback(new Error('Not a valid cid')) - }) + throw new Error('Not a valid cid') } - const key = cidToDsKey(cid) - store.get(key, (err, blockData) => { - if (err) { - // If not found, we try with the other CID version. - // If exists, then store that block under the CID that was requested. - // Some duplication occurs. - if (err.code === 'ERR_NOT_FOUND') { - const otherCid = cidToOtherVersion(cid) - if (!otherCid) return callback(err) - - const otherKey = cidToDsKey(otherCid) - return store.get(otherKey, (err, blockData) => { - if (err) return callback(err) - - store.put(key, blockData, (err) => { - if (err) return callback(err) - callback(null, new Block(blockData, cid)) - }) - }) - } - - return callback(err) + let blockData + try { + blockData = await store.get(key) + return new Block(blockData, cid) + } catch (err) { + if (err.code === 'ERR_NOT_FOUND') { + const otherCid = cidToOtherVersion(cid) + if (!otherCid) throw err + + const otherKey = cidToDsKey(otherCid) + const blockData = await store.get(otherKey) + await store.put(key, blockData) + return new Block(blockData, cid) } - - callback(null, new Block(blockData, cid)) - }) + } }, - put (block, callback) { + /** + * Write a single block to the store. 
+ * + * @param {Block} block + * @returns {Promise} + */ + async put (block) { if (!Block.isBlock(block)) { - return setImmediate(() => { - callback(new Error('invalid block')) - }) + throw new Error('invalid block') } const k = cidToDsKey(block.cid) - - store.has(k, (err, exists) => { - if (err) { return callback(err) } - if (exists) { return callback() } - - store.put(k, block.data, callback) - }) + const exists = await store.has(k) + if (exists) return + return store.put(k, block.data) }, + /** * Like put, but for more. * * @param {Array} blocks - * @param {function(Error)} callback - * @returns {void} + * @returns {Promise} */ - putMany (blocks, callback) { + async putMany (blocks) { const keys = blocks.map((b) => ({ key: cidToDsKey(b.cid), block: b })) const batch = store.batch() - reject(keys, (k, cb) => store.has(k.key, cb), (err, newKeys) => { - if (err) { - return callback(err) - } + const newKeys = (await Promise.all(keys.map(async k => { + const exists = await store.has(k.key) + return exists ? null : k + }))).filter(Boolean) - newKeys.forEach((k) => { - batch.put(k.key, k.block.data) - }) - - batch.commit(callback) + newKeys.forEach((k) => { + batch.put(k.key, k.block.data) }) + return batch.commit() }, /** * Does the store contain block with this cid? * * @param {CID} cid - * @param {function(Error, bool)} callback - * @returns {void} + * @returns {Promise} */ - has (cid, callback) { + async has (cid) { if (!CID.isCID(cid)) { - return setImmediate(() => { - callback(new Error('Not a valid cid')) - }) + throw new Error('Not a valid cid') } - store.has(cidToDsKey(cid), (err, exists) => { - if (err) return callback(err) - if (exists) return callback(null, true) - - // If not found, we try with the other CID version. 
- const otherCid = cidToOtherVersion(cid) - if (!otherCid) return callback(null, false) - - store.has(cidToDsKey(otherCid), callback) - }) + const exists = await store.has(cidToDsKey(cid)) + if (exists) return exists + const otherCid = cidToOtherVersion(cid) + if (!otherCid) return false + return store.has(cidToDsKey(otherCid)) }, /** * Delete a block from the store * * @param {CID} cid - * @param {function(Error)} callback - * @returns {void} + * @returns {Promise} */ - delete (cid, callback) { + async delete (cid) { if (!CID.isCID(cid)) { - return setImmediate(() => { - callback(new Error('Not a valid cid')) - }) + throw new Error('Not a valid cid') } - - store.delete(cidToDsKey(cid), callback) + return store.delete(cidToDsKey(cid)) }, - - close (callback) { - store.close(callback) + /** + * Close the store + * + * @returns {Promise} + */ + async close () { + return store.close() } } } diff --git a/src/config.js b/src/config.js index 15f00ec3..a6ad51f8 100644 --- a/src/config.js +++ b/src/config.js @@ -2,7 +2,6 @@ const Key = require('interface-datastore').Key const queue = require('async/queue') -const waterfall = require('async/waterfall') const _get = require('lodash.get') const _set = require('lodash.set') const _has = require('lodash.has') @@ -12,97 +11,75 @@ const configKey = new Key('config') module.exports = (store) => { const setQueue = queue(_doSet, 1) + setQueue.error = (err) => { throw err } const configStore = { /** * Get the current configuration from the repo. 
* * @param {String} key - the config key to get - * @param {function(Error, Object)} callback - * @returns {void} + * @returns {Promise} */ - get (key, callback) { - if (typeof key === 'function') { - callback = key - key = undefined - } + async get (key) { if (!key) { key = undefined } - store.get(configKey, (err, encodedValue) => { - if (err) { return callback(err) } - - let config - try { - config = JSON.parse(encodedValue.toString()) - } catch (err) { - return callback(err) - } - if (key !== undefined && !_has(config, key)) { - return callback(new Error('Key ' + key + ' does not exist in config')) - } - const value = key !== undefined ? _get(config, key) : config - callback(null, value) - }) + return store.get(configKey) + .then((encodedValue) => { + const config = JSON.parse(encodedValue.toString()) + if (key !== undefined && !_has(config, key)) { + throw new Error(`Key ${key} does not exist in config`) + } + const value = key !== undefined ? _get(config, key) : config + return value + }) }, /** * Set the current configuration for this repo. * * @param {String} key - the config key to be written * @param {Object} value - the config value to be written - * @param {function(Error)} callback * @returns {void} */ - set (key, value, callback) { - if (typeof value === 'function') { - callback = value - value = key - key = undefined - } else if (!key || typeof key !== 'string') { - return callback(new Error('Invalid key type')) + set (key, value) { + if (!key || typeof key !== 'string') { + throw new Error('Invalid key type') } if (value === undefined || Buffer.isBuffer(value)) { - return callback(new Error('Invalid value type')) + throw new Error('Invalid value type') } setQueue.push({ key: key, value: value - }, callback) + }) }, /** * Check if a config file exists. 
* - * @param {function(Error, bool)} callback - * @returns {void} + * @returns {Promise} */ - exists (callback) { - store.has(configKey, callback) + async exists () { + return store.has(configKey) } } return configStore - function _doSet (m, callback) { + async function _doSet (m) { const key = m.key const value = m.value if (key) { - waterfall( - [ - (cb) => configStore.get(cb), - (config, cb) => cb(null, _set(config, key, value)), - _saveAll - ], - callback) - } else { - _saveAll(value, callback) + const config = await configStore.get() + _set(config, key, value) + await _saveAll(config) } } - function _saveAll (config, callback) { + function _saveAll (config) { const buf = Buffer.from(JSON.stringify(config, null, 2)) - store.put(configKey, buf, callback) + return store.put(configKey, buf) } } diff --git a/src/errors/index.js b/src/errors/index.js index 3e0f3647..68d3f7d6 100644 --- a/src/errors/index.js +++ b/src/errors/index.js @@ -1,3 +1,12 @@ 'use strict' +class LockExists extends Error { + constructor (message) { + super(message) + this.name = 'LockExists' + this.message = message + } +} + exports.ERR_REPO_NOT_INITIALIZED = 'ERR_REPO_NOT_INITIALIZED' +exports.LockExists = LockExists \ No newline at end of file diff --git a/src/index.js b/src/index.js index cc18d7f6..ac255172 100644 --- a/src/index.js +++ b/src/index.js @@ -1,15 +1,10 @@ 'use strict' -const waterfall = require('async/waterfall') -const series = require('async/series') -const parallel = require('async/parallel') -const each = require('async/each') const _get = require('lodash.get') const assert = require('assert') const path = require('path') const debug = require('debug') const Big = require('bignumber.js') -const pull = require('pull-stream') const backends = require('./backends') const version = require('./version') @@ -61,83 +56,61 @@ class IpfsRepo { * Initialize a new repo. * * @param {Object} config - config to write into `config`. 
-   * @param {function(Error)} callback
-   * @returns {void}
+   * @returns {Promise}
    */
-  init (config, callback) {
+  async init (config) {
     log('initializing at: %s', this.path)
-
-    series([
-      (cb) => this.root.open(ignoringAlreadyOpened(cb)),
-      (cb) => this.config.set(buildConfig(config), cb),
-      (cb) => this.spec.set(buildDatastoreSpec(config), cb),
-      (cb) => this.version.set(repoVersion, cb)
-    ], callback)
+    // tolerate a root that is already open (preserves the old
+    // ignoringAlreadyOpened behaviour this refactor removed)
+    try {
+      await this.root.open()
+    } catch (err) {
+      if (err.message !== 'Already open') throw err
+    }
+    await this.config.set(buildConfig(config))
+    await this.spec.set(buildDatastoreSpec(config))
+    await this.version.set(repoVersion)
   }
 
   /**
    * Open the repo. If the repo is already open no action will be taken.
    * If the repo is not initialized it will return an error.
    *
-   * @param {function(Error)} callback
-   * @returns {void}
+   * @returns {Promise}
    */
-  open (callback) {
+  async open () {
     if (!this.closed) {
-      setImmediate(() => callback(new Error('repo is already open')))
-      return // early
+      throw new Error('repo is already open')
     }
     log('opening at: %s', this.path)
 
     // check if the repo is already initialized
-    waterfall([
-      (cb) => this.root.open(ignoringAlreadyOpened(cb)),
-      (cb) => this._isInitialized(cb),
-      (cb) => this._openLock(this.path, cb),
-      (lck, cb) => {
-        log('aquired repo.lock')
-        this.lockfile = lck
-        cb()
-      },
-      (cb) => {
-        log('creating datastore')
-        this.datastore = backends.create('datastore', path.join(this.path, 'datastore'), this.options)
-        log('creating blocks')
-        const blocksBaseStore = backends.create('blocks', path.join(this.path, 'blocks'), this.options)
-        blockstore(
-          blocksBaseStore,
-          this.options.storageBackendOptions.blocks,
-          cb)
-      },
-      (blocks, cb) => {
-        this.blocks = blocks
-        cb()
-      },
-      (cb) => {
-        log('creating keystore')
-        this.keys = backends.create('keys', path.join(this.path, 'keys'), this.options)
-        cb()
-      },
-
-      (cb) => {
-        this.closed = false
-        log('all opened')
-        cb()
+    try {
+      // tolerate a root that is already open (preserves the old
+      // ignoringAlreadyOpened behaviour this refactor removed)
+      try {
+        await this.root.open()
+      } catch (err) {
+        if (err.message !== 'Already open') throw err
+      }
+      const initialized = await this._isInitialized()
+      if (!initialized) {
+        throw Object.assign(new Error('repo is not initialized yet'),
+          {
+            code: ERRORS.ERR_REPO_NOT_INITIALIZED,
+            path: this.path
+          })
       }
-    ], (err) => {
+      this.lockfile = await this._openLock(this.path)
+      log('aquired repo.lock')
+      log('creating datastore')
+      this.datastore = backends.create('datastore', path.join(this.path, 'datastore'), this.options)
+      log('creating blocks')
+      const blocksBaseStore = backends.create('blocks', path.join(this.path, 'blocks'), this.options)
+      this.blocks = await blockstore(blocksBaseStore, this.options.storageBackendOptions.blocks)
+      log('creating keystore')
+      this.keys = backends.create('keys', path.join(this.path, 'keys'), this.options)
+      this.closed = false
+      log('all opened')
+    } catch (err) {
       if (err && this.lockfile) {
-        this._closeLock((err2) => {
-          if (!err2) {
-            this.lockfile = null
-          } else {
-            log('error removing lock', err2)
-          }
-          callback(err)
-        })
-      } else {
-        callback(err)
+        try {
+          // _closeLock is async — must be awaited or its rejection escapes
+          await this._closeLock()
+        } catch (err2) {
+          log('error removing lock', err2)
+        }
+        this.lockfile = null
       }
-    })
+      // always rethrow — previously the error was swallowed whenever no
+      // lockfile had been acquired yet, leaving open() silently "succeed"
+      throw err
+    }
   }
 
   /**
@@ -161,102 +134,66 @@
    * be returned in the callback if one has been created.
* * @param {string} path - * @param {function(Error, lockfile)} callback - * @returns {void} + * @returns {Promise} */ - _openLock (path, callback) { - this._locker.lock(path, (err, lockfile) => { - if (err) { - return callback(err, null) - } - - assert.strictEqual(typeof lockfile.close, 'function', 'Locks must have a close method') - callback(null, lockfile) - }) + async _openLock (path) { + const lockfile = await this._locker.lock(path) + assert.strictEqual(typeof lockfile.close, 'function', 'Locks must have a close method') + return lockfile } /** * Closes the lock on the repo * - * @param {function(Error)} callback - * @returns {void} + * @returns {Promise} */ - _closeLock (callback) { + async _closeLock () { if (this.lockfile) { - return this.lockfile.close(callback) + return this.lockfile.close() } - callback() } /** * Check if the repo is already initialized. - * * @private - * @param {function(Error)} callback - * @returns {void} + * @returns {Promise} */ - _isInitialized (callback) { + async _isInitialized () { log('init check') - parallel( - { - config: (cb) => this.config.exists(cb), - spec: (cb) => this.spec.exists(cb), - version: (cb) => this.version.check(repoVersion, cb) - }, - (err, res) => { - log('init', err, res) - if (err && !res.config) { - return callback(Object.assign(new Error('repo is not initialized yet'), - { - code: ERRORS.ERR_REPO_NOT_INITIALIZED, - path: this.path - })) - } - callback(err) - } - ) + let config, spec + try { + [config, spec] = await Promise.all([this.config.exists(), this.spec.exists(), this.version.check(repoVersion)]) + return config && spec + } catch (err) { + return false + } } /** * Close the repo and cleanup. 
* - * @param {function(Error)} callback - * @returns {void} + * @returns {Promise} */ - close (callback) { + async close () { if (this.closed) { - return callback(new Error('repo is already closed')) + throw new Error('repo is already closed') } - log('closing at: %s', this.path) - series([ - (cb) => this.apiAddr.delete(ignoringNotFound(cb)), - (cb) => { - each( - [this.blocks, this.keys, this.datastore], - (store, callback) => store.close(callback), - cb) - }, - (cb) => { - log('unlocking') - this.closed = true - this._closeLock(cb) - }, - (cb) => { - this.lockfile = null - cb() - } - ], (err) => callback(err)) + await this.apiAddr.delete() + await Promise.all([this.blocks, this.keys, this.datastore].map((store) => store.close())) + log('unlocking') + this.closed = true + await this._closeLock() + this.lockfile = null } /** * Check if a repo exists. * - * @param {function(Error, bool)} callback - * @returns {void} + * @returns {Promise} */ - exists (callback) { - this.version.exists(callback) + exists () { + return this.version.exists() } /** @@ -264,96 +201,70 @@ class IpfsRepo { * * @param {Object} options * @param {Boolean} options.human - * @param {function(Error, Object)} callback - * @return {void} + * @return {Object} */ - stat (options, callback) { - if (typeof options === 'function') { - callback = options - options = {} + async stat (options) { + options = Object.assign({}, { human: false }, options) + let storageMax, blocks, version, datastore, keys + [storageMax, blocks, version, datastore, keys] = await Promise.all([ + this._storageMaxStat(), + this._blockStat(), + this.version.get(), + getSize(this.datastore), + getSize(this.keys) + ]) + let size = blocks.size + .plus(datastore) + .plus(keys) + + if (options.human) { + size = size.div(1048576) + } + return { + repoPath: this.path, + storageMax: storageMax, + version: version, + numObjects: blocks.count, + repoSize: size } + } - options = Object.assign({}, { human: false }, options) + _storageMaxStat 
() { + return this.config.get('Datastore.StorageMax') + .then((max) => new Big(max)) + .catch(() => new Big(noLimit)) + } - parallel({ - storageMax: (cb) => this.config.get('Datastore.StorageMax', (err, max) => { - if (err) { - cb(null, new Big(noLimit)) - } else { - cb(null, new Big(max)) - } - }), - version: (cb) => this.version.get(cb), - blocks: (cb) => this.blocks.query({}, (err, list) => { - list = list || [] - - const count = new Big(list.length) - let size = new Big(0) - - list.forEach(block => { - size = size - .plus(block.value.byteLength) - .plus(block.key._buf.byteLength) - }) - - cb(err, { - count: count, - size: size - }) - }), - datastore: (cb) => getSize(this.datastore, cb), - keys: (cb) => getSize(this.keys, cb) - }, (err, results) => { - if (err) return callback(err) - - let size = results.blocks.size - .plus(results.datastore) - .plus(results.keys) - - if (options.human) { - size = size.div(1048576) - } + async _blockStat () { + const list = [] + for await (const block of this.blocks.query({})) { + list.push(block) + } + const count = new Big(list.length) + let size = new Big(0) - callback(null, { - repoPath: this.path, - storageMax: results.storageMax, - version: results.version, - numObjects: results.blocks.count, - repoSize: size - }) + list.forEach(block => { + size = size + .plus(block.value.byteLength) + .plus(block.key._buf.byteLength) }) + return { count: count, size: size } } } -function getSize (queryFn, callback) { - pull( - queryFn.query({}), - pull.reduce((sum, block) => { - return sum - .plus(block.value.byteLength) - .plus(block.key._buf.byteLength) - }, new Big(0), callback)) +async function getSize (queryFn) { + let sum = new Big(0) + for await (const block of queryFn.query({})) { + sum.plus(block.value.byteLength) + .plus(block.key._buf.byteLength) + } + return sum } module.exports = IpfsRepo module.exports.repoVersion = repoVersion module.exports.errors = ERRORS -function ignoringIf (cond, cb) { - return (err) => { - cb(err && 
!cond(err) ? err : null) - } -} -function ignoringAlreadyOpened (cb) { - return ignoringIf((err) => err.message === 'Already open', cb) -} - -function ignoringNotFound (cb) { - return ignoringIf((err) => { - return err && (err.code === ERRORS.ERR_REPO_NOT_INITIALIZED || err.message.startsWith('ENOENT')) - }, cb) -} - function buildOptions (_options) { const options = Object.assign({}, defaultOptions, _options) diff --git a/src/lock-memory.js b/src/lock-memory.js index 9b6c2937..54b33aa2 100644 --- a/src/lock-memory.js +++ b/src/lock-memory.js @@ -1,7 +1,7 @@ 'use strict' +const errors = require('./errors') const debug = require('debug') -const setImmediate = require('async/setImmediate') const log = debug('repo:lock') @@ -11,41 +11,38 @@ const LOCKS = {} /** * Lock the repo in the given dir. - * * @param {string} dir - * @param {function(Error, lock)} callback - * @returns {void} + * @returns {Promise} */ -exports.lock = (dir, callback) => { - const file = dir + '/' + lockFile - log('locking %s', file) - LOCKS[file] = true - const closer = { - close (cb) { - if (LOCKS[file]) { - delete LOCKS[file] - } - setImmediate(cb) +exports.lock = async (dir) => { + const file = dir + '/' + lockFile + log('locking %s', file) + + if (LOCKS[file] === true) { + throw errors.LockExists(`Lock exists for file: ${file}`) } - } - setImmediate(() => { - callback(null, closer) - }) + + LOCKS[file] = true + const closer = { + close() { + if (LOCKS[file]) { + delete LOCKS[file] + } + } + } + return closer } /** * Check if the repo in the given directory is locked. 
* * @param {string} dir - * @param {function(Error, bool)} callback - * @returns {void} + * @returns {bool} */ -exports.locked = (dir, callback) => { - const file = dir + '/' + lockFile - log('checking lock: %s') - - const locked = LOCKS[file] - setImmediate(() => { - callback(null, locked) - }) +exports.locked = (dir) => { + const file = dir + '/' + lockFile + log('checking lock: %s', file) + + const locked = LOCKS[file] + return locked } diff --git a/src/lock.js b/src/lock.js index b3dbac17..c3a7e3dd 100644 --- a/src/lock.js +++ b/src/lock.js @@ -22,24 +22,11 @@ const STALE_TIME = 20000 * Lock the repo in the given dir. * * @param {string} dir - * @param {function(Error, lock)} callback - * @returns {void} + * @returns {Promise} */ -exports.lock = (dir, callback) => { +exports.lock = async (dir) => { const file = path.join(dir, lockFile) log('locking %s', file) - - lock(dir, { lockfilePath: file, stale: STALE_TIME }) - .then(release => { - callback(null, { - close: (cb) => { - release() - .then(() => cb()) - .catch(err => cb(err)) - } - }) - }, callback) - .catch(err => { - log(err) - }) + const release = await lock(dir, { lockfilePath: file, stale: STALE_TIME }) + return { close: () => release() } } diff --git a/src/spec.js b/src/spec.js index 8cb165f9..6bf6522a 100644 --- a/src/spec.js +++ b/src/spec.js @@ -10,35 +10,28 @@ module.exports = (store) => { /** * Check if a datastore spec file exists. * - * @param {function(Error, bool)} callback - * @returns {void} + * @returns {Promise} */ - exists (callback) { - store.has(specKey, callback) + async exists () { + return store.has(specKey) }, /** * Get the current datastore spec. 
* - * @param {function(Error, number)} callback - * @returns {void} + * @returns {Promise} */ - get (callback) { - store.get(specKey, (err, buf) => { - if (err) { - return callback(err) - } - callback(null, JSON.parse(buf.toString())) - }) + async get () { + const buf = await store.get() + return JSON.parse(buf.toString()) }, /** * Set the datastore spec of the repo, writing it to the underlying store. - * + * TODO unclear on what the type should be or if it's required * @param {number} spec - * @param {function(Error)} callback - * @returns {void} + * @returns {Promise} */ - set (spec, callback) { - store.put(specKey, Buffer.from(JSON.stringify(sortKeys(spec, { deep: true }))), callback) + async set (spec) { + return store.put(specKey, Buffer.from(JSON.stringify(sortKeys(spec, { deep: true })))) } } } diff --git a/src/version.js b/src/version.js index 154112af..448982cd 100644 --- a/src/version.js +++ b/src/version.js @@ -11,58 +11,44 @@ module.exports = (store) => { /** * Check if a version file exists. * - * @param {function(Error, bool)} callback - * @returns {void} + * @returns {Promise} */ - exists (callback) { - store.has(versionKey, callback) + async exists () { + return store.has(versionKey) }, /** * Get the current version. * - * @param {function(Error, number)} callback - * @returns {void} + * @returns {Promise} */ - get (callback) { - store.get(versionKey, (err, buf) => { - if (err) { - return callback(err) - } - callback(null, parseInt(buf.toString().trim(), 10)) - }) + async get () { + const buf = await this.get(versionKey) + return parseInt(buf.toString().trim(), 10) }, /** * Set the version of the repo, writing it to the underlying store. 
* * @param {number} version - * @param {function(Error)} callback - * @returns {void} + * @returns {Promise} */ - set (version, callback) { - store.put(versionKey, Buffer.from(String(version)), callback) + async set (version) { + return store.put(versionKey, Buffer.from(String(version))) }, /** * Check the current version, and return an error on missmatch * @param {number} expected - * @param {function(Error)} callback * @returns {void} */ - check (expected, callback) { - this.get((err, version) => { - if (err) { - return callback(err) - } - log('comparing version: %s and %s', version, expected) - - // Version 6 and 7 are the same - // TODO: Clean up the compatibility logic. Repo feature detection would be ideal, or a better version schema - const compatibleVersion = (version === 6 && expected === 7) || (expected === 6 && version === 7) + async check (expected) { + const version = await this.get() + log('comparing version: %s and %s', version, expected) + // Version 6 and 7 are the same + // TODO: Clean up the compatibility logic. 
Repo feature detection would be ideal, or a better version schema + const compatibleVersion = (version === 6 && expected === 7) || (expected === 6 && version === 7) - if (version !== expected && !compatibleVersion) { - return callback(new Error(`ipfs repo needs migration: expected version v${expected}, found version v${version}`)) - } - callback() - }) + if (version !== expected && !compatibleVersion) { + throw new Error(`ipfs repo needs migration: expected version v${expected}, found version v${version}`) + } } } } diff --git a/test/blockstore-test.js b/test/blockstore-test.js index 45ae748a..4248bebc 100644 --- a/test/blockstore-test.js +++ b/test/blockstore-test.js @@ -5,12 +5,9 @@ const chai = require('chai') chai.use(require('dirty-chai')) const expect = chai.expect +const assert = chai.assert const Block = require('ipfs-block') const CID = require('cids') -const parallel = require('async/parallel') -const waterfall = require('async/waterfall') -const each = require('async/each') -const map = require('async/map') const _ = require('lodash') const multihashing = require('multihashing-async') const Buffer = require('safe-buffer').Buffer @@ -21,230 +18,141 @@ module.exports = (repo) => { const bData = Buffer.from('hello world') let b - before((done) => { - multihashing(bData, 'sha2-256', (err, h) => { - if (err) { - return done(err) - } - - b = new Block(bData, new CID(h)) - done() - }) + before(async () => { + const hash = await multihashing(bData, 'sha2-256') + b = new Block(bData, new CID(hash)) }) describe('.put', () => { - it('simple', (done) => { - repo.blocks.put(b, done) + it('simple', async () => { + await repo.blocks.put(b) }) - it('multi write (locks)', (done) => { - parallel([ - (cb) => repo.blocks.put(b, cb), - (cb) => repo.blocks.put(b, cb) - ], done) + it('multi write (locks)', async () => { + await Promise.all([repo.blocks.put(b), repo.blocks.put(b)]) }) - it('empty value', (done) => { + it('empty value', async () => { const d = Buffer.alloc(0) - 
multihashing(d, 'sha2-256', (err, multihash) => { - expect(err).to.not.exist() - const empty = new Block(d, new CID(multihash)) - repo.blocks.put(empty, done) - }) + const multihash = await multihashing(d, 'sha2-256') + const empty = new Block(d, new CID(multihash)) + await repo.blocks.put(empty) }) - it('massive multiwrite', function (done) { + it('massive multiwrite', async () => { this.timeout(15000) // add time for ci - waterfall([ - (cb) => map(_.range(100), (i, cb) => { - multihashing(blockData[i], 'sha2-256', cb) - }, cb), - (hashes, cb) => each(_.range(100), (i, cb) => { - const block = new Block(blockData[i], new CID(hashes[i])) - repo.blocks.put(block, cb) - }, cb) - ], done) + const hashes = await Promise.all(_.range(100).map((i) => multihashing(blockData[i], 'sha2-256'))) + await Promise.all(_.range(100).map((i) => { + const block = new Block(blockData[i], new CID(hashes[i])) + return repo.blocks.put(block) + })) }) - it('.putMany', function (done) { + it('.putMany', async () => { this.timeout(15000) // add time for ci - waterfall([ - (cb) => map(_.range(50), (i, cb) => { - const d = Buffer.from('many' + Math.random()) - multihashing(d, 'sha2-256', (err, hash) => { - if (err) { - return cb(err) - } - cb(null, new Block(d, new CID(hash))) - }) - }, cb), - (blocks, cb) => { - repo.blocks.putMany(blocks, (err) => { - expect(err).to.not.exist() - map(blocks, (b, cb) => { - repo.blocks.get(b.cid, cb) - }, (err, res) => { - expect(err).to.not.exist() - expect(res).to.be.eql(blocks) - cb() - }) - }) - } - ], done) + const blocks = await Promise.all(_.range(50).map(async (i) => { + const d = Buffer.from('many' + Math.random()) + const hash = await multihashing(d, 'sha2-256') + return new Block(d, new CID(hash)) + })) + await repo.blocks.putMany(blocks) + blocks.each(async (block) => { + const block1 = await repo.blocks.get(block.cid) + expect(block1).to.be.eql(block) + }) }) - it('returns an error on invalid block', (done) => { - repo.blocks.put('hello', (err) 
=> { + it('returns an error on invalid block', async () => { + try { + await repo.blocks.put('hello') + assert.fail() + } catch (err) { expect(err).to.exist() - done() - }) + } }) }) describe('.get', () => { - it('simple', (done) => { - repo.blocks.get(b.cid, (err, block) => { - expect(err).to.not.exist() - expect(block).to.be.eql(b) - done() - }) - }) - - it('massive read', function (done) { - this.timeout(15000) // add time for ci - parallel(_.range(20 * 100).map((i) => (cb) => { - const j = i % blockData.length - waterfall([ - (cb) => multihashing(blockData[j], 'sha2-256', cb), - (h, cb) => { - const cid = new CID(h) - repo.blocks.get(cid, cb) - }, - (block, cb) => { - expect(block.data).to.be.eql(blockData[j]) - cb() - } - ], cb) - }), done) - }) - - it('returns an error on invalid block', (done) => { - repo.blocks.get('woot', (err, val) => { - expect(err).to.exist() - expect(val).to.not.exist() - done() - }) - }) - - it('should get block stored under v0 CID with a v1 CID', done => { - const data = Buffer.from(`TEST${Date.now()}`) - - multihashing(data, 'sha2-256', (err, hash) => { - if (err) return done(err) - - const cid = new CID(hash) - - repo.blocks.put(new Block(data, cid), err => { - if (err) return done(err) - - repo.blocks.get(cid.toV1(), (err, block) => { - expect(err).to.not.exist() - expect(block.data).to.eql(data) - done() - }) - }) - }) + it('simple', async () => { + const block = await repo.blocks.get(b.cid) + expect(block).to.be.eql(b) }) + }) - it('should get block stored under v1 CID with a v0 CID', done => { - const data = Buffer.from(`TEST${Date.now()}`) + it('massive read', async function () { + this.timeout(15000) // add time for ci + await Promise.all(_.range(20 * 100).map(async (i) => { + const j = i % blockData.length + const hash = await multihashing(blockData[j], 'sha2-256') + const block = await repo.blocks.get(new CID(hash)) + block.to.be.eql(blockData[j]) + })) + }) - multihashing(data, 'sha2-256', (err, hash) => { - if (err) 
return done(err) + it('returns an error on invalid block', async () => { + try { + await repo.blocks.get('woot') + assert.fail() + } catch (err) { + expect(err).to.exist() + } + }) - const cid = new CID(1, 'dag-pb', hash) + it('should get block stored under v0 CID with a v1 CID', async () => { + const data = Buffer.from(`TEST${Date.now()}`) + const hash = await multihashing(data, 'sha2-256') + const cid = new CID(hash) + await repo.blocks.put(new Block(data, cid)) + const block = await repo.blocks.get(cid.toV1()) + expect(block.data).to.eql(data) + }) - repo.blocks.put(new Block(data, cid), err => { - if (err) return done(err) + it('should get block stored under v1 CID with a v0 CID', async () => { + const data = Buffer.from(`TEST${Date.now()}`) - repo.blocks.get(cid.toV0(), (err, block) => { - expect(err).to.not.exist() - expect(block.data).to.eql(data) - done() - }) - }) - }) + const hash = await multihashing(data, 'sha2-256') + const cid = new CID(1, 'dag-pb', hash) + await repo.blocks.put(new Block(data, cid)) + const block = await repo.blocks.get(cid.toV0()) + expect(block.data).to.eql(data) }) describe('.has', () => { - it('existing block', (done) => { - repo.blocks.has(b.cid, (err, exists) => { - expect(err).to.not.exist() - expect(exists).to.eql(true) - done() - }) + it('existing block', async () => { + const exists = await repo.blocks.has(b.cid) + expect(exists).to.eql(true) }) - it('non existent block', (done) => { - repo.blocks.has(new CID('QmbcpFjzamCj5ZZdduW32ctWUPvbGMwQZk2ghWK6PrKswE'), (err, exists) => { - expect(err).to.not.exist() - expect(exists).to.eql(false) - done() - }) + it('non existent block', async () => { + const exists = await repo.blocks.has(new CID('QmbcpFjzamCj5ZZdduW32ctWUPvbGMwQZk2ghWK6PrKswE')) + expect(exists).to.eql(false) }) - it('should have block stored under v0 CID with a v1 CID', done => { + it('should have block stored under v0 CID with a v1 CID', async () => { const data = Buffer.from(`TEST${Date.now()}`) - - 
multihashing(data, 'sha2-256', (err, hash) => { - if (err) return done(err) - - const cid = new CID(hash) - - repo.blocks.put(new Block(data, cid), err => { - if (err) return done(err) - - repo.blocks.has(cid.toV1(), (err, exists) => { - expect(err).to.not.exist() - expect(exists).to.eql(true) - done() - }) - }) - }) + const hash = await multihashing(data, 'sha2-256') + const cid = new CID(hash) + await repo.blocks.put(new Block(data, cid)) + const exists = await repo.blocks.has(cid.toV1()) + expect(exists).to.eql(true) }) - it('should have block stored under v1 CID with a v0 CID', done => { + it('should have block stored under v1 CID with a v0 CID', async () => { const data = Buffer.from(`TEST${Date.now()}`) - multihashing(data, 'sha2-256', (err, hash) => { - if (err) return done(err) - - const cid = new CID(1, 'dag-pb', hash) - - repo.blocks.put(new Block(data, cid), err => { - if (err) return done(err) - - repo.blocks.has(cid.toV0(), (err, exists) => { - expect(err).to.not.exist() - expect(exists).to.eql(true) - done() - }) - }) - }) + const hash = await multihashing(data, 'sha2-256') + const cid = new CID(1, 'dag-pb', hash) + await repo.blocks.put(new Block(data, cid)) + const exists = await repo.blocks.has(cid.toV0()) + expect(exists).to.eql(true) }) }) describe('.delete', () => { - it('simple', (done) => { - waterfall([ - (cb) => repo.blocks.delete(b.cid, cb), - (cb) => repo.blocks.has(b.cid, cb) - ], (err, exists) => { - expect(err).to.not.exist() - expect(exists).to.equal(false) - done() - }) + it('simple', async () => { + await repo.blocks.delete(b.cid) + const exists = await repo.blocks.has(b.cid) + expect(exists).to.equal(false) }) }) }) diff --git a/test/browser.js b/test/browser.js index f5490093..67591acb 100644 --- a/test/browser.js +++ b/test/browser.js @@ -2,27 +2,24 @@ 'use strict' -const series = require('async/series') - const IPFSRepo = require('../src') describe('IPFS Repo Tests on the Browser', () => { require('./options-test') const repo = 
new IPFSRepo('myrepo') - before((done) => { - series([ - (cb) => repo.init({}, cb), - (cb) => repo.open(cb) - ], done) + before(async () => { + await repo.init({}) + await repo.open() }) - after((done) => { - repo.close(done) + after(async () => { + await repo.close() }) require('./repo-test')(repo) require('./blockstore-test')(repo) require('./datastore-test')(repo) require('./keystore-test')(repo) + require('./lock-test')(repo) }) diff --git a/test/datastore-test.js b/test/datastore-test.js index b7fb8c26..e85296c7 100644 --- a/test/datastore-test.js +++ b/test/datastore-test.js @@ -5,9 +5,6 @@ const chai = require('chai') chai.use(require('dirty-chai')) const expect = chai.expect -const parallel = require('async/parallel') -const waterfall = require('async/waterfall') -const each = require('async/each') const _ = require('lodash') const Key = require('interface-datastore').Key @@ -18,75 +15,55 @@ module.exports = (repo) => { const b = new Key('hello') describe('.put', () => { - it('simple', (done) => { - repo.datastore.put(b, data, done) + it('simple', async () => { + await repo.datastore.put(b, data) }) - it('multi write (locks)', (done) => { - parallel([ - (cb) => repo.datastore.put(b, data, cb), - (cb) => repo.datastore.put(b, data, cb) - ], done) + it('multi write (locks)', async () => { + await Promise.all([repo.datastore.put(b, data), repo.datastore.put(b, data)]) }) - it('massive multiwrite', function (done) { + it('massive multiwrite', async function () { this.timeout(15000) // add time for ci - each(_.range(100), (i, cb) => { - repo.datastore.put(new Key('hello' + i), dataList[i], cb) - }, done) + await Promise.all(_.range(100).map((i) => { + return repo.datastore.put(new Key('hello' + i), dataList[i]) + })) }) }) describe('.get', () => { - it('simple', (done) => { - repo.datastore.get(b, (err, val) => { - expect(err).to.not.exist() - expect(val).to.be.eql(data) - done() - }) + it('simple', async () => { + const val = await repo.datastore.get(b) + 
expect(val).to.be.eql(data) }) - it('massive read', function (done) { + it('massive read', async function () { this.timeout(15000) // add time for ci - parallel(_.range(20 * 100).map((i) => (cb) => { + await Promise.all(_.range(20 * 100).map((i) => { const j = i % dataList.length - repo.datastore.get(new Key('hello' + j), (err, val) => { - expect(err).to.not.exist() - expect(val).to.be.eql(dataList[j]) - cb() - }) - }), done) + return repo.datastore.get(new Key('hello' + j)) + .then(val => expect(val).to.be.eql(dataList[j])) + })) }).timeout(10 * 1000) }) describe('.has', () => { - it('existing entry', (done) => { - repo.datastore.has(b, (err, exists) => { - expect(err).to.not.exist() - expect(exists).to.eql(true) - done() - }) + it('existing entry', async () => { + const exists = await repo.datastore.has(b) + expect(exists).to.eql(true) }) - it('non existent block', (done) => { - repo.datastore.has(new Key('world'), (err, exists) => { - expect(err).to.not.exist() - expect(exists).to.eql(false) - done() - }) + it('non existent block', async () => { + const exists = await repo.datastore.has(new Key('world')) + expect(exists).to.eql(false) }) }) describe('.delete', () => { - it('simple', (done) => { - waterfall([ - (cb) => repo.datastore.delete(b, cb), - (cb) => repo.datastore.has(b, cb) - ], (err, exists) => { - expect(err).to.not.exist() - expect(exists).to.equal(false) - done() - }) + it('simple', async () => { + await repo.datastore.delete(b) + const exists = await repo.datastore.has(b) + expect(exists).to.equal(false) }) }) }) diff --git a/test/interop-test.js b/test/interop-test.js index 05920c10..7a938b9d 100644 --- a/test/interop-test.js +++ b/test/interop-test.js @@ -7,50 +7,38 @@ const expect = chai.expect const mh = require('multihashes') const CID = require('cids') const Key = require('interface-datastore').Key -const map = require('async/map') module.exports = (repo) => { describe('interop', () => { - it('reads welcome-to-ipfs', (done) => { + it('reads 
welcome-to-ipfs', async () => { const welcomeHash = mh.fromHexString( '1220120f6af601d46e10b2d2e11ed71c55d25f3042c22501e41d1246e7a1e9d3d8ec' ) - repo.blocks.get(new CID(welcomeHash), (err, val) => { - expect(err).to.not.exist() - expect(val.data.toString()).to.match(/Hello and Welcome to IPFS/) - done() - }) + const val = await repo.blocks.get(new CID(welcomeHash)) + expect(val.data.toString()).to.match(/Hello and Welcome to IPFS/) }) - it('reads a bunch of blocks', (done) => { + it('reads a bunch of blocks', async () => { const cids = [ 'QmUxpzJGJYTK5AzH36jV9ucM2WdF5KhjANb4FAhqnREzuC', 'QmQbb26h9dcU5iNPMNEzYZnZN9YLTXBtFwuHmmo6YU4Aig' ].map((hash) => new CID(mh.fromB58String(hash))) - - map(cids, repo.blocks.get, (err, values) => { - expect(err).to.not.exist() - expect(values.length).to.equal(2) - expect(values.map((value) => value.data.length)).to.eql([2659, 12783]) - done() + const values = await Promise.all(cids.map(cid => repo.blocks.get(cid))) + expect(values.length).to.equal(2) + values.forEach((value, i) => { + expect(value.data.length).to.eql([2659, 12783][i]) }) }) - it('reads pin set from the datastore', (done) => { - repo.datastore.get(new Key('/local/pins'), (err, val) => { - expect(err).to.not.exist() - expect(mh.toB58String(val)).to.equal('QmYAuyf2LzMba65NnhxLtGJxixKNUev9qYSu4MYM88hdwK') - done() - }) + it('reads pin set from the datastore', async () => { + const val = await repo.datastore.get(new Key('/local/pins')) + expect(mh.toB58String(val)).to.equal('QmYAuyf2LzMba65NnhxLtGJxixKNUev9qYSu4MYM88hdwK') }) - it('reads DHT records from the datastore', (done) => { - repo.datastore.get(new Key('/AHE5I5B7TY'), (err, val) => { - expect(err).to.not.exist() - expect(val.toString('hex')).to.eql('0a0601c9d4743f9e12097465737476616c75651a2212201d22e2a5e140e5cd20d88fc59cd560f4887c7d9acf938ddb24d7207eac40fd2f') - done() - }) + it('reads DHT records from the datastore', async () => { + const val = await repo.datastore.get(new Key('/AHE5I5B7TY')) + 
expect(val.toString('hex')).to.eql('0a0601c9d4743f9e12097465737476616c75651a2212201d22e2a5e140e5cd20d88fc59cd560f4887c7d9acf938ddb24d7207eac40fd2f') }) }) } diff --git a/test/lock-test.js b/test/lock-test.js index 84d3e485..60ec2163 100644 --- a/test/lock-test.js +++ b/test/lock-test.js @@ -4,28 +4,17 @@ const chai = require('chai') chai.use(require('dirty-chai')) const expect = chai.expect -const series = require('async/series') const IPFSRepo = require('../') module.exports = (repo) => { describe('Repo lock tests', () => { - it('should handle locking for a repo lifecycle', (done) => { + it('should handle locking for a repo lifecycle', async () => { expect(repo.lockfile).to.not.equal(null) - series([ - (cb) => { - repo.close(cb) - }, - (cb) => { - expect(repo.lockfile).to.equal(null) - cb() - }, - (cb) => { - repo.open(cb) - } - ], done) + await repo.close() + await repo.open() }) - it('should prevent multiple repos from using the same path', (done) => { + it('should prevent multiple repos from using the same path', async () => { const repoClone = new IPFSRepo(repo.path, repo.options) // Levelup throws an uncaughtException when a lock already exists, catch it @@ -35,29 +24,18 @@ module.exports = (repo) => { expect(err.message).to.match(/already held|IO error|already being hold/) }) - series([ - (cb) => { - try { - repoClone.init({}, cb) - } catch (err) { - cb(err) - } - }, - (cb) => { - repoClone.open(cb) - } - ], function (err) { - // There will be no listeners if the uncaughtException was triggered + try { + await repoClone.init({}) + await repoClone.open() + } catch (err) { if (process.listeners('uncaughtException').length > 0) { expect(err.message).to.match(/already locked|already held|already being hold|ELOCKED/) } - + } finally { // Reset listeners to maintain test integrity process.removeAllListeners('uncaughtException') process.addListener('uncaughtException', mochaExceptionHandler) - - done() - }) + } }) }) } diff --git a/test/node.js b/test/node.js index 
3f62f14b..85394714 100644 --- a/test/node.js +++ b/test/node.js @@ -5,10 +5,15 @@ const ncp = require('ncp').ncp const rimraf = require('rimraf') const fs = require('fs') const path = require('path') -const series = require('async/series') +const promisify = require('util').promisify + const chai = require('chai') chai.use(require('dirty-chai')) +const asyncRimraf = promisify(rimraf) +const asyncNcp = promisify(ncp) +const fsstat = promisify(fs.stat) + const IPFSRepo = require('../src') describe('IPFS Repo Tests onNode.js', () => { @@ -16,30 +21,24 @@ describe('IPFS Repo Tests onNode.js', () => { const customLock = { lockName: 'test.lock', - lock: (dir, callback) => { - customLock.locked(dir, (err, isLocked) => { - if (err || isLocked) { - return callback(new Error('already locked')) - } - - const lockPath = path.join(dir, customLock.lockName) - fs.writeFileSync(lockPath, '') - - callback(null, { - close: (cb) => { - rimraf(lockPath, cb) - } - }) - }) + lock: async (dir) => { + const isLocked = await customLock.locked(dir) + if (isLocked) { + throw new Error('already locked') + } + const lockPath = path.join(dir, customLock.lockName) + fs.writeFileSync(lockPath, '') + return { + close: () => asyncRimraf(lockPath) + } }, - locked: (dir, callback) => { - fs.stat(path.join(dir, customLock.lockName), (err, stats) => { - if (err) { - callback(null, false) - } else { - callback(null, true) - } - }) + locked: async (dir) => { + try { + await fsstat(path.join(dir, customLock.lockName)) + return true + } catch (err) { + return false + } } } @@ -78,24 +77,18 @@ describe('IPFS Repo Tests onNode.js', () => { const repo = new IPFSRepo(repoPath, r.opts) - before((done) => { - series([ - (cb) => { - if (r.init) { - repo.init({}, cb) - } else { - ncp(testRepoPath, repoPath, cb) - } - }, - (cb) => repo.open(cb) - ], done) + before(async () => { + if (r.init) { + await repo.init({}) + } else { + await asyncNcp(testRepoPath, repoPath) + } + await repo.open() }) - after((done) => { - 
series([ - (cb) => repo.close(cb), - (cb) => rimraf(repoPath, cb) - ], done) + after(async () => { + await repo.close() + await asyncRimraf(repoPath) }) require('./repo-test')(repo) diff --git a/test/options-test.js b/test/options-test.js index b7afb606..f054ea87 100644 --- a/test/options-test.js +++ b/test/options-test.js @@ -31,8 +31,8 @@ describe('custom options tests', () => { it('allows for a custom lock', () => { const lock = { - lock: (path, callback) => { }, - locked: (path, callback) => { } + lock: async (path) => { }, + locked: async (path) => { } } const repo = new Repo(repoPath, { @@ -42,21 +42,23 @@ describe('custom options tests', () => { expect(repo._getLocker()).to.deep.equal(lock) }) - it('ensures a custom lock has a .close method', (done) => { + it('ensures a custom lock has a .close method', async () => { const lock = { - lock: (path, callback) => { - callback(null, {}) + lock: async (path) => { + return {} } } const repo = new Repo(repoPath, { lock }) - - expect( - () => repo._openLock(repo.path) - ).to.throw('Locks must have a close method') - done() + let error + try { + await repo._openLock(repo.path) + } catch (err) { + error = err + } + expect(error.message).to.equal('Locks must have a close method') }) }) diff --git a/test/repo-test.js b/test/repo-test.js index 83c625d3..2769b0af 100644 --- a/test/repo-test.js +++ b/test/repo-test.js @@ -4,17 +4,12 @@ const chai = require('chai') chai.use(require('dirty-chai')) const expect = chai.expect -const series = require('async/series') -const waterfall = require('async/waterfall') module.exports = (repo) => { describe('IPFS Repo Tests', () => { - it('check if Repo exists', (done) => { - repo.exists((err, exists) => { - expect(err).to.not.exist() - expect(exists).to.equal(true) - done() - }) + it('check if Repo exists', async () => { + const exists = await repo.exists() + expect(exists).to.equal(true) }) it('exposes the path', () => { @@ -22,100 +17,62 @@ module.exports = (repo) => { }) 
describe('config', () => { - it('get config', (done) => { - repo.config.get((err, config) => { - expect(err).to.not.exist() - expect(config).to.be.a('object') - done() - }) + it('get config', async () => { + const config = await repo.config.get() + expect(config).to.be.a('object') }) - it('set config', (done) => { - series([ - (cb) => repo.config.set({ a: 'b' }, cb), - (cb) => repo.config.get((err, config) => { - if (err) return cb(err) - expect(config).to.deep.equal({ a: 'b' }) - cb() - }) - ], done) + it('set config', async () => { + await repo.config.set({ a: 'b' }) + const config = await repo.config.get() + expect(config).to.deep.equal({ a: 'b' }) }) - it('get config key', (done) => { - repo.config.get('a', (err, value) => { - expect(err).to.not.exist() - expect(value).to.equal('b') - done() - }) + it('get config key', async () => { + const value = await repo.config.get('a') + expect(value).to.equal('b') }) - it('set config key', (done) => { - series([ - (cb) => repo.config.set('c.x', 'd', cb), - (cb) => repo.config.get((err, config) => { - if (err) return cb(err) - expect(config).to.deep.equal({ a: 'b', c: { x: 'd' } }) - cb() - }) - ], done) + it('set config key', async () => { + await repo.config.set('c.x', 'd') + const config = await repo.config.get() + expect(config).to.deep.equal({ a: 'b', c: { x: 'd' } }) }) }) describe('spec', () => { - it('get spec', (done) => { - repo.spec.get((err) => { - expect(err).to.not.exist() - done() - }) + it('get spec', async () => { + await repo.spec.get() }) - it('set spec', (done) => { - series([ - (cb) => repo.spec.set({ a: 'b' }, cb), - (cb) => repo.spec.get((err, spec) => { - if (err) return cb(err) - expect(spec).to.deep.equal({ a: 'b' }) - cb() - }) - ], done) + it('set spec', async () => { + await repo.spec.set({ a: 'b' }) + const spec = await repo.spec.get() + expect(spec).to.deep.equal({ a: 'b' }) }) }) describe('version', () => { - it('get version', (done) => { - repo.version.get((err, version) => { - 
expect(err).to.not.exist() - expect(version).to.equal(7) - done() - }) + it('get version', async () => { + const version = await repo.version.get() + expect(version).to.equal(7) }) - it('set version', (done) => { - waterfall([ - (cb) => repo.version.set(9000, cb), - (cb) => repo.version.get(cb), - (version, cb) => { - expect(version).to.equal(9000) - cb() - }, - (cb) => repo.version.set(7, cb) - ], done) + it('set version', async () => { + await repo.version.set(9000) + expect(await repo.version.get()).to.equal(9000) + await repo.version.set(7) }) }) describe('lifecycle', () => { - it('close and open', (done) => { - waterfall([ - (cb) => repo.close(cb), - (cb) => repo.open(cb), - (cb) => repo.close(cb), - (cb) => repo.open(cb), - (cb) => repo.version.get(cb), - (version, cb) => { - expect(version).to.exist() - cb() - } - ], done) + it('close and open', async () => { + await repo.close() + await repo.open() + await repo.close() + await repo.open() + const version = await repo.version.get() + expect(version).to.exist() }) }) }) }) diff --git a/test/stat-test.js b/test/stat-test.js index b263c368..2db0d8c3 100644 --- a/test/stat-test.js +++ b/test/stat-test.js @@ -7,40 +7,34 @@ const expect = chai.expect module.exports = (repo) => { describe('stat', () => { - it('get stats', (done) => { - repo.stat((err, stats) => { - expect(err).to.not.exist() - expect(stats).to.exist() - expect(stats).to.have.property('numObjects') - expect(stats).to.have.property('version') - expect(stats).to.have.property('repoPath') - expect(stats).to.have.property('repoSize') - expect(stats).to.have.property('storageMax') + it('get stats', async () => { + const stats = await repo.stat() + expect(stats).to.exist() + expect(stats).to.have.property('numObjects') + expect(stats).to.have.property('version') + expect(stats).to.have.property('repoPath') + expect(stats).to.have.property('repoSize') + expect(stats).to.have.property('storageMax') - expect(stats.numObjects > '0').to.eql(true) - expect(stats.version > 
'0').to.eql(true) - expect(stats.repoSize > '0').to.eql(true) - expect(stats.storageMax > '0').to.eql(true) - done() - }) + expect(stats.numObjects > '0').to.eql(true) + expect(stats.version > '0').to.eql(true) + expect(stats.repoSize > '0').to.eql(true) + expect(stats.storageMax > '0').to.eql(true) }) - it('get human stats', (done) => { - repo.stat({ human: true }, (err, stats) => { - expect(err).to.not.exist() - expect(stats).to.exist() - expect(stats).to.have.property('numObjects') - expect(stats).to.have.property('version') - expect(stats).to.have.property('repoPath') - expect(stats).to.have.property('repoSize') - expect(stats).to.have.property('storageMax') + it('get human stats', async () => { + const stats = await repo.stat({ human: true }) + expect(stats).to.exist() + expect(stats).to.have.property('numObjects') + expect(stats).to.have.property('version') + expect(stats).to.have.property('repoPath') + expect(stats).to.have.property('repoSize') + expect(stats).to.have.property('storageMax') - expect(stats.numObjects > '0').to.eql(true) - expect(stats.version > '0').to.eql(true) - expect(stats.repoSize > '0').to.eql(true) - expect(stats.storageMax > '0').to.eql(true) - done() - }) + expect(stats.numObjects > '0').to.eql(true) + expect(stats.version > '0').to.eql(true) + expect(stats.repoSize > '0').to.eql(true) + expect(stats.storageMax > '0').to.eql(true) }) }) }