diff --git a/.aegir.js b/.aegir.js
new file mode 100644
index 00000000..f1766146
--- /dev/null
+++ b/.aegir.js
@@ -0,0 +1,10 @@
+'use strict'
+
+module.exports = {
+  webpack: {
+    node: {
+      // this is needed until level stops using node buffers in browser code
+      Buffer: true
+    }
+  }
+}
diff --git a/package.json b/package.json
index 871b39e3..ec760887 100644
--- a/package.json
+++ b/package.json
@@ -43,7 +43,7 @@
     "npm": ">=3.0.0"
   },
   "devDependencies": {
-    "aegir": "^23.0.0",
+    "aegir": "^25.0.0",
     "chai": "^4.2.0",
     "chai-as-promised": "^7.1.1",
     "dirty-chai": "^2.0.1",
@@ -68,7 +68,7 @@
     "debug": "^4.1.0",
     "err-code": "^2.0.0",
     "interface-datastore": "^1.0.2",
-    "ipfs-repo-migrations": "^1.0.0",
+    "ipfs-repo-migrations": "^2.0.0",
     "ipfs-utils": "^2.2.0",
     "ipld-block": "^0.9.1",
     "it-map": "^1.0.2",
diff --git a/src/default-options-browser.js b/src/default-options-browser.js
index 07e59d1a..dd7d6c10 100644
--- a/src/default-options-browser.js
+++ b/src/default-options-browser.js
@@ -7,7 +7,8 @@ module.exports = {
     root: require('datastore-level'),
     blocks: require('datastore-level'),
     keys: require('datastore-level'),
-    datastore: require('datastore-level')
+    datastore: require('datastore-level'),
+    pins: require('datastore-level')
   },
   storageBackendOptions: {
     root: {
@@ -29,6 +30,11 @@ module.exports = {
       sharding: false,
      prefix: '',
       version: 2
+    },
+    pins: {
+      sharding: false,
+      prefix: '',
+      version: 2
     }
   }
 }
diff --git a/src/default-options.js b/src/default-options.js
index e9322d67..510a2609 100644
--- a/src/default-options.js
+++ b/src/default-options.js
@@ -7,7 +7,8 @@ module.exports = {
     root: require('datastore-fs'),
     blocks: require('datastore-fs'),
     keys: require('datastore-fs'),
-    datastore: require('datastore-level')
+    datastore: require('datastore-level'),
+    pins: require('datastore-level')
   },
   storageBackendOptions: {
     root: {
diff --git a/src/index.js b/src/index.js
index 85bc5503..8262045a 100644
--- a/src/index.js
+++ b/src/index.js
@@ -31,7 +31,6 @@ const lockers = {
 
 /**
  * IpfsRepo implements all required functionality to read and write to an ipfs repo.
- *
  */
 class IpfsRepo {
   /**
@@ -122,6 +121,10 @@ class IpfsRepo {
     log('creating keystore')
     this.keys = backends.create('keys', pathJoin(this.path, 'keys'), this.options)
     await this.keys.open()
+    log('creating pins')
+    this.pins = backends.create('pins', pathJoin(this.path, 'pins'), this.options)
+    await this.pins.open()
+
     const isCompatible = await this.version.check(constants.repoVersion)
     if (!isCompatible) {
       if (await this._isAutoMigrationEnabled()) {
@@ -262,7 +265,8 @@ class IpfsRepo {
       this.root,
       this.blocks,
       this.keys,
-      this.datastore
+      this.datastore,
+      this.pins
     ].map((store) => store.close()))
 
     log('unlocking')
diff --git a/test/browser.js b/test/browser.js
index f955a7f5..82e2c8f7 100644
--- a/test/browser.js
+++ b/test/browser.js
@@ -38,5 +38,6 @@ describe('IPFS Repo Tests on the Browser', () => {
   require('./config-test')(repo)
   require('./api-addr-test')(repo)
   require('./lock-test')(repo)
+  require('./pins-test')(repo)
   require('./is-initialized')
 })
diff --git a/test/node.js b/test/node.js
index 3b515f60..0ca879f1 100644
--- a/test/node.js
+++ b/test/node.js
@@ -114,6 +114,7 @@ describe('IPFS Repo Tests onNode.js', () => {
     if (!r.init) {
       require('./interop-test')(repo)
     }
+    require('./pins-test')(repo)
     require('./is-initialized')
   }))
 
diff --git a/test/pins-test.js b/test/pins-test.js
new file mode 100644
index 00000000..d0c12c04
--- /dev/null
+++ b/test/pins-test.js
@@ -0,0 +1,73 @@
+/* eslint max-nested-callbacks: ["error", 8] */
+/* eslint-env mocha */
+'use strict'
+
+const { Buffer } = require('buffer')
+const { expect } = require('./utils/chai')
+const range = require('just-range')
+const Key = require('interface-datastore').Key
+
+module.exports = (repo) => {
+  describe('pins', () => {
+    const dataList = range(100).map((i) => Buffer.from(`hello-${i}-${Math.random()}`))
+    const data = Buffer.from('hello world')
+    const b = new Key('hello')
+
+    it('exists', () => {
+      expect(repo).to.have.property('pins')
+    })
+
+    describe('.put', () => {
+      it('simple', async () => {
+        await repo.pins.put(b, data)
+      })
+
+      it('multi write (locks)', async () => {
+        await Promise.all([repo.pins.put(b, data), repo.pins.put(b, data)])
+      })
+
+      it('massive multiwrite', async function () {
+        this.timeout(15000) // add time for ci
+        await Promise.all(range(100).map((i) => {
+          return repo.pins.put(new Key('hello' + i), dataList[i])
+        }))
+      })
+    })
+
+    describe('.get', () => {
+      it('simple', async () => {
+        const val = await repo.pins.get(b)
+        expect(val).to.be.eql(data)
+      })
+
+      it('massive read', async function () {
+        this.timeout(15000) // add time for ci
+        await Promise.all(range(20 * 100).map(async (i) => {
+          const j = i % dataList.length
+          const val = await repo.pins.get(new Key('hello' + j))
+          expect(val).to.be.eql(dataList[j])
+        }))
+      }).timeout(10 * 1000)
+    })
+
+    describe('.has', () => {
+      it('existing pin', async () => {
+        const exists = await repo.pins.has(b)
+        expect(exists).to.eql(true)
+      })
+
+      it('non existent pin', async () => {
+        const exists = await repo.pins.has(new Key('world'))
+        expect(exists).to.eql(false)
+      })
+    })
+
+    describe('.delete', () => {
+      it('simple', async () => {
+        await repo.pins.delete(b)
+        const exists = await repo.pins.has(b)
+        expect(exists).to.equal(false)
+      })
+    })
+  })
+}
diff --git a/test/repo-test.js b/test/repo-test.js
index 48336887..f61c36d8 100644
--- a/test/repo-test.js
+++ b/test/repo-test.js
@@ -162,13 +162,14 @@ module.exports = (repo) => {
         root: FakeDatastore,
         blocks: FakeDatastore,
         keys: FakeDatastore,
-        datastore: FakeDatastore
+        datastore: FakeDatastore,
+        pins: FakeDatastore
       }
     })
     await repo.init({})
     await repo.open()
     await repo.close()
-    expect(count).to.be.eq(4)
+    expect(count).to.be.eq(5)
   })
 
   it('open twice throws error', async () => {
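
Illustrative usage (not part of the diff): a minimal sketch of how a consumer might exercise the new `pins` datastore once this change lands. The repo path and pinned value are made up for the example; the `Key` usage and the `put`/`get`/`has`/`delete` calls mirror what test/pins-test.js above exercises through the standard interface-datastore API, and the `init`/`open`/`close` lifecycle follows the ipfs-repo API already used in the tests.

```js
'use strict'

// Sketch only; assumes the ipfs-repo constructor/init/open/close API of this
// release and the interface-datastore methods shown in test/pins-test.js.
const IPFSRepo = require('ipfs-repo')
const { Key } = require('interface-datastore')
const { Buffer } = require('buffer')

async function main () {
  const repo = new IPFSRepo('/tmp/example-repo') // hypothetical repo path
  await repo.init({})
  await repo.open()

  const key = new Key('hello')
  await repo.pins.put(key, Buffer.from('pin record')) // write to the pins store
  console.log('has:', await repo.pins.has(key))       // true
  console.log('value:', (await repo.pins.get(key)).toString())
  await repo.pins.delete(key)                         // remove the entry

  await repo.close()
}

main().catch(console.error)
```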