Skip to content
This repository was archived by the owner on Aug 12, 2020. It is now read-only.

Commit 877e053

Browse files
authored
Merge pull request #224 from ipfs/update-deps-and-use-cids-over-multihashes
chore: update deps and use cids instead of multihashes
2 parents 6db7346 + aa9dea1 commit 877e053

10 files changed: +32 additions, −39 deletions

package.json

+4-5
Original file line numberDiff line numberDiff line change
@@ -40,9 +40,10 @@
4040
"aegir": "^15.0.0",
4141
"chai": "^4.1.2",
4242
"dirty-chai": "^2.0.1",
43-
"ipfs": "~0.30.1",
43+
"ipfs": "~0.31.2",
4444
"ipfs-block-service": "~0.14.0",
4545
"ipfs-repo": "~0.22.1",
46+
"multihashes": "~0.4.13",
4647
"ncp": "^2.0.0",
4748
"pull-generate": "^2.2.0",
4849
"pull-zip": "^2.0.1",
@@ -52,15 +53,13 @@
5253
},
5354
"dependencies": {
5455
"async": "^2.6.1",
55-
"bs58": "^4.0.1",
5656
"cids": "~0.5.3",
5757
"deep-extend": "~0.6.0",
5858
"ipfs-unixfs": "~0.1.15",
59-
"ipld": "~0.17.2",
60-
"ipld-dag-pb": "~0.14.5",
59+
"ipld": "~0.17.3",
60+
"ipld-dag-pb": "~0.14.6",
6161
"left-pad": "^1.3.0",
6262
"lodash": "^4.17.10",
63-
"multihashes": "~0.4.13",
6463
"multihashing-async": "~0.5.1",
6564
"pull-batch": "^1.0.0",
6665
"pull-block": "^1.4.0",

src/exporter/clean-multihash.js

+2-6
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,7 @@
11
'use strict'
22

3-
const mh = require('multihashes')
3+
const CID = require('cids')
44

55
module.exports = (multihash) => {
6-
if (Buffer.isBuffer(multihash)) {
7-
return mh.toB58String(multihash)
8-
}
9-
10-
return multihash
6+
return new CID(multihash).toBaseEncodedString()
117
}

src/importer/flush-tree.js

+5-5
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,5 @@
11
'use strict'
22

3-
const mh = require('multihashes')
43
const UnixFS = require('ipfs-unixfs')
54
const CID = require('cids')
65
const dagPB = require('ipld-dag-pb')
@@ -86,7 +85,7 @@ function createSizeIndex (files) {
8685
const sizeIndex = {}
8786

8887
files.forEach((file) => {
89-
sizeIndex[mh.toB58String(file.multihash)] = file.size
88+
sizeIndex[new CID(file.multihash).toBaseEncodedString()] = file.size
9089
})
9190

9291
return sizeIndex
@@ -126,17 +125,18 @@ function traverse (tree, sizeIndex, path, ipld, source, done) {
126125
const keys = Object.keys(tree)
127126
const dir = new UnixFS('directory')
128127
const links = keys.map((key) => {
129-
const b58mh = mh.toB58String(tree[key])
128+
const b58mh = new CID(tree[key]).toBaseEncodedString()
130129
return new DAGLink(key, sizeIndex[b58mh], tree[key])
131130
})
132131

133132
waterfall([
134133
(cb) => DAGNode.create(dir.marshal(), links, cb),
135134
(node, cb) => {
136-
sizeIndex[mh.toB58String(node.multihash)] = node.size
135+
const cid = new CID(node.multihash)
136+
sizeIndex[cid.toBaseEncodedString()] = node.size
137137

138138
ipld.put(node, {
139-
cid: new CID(node.multihash)
139+
cid
140140
}, (err) => cb(err, node))
141141
}
142142
], (err, node) => {

test/builder-dir-sharding.js

+7-7
Original file line numberDiff line numberDiff line change
@@ -7,14 +7,14 @@ const exporter = require('./../src').exporter
77
const chai = require('chai')
88
chai.use(require('dirty-chai'))
99
const expect = chai.expect
10-
const mh = require('multihashes')
1110
const BlockService = require('ipfs-block-service')
1211
const Ipld = require('ipld')
1312
const pull = require('pull-stream')
1413
const pushable = require('pull-pushable')
1514
const whilst = require('async/whilst')
1615
const setImmediate = require('async/setImmediate')
1716
const leftPad = require('left-pad')
17+
const CID = require('cids')
1818

1919
module.exports = (repo) => {
2020
describe('builder: directory sharding', function () {
@@ -87,9 +87,9 @@ module.exports = (repo) => {
8787
pull.collect((err, nodes) => {
8888
expect(err).to.not.exist()
8989
expect(nodes.length).to.be.eql(2)
90-
const expectedHash = mh.toB58String(nonShardedHash)
90+
const expectedHash = new CID(nonShardedHash).toBaseEncodedString()
9191
expect(nodes[0].path).to.be.eql(expectedHash)
92-
expect(mh.toB58String(nodes[0].hash)).to.be.eql(expectedHash)
92+
expect(new CID(nodes[0].hash).toBaseEncodedString()).to.be.eql(expectedHash)
9393
expect(nodes[1].path).to.be.eql(expectedHash + '/b')
9494
expect(nodes[1].size).to.be.eql(29)
9595
pull(
@@ -113,7 +113,7 @@ module.exports = (repo) => {
113113
pull.collect((err, nodes) => {
114114
expect(err).to.not.exist()
115115
expect(nodes.length).to.be.eql(2)
116-
const expectedHash = mh.toB58String(shardedHash)
116+
const expectedHash = new CID(shardedHash).toBaseEncodedString()
117117
expect(nodes[0].path).to.be.eql(expectedHash)
118118
expect(nodes[0].hash).to.be.eql(expectedHash)
119119
expect(nodes[1].path).to.be.eql(expectedHash + '/b')
@@ -209,7 +209,7 @@ module.exports = (repo) => {
209209
function eachPath (path, index) {
210210
if (!index) {
211211
// first dir
212-
expect(path).to.be.eql(mh.toB58String(rootHash))
212+
expect(path).to.be.eql(new CID(rootHash).toBaseEncodedString())
213213
const entry = entries[path]
214214
expect(entry).to.exist()
215215
expect(entry.content).to.not.exist()
@@ -315,7 +315,7 @@ module.exports = (repo) => {
315315
if (!index) {
316316
// first dir
317317
if (depth === 1) {
318-
expect(path).to.be.eql(mh.toB58String(rootHash))
318+
expect(path).to.be.eql(new CID(rootHash).toBaseEncodedString())
319319
}
320320
const entry = entries[path]
321321
expect(entry).to.exist()
@@ -338,7 +338,7 @@ module.exports = (repo) => {
338338
})
339339

340340
it('exports a big dir with subpath', (done) => {
341-
const exportHash = mh.toB58String(rootHash) + '/big/big/2000'
341+
const exportHash = new CID(rootHash).toBaseEncodedString() + '/big/big/2000'
342342
pull(
343343
exporter(exportHash, ipld),
344344
pull.collect(collected)

test/builder.js

+3-1
Original file line numberDiff line numberDiff line change
@@ -62,7 +62,9 @@ module.exports = (repo) => {
6262
}, done)
6363
})
6464

65-
it('allows multihash hash algorithm to be specified for big file', (done) => {
65+
it('allows multihash hash algorithm to be specified for big file', function (done) {
66+
this.timeout(30000)
67+
6668
eachSeries(testMultihashes, (hashAlg, cb) => {
6769
const options = { hashAlg, strategy: 'flat' }
6870
const content = String(Math.random() + Date.now())

test/exporter.js

+2-5
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,6 @@ const expect = chai.expect
77
const BlockService = require('ipfs-block-service')
88
const Ipld = require('ipld')
99
const UnixFS = require('ipfs-unixfs')
10-
const bs58 = require('bs58')
1110
const pull = require('pull-stream')
1211
const zip = require('pull-zip')
1312
const CID = require('cids')
@@ -152,7 +151,6 @@ module.exports = (repo) => {
152151

153152
it('ensure hash inputs are sanitized', (done) => {
154153
const hash = 'QmQmZQxSKQppbsWfVzBvg59Cn3DKtsNVQ94bjAxg2h3Lb8'
155-
const mhBuf = Buffer.from(bs58.decode(hash))
156154
const cid = new CID(hash)
157155

158156
ipld.get(cid, (err, result) => {
@@ -161,7 +159,7 @@ module.exports = (repo) => {
161159
const unmarsh = UnixFS.unmarshal(node.data)
162160

163161
pull(
164-
exporter(mhBuf, ipld),
162+
exporter(cid, ipld),
165163
pull.collect(onFiles)
166164
)
167165

@@ -444,8 +442,7 @@ module.exports = (repo) => {
444442
exporter(files[0].multihash, ipld),
445443
pull.collect((err, files) => {
446444
expect(err).to.not.exist()
447-
448-
expect(bs58.encode(files[0].hash)).to.equal('QmQLTvhjmSa7657mKdSfTjxFBdwxmK8n9tZC9Xdp9DtxWY')
445+
expect(new CID(files[0].hash).toBaseEncodedString()).to.equal('QmQLTvhjmSa7657mKdSfTjxFBdwxmK8n9tZC9Xdp9DtxWY')
449446

450447
fileEql(files[0], bigFile, done)
451448
})

test/hash-parity-with-go-ipfs.js

+2-2
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ chai.use(require('dirty-chai'))
88
const expect = chai.expect
99
const BlockService = require('ipfs-block-service')
1010
const pull = require('pull-stream')
11-
const mh = require('multihashes')
11+
const CID = require('cids')
1212
const Ipld = require('ipld')
1313
const randomByteStream = require('./helpers/finite-pseudorandom-byte-stream')
1414

@@ -53,7 +53,7 @@ module.exports = (repo) => {
5353
expect(files.length).to.be.equal(1)
5454

5555
const file = files[0]
56-
expect(mh.toB58String(file.multihash)).to.be.equal(expectedHashes[strategy])
56+
expect(new CID(file.multihash).toBaseEncodedString()).to.be.equal(expectedHashes[strategy])
5757
done()
5858
})
5959
)

test/import-export-nested-dir.js

+2-2
Original file line numberDiff line numberDiff line change
@@ -7,8 +7,8 @@ const expect = chai.expect
77
const BlockService = require('ipfs-block-service')
88
const Ipld = require('ipld')
99
const pull = require('pull-stream')
10-
const mh = require('multihashes')
1110
const map = require('async/map')
11+
const CID = require('cids')
1212

1313
const unixFSEngine = require('./../')
1414

@@ -109,7 +109,7 @@ module.exports = (repo) => {
109109
function normalizeNode (node) {
110110
return {
111111
path: node.path,
112-
multihash: mh.toB58String(node.multihash)
112+
multihash: new CID(node.multihash).toBaseEncodedString()
113113
}
114114
}
115115

test/importer.js

+2-3
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,6 @@ const expect = chai.expect
1010
const spy = require('sinon/lib/sinon/spy')
1111
const BlockService = require('ipfs-block-service')
1212
const pull = require('pull-stream')
13-
const mh = require('multihashes')
1413
const CID = require('cids')
1514
const Ipld = require('ipld')
1615
const loadFixture = require('aegir/fixtures')
@@ -22,7 +21,7 @@ const collectLeafCids = require('./helpers/collect-leaf-cids')
2221

2322
function stringifyMh (files) {
2423
return files.map((file) => {
25-
file.multihash = mh.toB58String(file.multihash)
24+
file.multihash = new CID(file.multihash).toBaseEncodedString()
2625
return file
2726
})
2827
}
@@ -279,7 +278,7 @@ module.exports = (repo) => {
279278
expect(nodes.length).to.be.eql(1)
280279

281280
// always yield empty node
282-
expect(mh.toB58String(nodes[0].multihash)).to.be.eql('QmbFMke1KXqnYyBBWxB74N4c5SBnJMVAiMNRcGu6x1AwQH')
281+
expect(new CID(nodes[0].multihash).toBaseEncodedString()).to.be.eql('QmbFMke1KXqnYyBBWxB74N4c5SBnJMVAiMNRcGu6x1AwQH')
283282
done()
284283
}))
285284
})

test/with-dag-api.js

+3-3
Original file line numberDiff line numberDiff line change
@@ -10,15 +10,15 @@ const chai = require('chai')
1010
chai.use(require('dirty-chai'))
1111
const expect = chai.expect
1212
const pull = require('pull-stream')
13-
const mh = require('multihashes')
1413
const loadFixture = require('aegir/fixtures')
1514
const IPFS = require('ipfs')
1615
const os = require('os')
1716
const path = require('path')
17+
const CID = require('cids')
1818

1919
function stringifyMh (files) {
2020
return files.map((file) => {
21-
file.multihash = mh.toB58String(file.multihash)
21+
file.multihash = new CID(file.multihash).toBaseEncodedString()
2222
return file
2323
})
2424
}
@@ -225,7 +225,7 @@ describe('with dag-api', function () {
225225
expect(err).to.not.exist()
226226
expect(nodes.length).to.be.eql(1)
227227
// always yield empty node
228-
expect(mh.toB58String(nodes[0].multihash)).to.be.eql('QmbFMke1KXqnYyBBWxB74N4c5SBnJMVAiMNRcGu6x1AwQH')
228+
expect(new CID(nodes[0].multihash).toBaseEncodedString()).to.be.eql('QmbFMke1KXqnYyBBWxB74N4c5SBnJMVAiMNRcGu6x1AwQH')
229229
done()
230230
}))
231231
})

0 commit comments

Comments (0)