Skip to content
This repository was archived by the owner on Aug 12, 2020. It is now read-only.

Commit 69915da

Browse files
alanshaw authored and daviddias committed
feat: allow specify hash algorithm for large files (#184)
1 parent 33c9d1c commit 69915da

File tree

5 files changed

+40
-5
lines changed

5 files changed

+40
-5
lines changed

README.md

+2-1
Original file line numberDiff line numberDiff line change
@@ -115,7 +115,7 @@ The `import` object is a duplex pull stream that takes objects of the form:
115115
}
116116
```
117117

118-
`import` will outoyt file info objects as files get stored in IPFS. When stats on a node are emitted they are guaranteed to have been written.
118+
`import` will output file info objects as files get stored in IPFS. When stats on a node are emitted they are guaranteed to have been written.
119119

120120
`dag` is an instance of the [`IPLD Resolver`](https://github.com/ipld/js-ipld-resolver) or the [`js-ipfs` `dag api`](https://github.com/ipfs/interface-ipfs-core/tree/master/API/dag)
121121

@@ -140,6 +140,7 @@ The input's file paths and directory structure will be preserved in the [`dag-pb
140140
- bits (positive integer, defaults to `8`): the number of bits at each bucket of the HAMT
141141
- `progress` (function): a function that will be called with the byte length of chunks as a file is added to ipfs.
142142
- `onlyHash` (boolean, defaults to false): Only chunk and hash - do not write to disk
143+
- `hashAlg` (string): multihash hashing algorithm to use
143144

144145
### Exporter
145146

src/builder/reduce.js

+1-1
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@ module.exports = function (file, ipldResolver, options) {
3232
})
3333

3434
waterfall([
35-
(cb) => DAGNode.create(f.marshal(), links, cb),
35+
(cb) => DAGNode.create(f.marshal(), links, options.hashAlg, cb),
3636
(node, cb) => {
3737
if (options.onlyHash) return cb(null, node)
3838

src/importer/dir-flat.js

+3-2
Original file line numberDiff line numberDiff line change
@@ -56,12 +56,13 @@ class DirFlat extends Dir {
5656
})
5757

5858
const dir = new UnixFS('directory')
59+
const options = this._options
5960

6061
waterfall(
6162
[
62-
(callback) => DAGNode.create(dir.marshal(), links, callback),
63+
(callback) => DAGNode.create(dir.marshal(), links, options.hashAlg, callback),
6364
(node, callback) => {
64-
if (this._options.onlyHash) return callback(null, node)
65+
if (options.onlyHash) return callback(null, node)
6566

6667
ipldResolver.put(
6768
node,

src/importer/dir-sharded.js

+1-1
Original file line numberDiff line numberDiff line change
@@ -144,7 +144,7 @@ function flush (options, bucket, path, ipldResolver, source, callback) {
144144
dir.hashType = options.hashFn.code
145145
waterfall(
146146
[
147-
(callback) => DAGNode.create(dir.marshal(), links, callback),
147+
(callback) => DAGNode.create(dir.marshal(), links, options.hashAlg, callback),
148148
(node, callback) => {
149149
if (options.onlyHash) return callback(null, node)
150150

test/test-builder.js

+33
Original file line numberDiff line numberDiff line change
@@ -57,5 +57,38 @@ module.exports = (repo) => {
5757
)
5858
}, done)
5959
})
60+
61+
it('allows multihash hash algorithm to be specified for big file', (done) => {
62+
eachSeries(Object.keys(mh.names), (hashAlg, cb) => {
63+
const options = { hashAlg, strategy: 'flat' }
64+
const content = String(Math.random() + Date.now())
65+
const inputFile = {
66+
path: content + '.txt',
67+
// Bigger than maxChunkSize
68+
content: Buffer.alloc(262144 + 5).fill(1)
69+
}
70+
71+
const onCollected = (err, nodes) => {
72+
if (err) return cb(err)
73+
74+
const node = nodes[0]
75+
76+
try {
77+
expect(node).to.exist()
78+
expect(mh.decode(node.multihash).name).to.equal(hashAlg)
79+
} catch (err) {
80+
return cb(err)
81+
}
82+
83+
cb()
84+
}
85+
86+
pull(
87+
pull.values([Object.assign({}, inputFile)]),
88+
createBuilder(FixedSizeChunker, ipldResolver, options),
89+
pull.collect(onCollected)
90+
)
91+
}, done)
92+
})
6093
})
6194
}

0 commit comments

Comments
 (0)