Skip to content

Commit 3871d5e

Browse files
committed
fix: use @ipld/dag-pb instead of ipld-dag-pb
Use the rewrite of the dag-pb codec.
1 parent e57ba16 commit 3871d5e

File tree

6 files changed

+68
-30
lines changed

6 files changed

+68
-30
lines changed

packages/ipfs-unixfs-importer/package.json

+2-1
Original file line numberDiff line numberDiff line change
@@ -35,18 +35,19 @@
3535
"@types/mocha": "^8.2.0",
3636
"aegir": "^30.3.0",
3737
"ipld": "^0.28.0",
38+
"ipld-dag-pb": "^0.21.0",
3839
"ipld-in-memory": "^7.0.0",
3940
"it-buffer-stream": "^2.0.0",
4041
"multicodec": "^2.0.0",
4142
"nyc": "^15.0.0"
4243
},
4344
"dependencies": {
45+
"@ipld/dag-pb": "0.0.1",
4446
"bl": "^4.0.0",
4547
"cids": "^1.1.5",
4648
"err-code": "^3.0.0",
4749
"hamt-sharding": "^2.0.0",
4850
"ipfs-unixfs": "^3.0.0",
49-
"ipld-dag-pb": "^0.21.0",
5051
"it-all": "^1.0.1",
5152
"it-batch": "^1.0.3",
5253
"it-first": "^1.0.1",

packages/ipfs-unixfs-importer/src/dag-builder/dir.js

+5-3
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,10 @@
33
const UnixFS = require('ipfs-unixfs')
44
const persist = require('../utils/persist')
55
const {
6-
DAGNode
7-
} = require('ipld-dag-pb')
6+
encode,
7+
prepare
8+
// @ts-ignore
9+
} = require('@ipld/dag-pb')
810

911
/**
1012
* @typedef {import('../').Directory} Directory
@@ -20,7 +22,7 @@ const dirBuilder = async (item, block, options) => {
2022
mode: item.mode
2123
})
2224

23-
const buffer = new DAGNode(unixfs.marshal()).serialize()
25+
const buffer = encode(prepare({ Data: unixfs.marshal() }))
2426
const cid = await persist(buffer, block, options)
2527
const path = item.path
2628

packages/ipfs-unixfs-importer/src/dag-builder/file/buffer-importer.js

+5-3
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,10 @@
33
const UnixFS = require('ipfs-unixfs')
44
const persist = require('../../utils/persist')
55
const {
6-
DAGNode
7-
} = require('ipld-dag-pb')
6+
encode,
7+
prepare
8+
// @ts-ignore
9+
} = require('@ipld/dag-pb')
810

911
/**
1012
* @typedef {import('../../').BlockAPI} BlockAPI
@@ -38,7 +40,7 @@ async function * bufferImporter (file, block, options) {
3840
mode: file.mode
3941
})
4042

41-
buffer = new DAGNode(unixfs.marshal()).serialize()
43+
buffer = encode(prepare({ Data: unixfs.marshal() }))
4244
}
4345

4446
return {

packages/ipfs-unixfs-importer/src/dag-builder/file/index.js

+20-8
Original file line numberDiff line numberDiff line change
@@ -4,9 +4,10 @@ const errCode = require('err-code')
44
const UnixFS = require('ipfs-unixfs')
55
const persist = require('../../utils/persist')
66
const {
7-
DAGNode,
8-
DAGLink
9-
} = require('ipld-dag-pb')
7+
encode,
8+
prepare
9+
// @ts-ignore
10+
} = require('@ipld/dag-pb')
1011
const all = require('it-all')
1112
const parallelBatch = require('it-parallel-batch')
1213
const mh = require('multihashing-async').multihash
@@ -93,7 +94,7 @@ const reduce = (file, block, options) => {
9394
})
9495

9596
const multihash = mh.decode(leaf.cid.multihash)
96-
buffer = new DAGNode(leaf.unixfs.marshal()).serialize()
97+
buffer = encode(prepare({ Data: leaf.unixfs.marshal() }))
9798

9899
leaf.cid = await persist(buffer, block, {
99100
...options,
@@ -136,7 +137,11 @@ const reduce = (file, block, options) => {
136137
// node is a leaf buffer
137138
f.addBlockSize(leaf.size)
138139

139-
return new DAGLink('', leaf.size, leaf.cid)
140+
return {
141+
Name: '',
142+
Tsize: leaf.size,
143+
Hash: leaf.cid
144+
}
140145
}
141146

142147
if (!leaf.unixfs || !leaf.unixfs.data) {
@@ -147,11 +152,18 @@ const reduce = (file, block, options) => {
147152
f.addBlockSize(leaf.unixfs.data.length)
148153
}
149154

150-
return new DAGLink('', leaf.size, leaf.cid)
155+
return {
156+
Name: '',
157+
Tsize: leaf.size,
158+
Hash: leaf.cid
159+
}
151160
})
152161

153-
const node = new DAGNode(f.marshal(), links)
154-
const buffer = node.serialize()
162+
const node = {
163+
Data: f.marshal(),
164+
Links: links
165+
}
166+
const buffer = encode(prepare(node))
155167
const cid = await persist(buffer, block, options)
156168

157169
return {

packages/ipfs-unixfs-importer/src/dir-flat.js

+12-7
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,10 @@
11
'use strict'
22

33
const {
4-
DAGLink,
5-
DAGNode
6-
} = require('ipld-dag-pb')
4+
encode,
5+
prepare
6+
// @ts-ignore
7+
} = require('@ipld/dag-pb')
78
const UnixFS = require('ipfs-unixfs')
89
const Dir = require('./dir')
910
const persist = require('./utils/persist')
@@ -92,7 +93,11 @@ class DirFlat extends Dir {
9293
}
9394

9495
if (child.size != null && child.cid) {
95-
links.push(new DAGLink(children[i], child.size, child.cid))
96+
links.push({
97+
Name: children[i],
98+
Tsize: child.size,
99+
Hash: child.cid
100+
})
96101
}
97102
}
98103

@@ -102,13 +107,13 @@ class DirFlat extends Dir {
102107
mode: this.mode
103108
})
104109

105-
const node = new DAGNode(unixfs.marshal(), links)
106-
const buffer = node.serialize()
110+
const node = { Data: unixfs.marshal(), Links: links }
111+
const buffer = encode(prepare(node))
107112
const cid = await persist(buffer, block, this.options)
108113
const size = buffer.length + node.Links.reduce(
109114
/**
110115
* @param {number} acc
111-
* @param {DAGLink} curr
116+
* @param {{ Name: string, Tsize: number, Hash: CID }} curr
112117
*/
113118
(acc, curr) => acc + curr.Tsize,
114119
0)

packages/ipfs-unixfs-importer/src/dir-sharded.js

+24-8
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,10 @@
11
'use strict'
22

33
const {
4-
DAGLink,
5-
DAGNode
6-
} = require('ipld-dag-pb')
4+
encode,
5+
prepare
6+
// @ts-ignore
7+
} = require('@ipld/dag-pb')
78
const UnixFS = require('ipfs-unixfs')
89
const Dir = require('./dir')
910
const persist = require('./utils/persist')
@@ -119,7 +120,11 @@ async function * flush (bucket, block, shardRoot, options) {
119120
throw new Error('Could not flush sharded directory, no subshard found')
120121
}
121122

122-
links.push(new DAGLink(labelPrefix, shard.size, shard.cid))
123+
links.push({
124+
Name: labelPrefix,
125+
Tsize: shard.size,
126+
Hash: shard.cid
127+
})
123128
childrenSize += shard.size
124129
} else if (typeof child.value.flush === 'function') {
125130
const dir = child.value
@@ -132,7 +137,11 @@ async function * flush (bucket, block, shardRoot, options) {
132137
}
133138

134139
const label = labelPrefix + child.key
135-
links.push(new DAGLink(label, flushedDir.size, flushedDir.cid))
140+
links.push({
141+
Name: label,
142+
Tsize: flushedDir.size,
143+
Hash: flushedDir.cid
144+
})
136145

137146
childrenSize += flushedDir.size
138147
} else {
@@ -145,7 +154,11 @@ async function * flush (bucket, block, shardRoot, options) {
145154
const label = labelPrefix + child.key
146155
const size = value.size
147156

148-
links.push(new DAGLink(label, size, value.cid))
157+
links.push({
158+
Name: label,
159+
Tsize: size,
160+
Hash: value.cid
161+
})
149162
childrenSize += size
150163
}
151164
}
@@ -162,8 +175,11 @@ async function * flush (bucket, block, shardRoot, options) {
162175
mode: shardRoot && shardRoot.mode
163176
})
164177

165-
const node = new DAGNode(dir.marshal(), links)
166-
const buffer = node.serialize()
178+
const node = {
179+
Data: dir.marshal(),
180+
Links: links
181+
}
182+
const buffer = encode(prepare(node))
167183
const cid = await persist(buffer, block, options)
168184
const size = buffer.length + childrenSize
169185

0 commit comments

Comments (0)