Skip to content

Commit 9794e85

Browse files
committed
fix: use @ipld/dag-pb instead of ipld-dag-pb
Replace the legacy `ipld-dag-pb` dependency with `@ipld/dag-pb`, the rewritten dag-pb codec, migrating from the `DAGNode`/`DAGLink` class API to the new `encode(prepare(...))` functions and plain `{ Name, Tsize, Hash }` link objects.
1 parent 010ab47 commit 9794e85

File tree

6 files changed

+61
-29
lines changed

6 files changed

+61
-29
lines changed

packages/ipfs-unixfs-importer/package.json

+1-1
Original file line numberDiff line numberDiff line change
@@ -48,12 +48,12 @@
4848
"sinon": "^9.0.1"
4949
},
5050
"dependencies": {
51+
"@ipld/dag-pb": "0.0.1",
5152
"bl": "^4.0.0",
5253
"err-code": "^2.0.0",
5354
"hamt-sharding": "^1.0.0",
5455
"ipfs-unixfs": "^2.0.4",
5556
"ipfs-utils": "^5.0.0",
56-
"ipld-dag-pb": "^0.20.0",
5757
"it-all": "^1.0.1",
5858
"it-batch": "^1.0.3",
5959
"it-first": "^1.0.1",

packages/ipfs-unixfs-importer/src/dag-builder/dir.js

+4-3
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,9 @@
33
const UnixFS = require('ipfs-unixfs')
44
const persist = require('../utils/persist')
55
const {
6-
DAGNode
7-
} = require('ipld-dag-pb')
6+
encode,
7+
prepare
8+
} = require('@ipld/dag-pb')
89

910
const dirBuilder = async (item, block, options) => {
1011
const unixfs = new UnixFS({
@@ -13,7 +14,7 @@ const dirBuilder = async (item, block, options) => {
1314
mode: item.mode
1415
})
1516

16-
const buffer = new DAGNode(unixfs.marshal()).serialize()
17+
const buffer = encode(prepare({ Data: unixfs.marshal() }))
1718
const cid = await persist(buffer, block, options)
1819
const path = item.path
1920

packages/ipfs-unixfs-importer/src/dag-builder/file/buffer-importer.js

+4-3
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,9 @@
33
const UnixFS = require('ipfs-unixfs')
44
const persist = require('../../utils/persist')
55
const {
6-
DAGNode
7-
} = require('ipld-dag-pb')
6+
encode,
7+
prepare
8+
} = require('@ipld/dag-pb')
89

910
async function * bufferImporter (file, source, block, options) {
1011
for await (let buffer of source) {
@@ -27,7 +28,7 @@ async function * bufferImporter (file, source, block, options) {
2728
mode: file.mode
2829
})
2930

30-
buffer = new DAGNode(unixfs.marshal()).serialize()
31+
buffer = encode(prepare({ Data: unixfs.marshal() }))
3132
}
3233

3334
return {

packages/ipfs-unixfs-importer/src/dag-builder/file/index.js

+19-8
Original file line numberDiff line numberDiff line change
@@ -4,9 +4,9 @@ const errCode = require('err-code')
44
const UnixFS = require('ipfs-unixfs')
55
const persist = require('../../utils/persist')
66
const {
7-
DAGNode,
8-
DAGLink
9-
} = require('ipld-dag-pb')
7+
encode,
8+
prepare
9+
} = require('@ipld/dag-pb')
1010
const all = require('it-all')
1111
const parallelBatch = require('it-parallel-batch')
1212
const mh = require('multihashing-async').multihash
@@ -66,7 +66,7 @@ const reduce = (file, block, options) => {
6666
})
6767

6868
const multihash = mh.decode(leaf.cid.multihash)
69-
buffer = new DAGNode(leaf.unixfs.marshal()).serialize()
69+
buffer = encode(prepare({ Data: leaf.unixfs.marshal() }))
7070

7171
leaf.cid = await persist(buffer, block, {
7272
...options,
@@ -109,7 +109,11 @@ const reduce = (file, block, options) => {
109109
// node is a leaf buffer
110110
f.addBlockSize(leaf.size)
111111

112-
return new DAGLink(leaf.name, leaf.size, leaf.cid)
112+
return {
113+
Name: leaf.name === undefined ? '' : leaf.name,
114+
Tsize: leaf.size,
115+
Hash: leaf.cid
116+
}
113117
}
114118

115119
if (!leaf.unixfs.data) {
@@ -120,11 +124,18 @@ const reduce = (file, block, options) => {
120124
f.addBlockSize(leaf.unixfs.data.length)
121125
}
122126

123-
return new DAGLink(leaf.name, leaf.size, leaf.cid)
127+
return {
128+
Name: leaf.name === undefined ? '' : leaf.name,
129+
Tsize: leaf.size,
130+
Hash: leaf.cid
131+
}
124132
})
125133

126-
const node = new DAGNode(f.marshal(), links)
127-
const buffer = node.serialize()
134+
const node = {
135+
Data: f.marshal(),
136+
Links: links
137+
}
138+
const buffer = encode(prepare(node))
128139
const cid = await persist(buffer, block, options)
129140

130141
return {

packages/ipfs-unixfs-importer/src/dir-flat.js

+10-6
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,9 @@
11
'use strict'
22

33
const {
4-
DAGLink,
5-
DAGNode
6-
} = require('ipld-dag-pb')
4+
encode,
5+
prepare
6+
} = require('@ipld/dag-pb')
77
const UnixFS = require('ipfs-unixfs')
88
const Dir = require('./dir')
99
const persist = require('./utils/persist')
@@ -65,7 +65,11 @@ class DirFlat extends Dir {
6565
}
6666
}
6767

68-
links.push(new DAGLink(children[i], child.size, child.cid))
68+
links.push({
69+
Name: children[i],
70+
Tsize: child.size,
71+
Hash: child.cid
72+
})
6973
}
7074

7175
const unixfs = new UnixFS({
@@ -74,8 +78,8 @@ class DirFlat extends Dir {
7478
mode: this.mode
7579
})
7680

77-
const node = new DAGNode(unixfs.marshal(), links)
78-
const buffer = node.serialize()
81+
const node = { Data: unixfs.marshal(), Links: links }
82+
const buffer = encode(prepare(node))
7983
const cid = await persist(buffer, block, this.options)
8084
const size = buffer.length + node.Links.reduce((acc, curr) => acc + curr.Tsize, 0)
8185

packages/ipfs-unixfs-importer/src/dir-sharded.js

+23-8
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,9 @@
11
'use strict'
22

33
const {
4-
DAGLink,
5-
DAGNode
6-
} = require('ipld-dag-pb')
4+
encode,
5+
prepare
6+
} = require('@ipld/dag-pb')
77
const UnixFS = require('ipfs-unixfs')
88
const multihashing = require('multihashing-async')
99
const Dir = require('./dir')
@@ -110,7 +110,11 @@ async function * flush (path, bucket, block, shardRoot, options) {
110110
shard = subShard
111111
}
112112

113-
links.push(new DAGLink(labelPrefix, shard.size, shard.cid))
113+
links.push({
114+
Name: labelPrefix,
115+
Tsize: shard.size,
116+
Hash: shard.cid
117+
})
114118
childrenSize += shard.size
115119
} else if (typeof child.value.flush === 'function') {
116120
const dir = child.value
@@ -123,7 +127,11 @@ async function * flush (path, bucket, block, shardRoot, options) {
123127
}
124128

125129
const label = labelPrefix + child.key
126-
links.push(new DAGLink(label, flushedDir.size, flushedDir.cid))
130+
links.push({
131+
Name: label,
132+
Tsize: flushedDir.size,
133+
Hash: flushedDir.cid
134+
})
127135

128136
childrenSize += flushedDir.size
129137
} else {
@@ -136,7 +144,11 @@ async function * flush (path, bucket, block, shardRoot, options) {
136144
const label = labelPrefix + child.key
137145
const size = value.size
138146

139-
links.push(new DAGLink(label, size, value.cid))
147+
links.push({
148+
Name: label,
149+
Tsize: size,
150+
Hash: value.cid
151+
})
140152
childrenSize += size
141153
}
142154
}
@@ -153,8 +165,11 @@ async function * flush (path, bucket, block, shardRoot, options) {
153165
mode: shardRoot && shardRoot.mode
154166
})
155167

156-
const node = new DAGNode(dir.marshal(), links)
157-
const buffer = node.serialize()
168+
const node = {
169+
Data: dir.marshal(),
170+
Links: links
171+
}
172+
const buffer = encode(prepare(node))
158173
const cid = await persist(buffer, block, options)
159174
const size = buffer.length + childrenSize
160175

0 commit comments

Comments
 (0)