Skip to content

Commit 8e9a01c

Browse files
committed
chore: use new createNode() and createLink() from dag-pb
1 parent 9d20ba9 commit 8e9a01c

File tree

9 files changed

+42
-125
lines changed

9 files changed

+42
-125
lines changed

packages/ipfs-unixfs-exporter/package.json

+1-1
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,7 @@
5858
},
5959
"dependencies": {
6060
"@ipld/dag-cbor": "^6.0.4",
61-
"@ipld/dag-pb": "^2.0.2",
61+
"@ipld/dag-pb": "^2.1.0",
6262
"err-code": "^3.0.1",
6363
"hamt-sharding": "^2.0.0",
6464
"interface-blockstore": "^0.0.5",

packages/ipfs-unixfs-exporter/test/exporter-sharded.spec.js

+8-16
Original file line numberDiff line numberDiff line change
@@ -238,25 +238,17 @@ describe('exporter sharded', function () {
238238
it('exports a file from a sharded directory inside a regular directory inside a sharded directory', async () => {
239239
const dirCid = await createShard(15)
240240

241-
const nodeBlockBuf = dagPb.encode({
242-
Data: new UnixFS({ type: 'directory' }).marshal(),
243-
Links: [{
244-
Name: 'shard',
245-
Tsize: 5,
246-
Hash: dirCid
247-
}]
248-
})
241+
const nodeBlock = dagPb.createNode(
242+
new UnixFS({ type: 'directory' }).marshal(),
243+
[dagPb.createLink('shard', 5, dirCid)])
244+
const nodeBlockBuf = dagPb.encode(nodeBlock)
249245
const nodeBlockCid = CID.createV0(await sha256.digest(nodeBlockBuf))
250246
await block.put(nodeBlockCid, nodeBlockBuf)
251247

252-
const shardNodeBuf = dagPb.encode({
253-
Data: new UnixFS({ type: 'hamt-sharded-directory' }).marshal(),
254-
Links: [{
255-
Name: '75normal-dir',
256-
Tsize: nodeBlockBuf.length,
257-
Hash: nodeBlockCid
258-
}]
259-
})
248+
const shardNode = dagPb.createNode(
249+
new UnixFS({ type: 'hamt-sharded-directory' }).marshal(),
250+
[dagPb.createLink('75normal-dir', nodeBlockBuf.length, nodeBlockCid)])
251+
const shardNodeBuf = dagPb.encode(shardNode)
260252
const shardNodeCid = CID.createV0(await sha256.digest(shardNodeBuf))
261253
await block.put(shardNodeCid, shardNodeBuf)
262254

packages/ipfs-unixfs-exporter/test/exporter.spec.js

+12-52
Original file line numberDiff line numberDiff line change
@@ -56,10 +56,7 @@ describe('exporter', () => {
5656
type: options.type,
5757
data: options.content
5858
})
59-
const node = {
60-
Data: file.marshal(),
61-
Links: options.links
62-
}
59+
const node = dagPb.createNode(file.marshal(), options.links)
6360
const buf = dagPb.encode(node)
6461
const cid = CID.createV0(await sha256.digest(buf))
6562
await block.put(cid, buf)
@@ -147,19 +144,10 @@ describe('exporter', () => {
147144
const leaf = UnixFS.unmarshal(child.node.Data)
148145

149146
file.addBlockSize(leaf.fileSize())
150-
151-
links.push({
152-
Name: '',
153-
Tsize: child.node.Data != null ? child.node.Data.length : 0,
154-
Hash: child.cid
155-
})
156-
}
157-
158-
const node = {
159-
Data: file.marshal(),
160-
Links: links
147+
links.push(dagPb.createLink('', child.node.Data != null ? child.node.Data.length : 0, child.cid))
161148
}
162149

150+
const node = dagPb.createNode(file.marshal(), links)
163151
const nodeBlock = dagPb.encode(node)
164152
const nodeCid = CID.createV0(await sha256.digest(nodeBlock))
165153
await block.put(nodeCid, nodeBlock)
@@ -265,19 +253,13 @@ describe('exporter', () => {
265253
type: 'raw',
266254
data: content.slice(0, 5)
267255
})
268-
const chunkNode1 = {
269-
Data: chunk1.marshal(),
270-
Links: []
271-
}
256+
const chunkNode1 = dagPb.createNode(chunk1.marshal())
272257
const chunkBlock1 = dagPb.encode(chunkNode1)
273258
const chunkCid1 = CID.createV0(await sha256.digest(chunkBlock1))
274259
await block.put(chunkCid1, chunkBlock1)
275260

276261
const chunk2 = new UnixFS({ type: 'raw', data: content.slice(5) })
277-
const chunkNode2 = {
278-
Data: chunk2.marshal(),
279-
Links: []
280-
}
262+
const chunkNode2 = dagPb.createNode(chunk2.marshal())
281263
const chunkBlock2 = dagPb.encode(chunkNode2)
282264
const chunkCid2 = CID.createV0(await sha256.digest(chunkBlock2))
283265
await block.put(chunkCid2, chunkBlock2)
@@ -288,18 +270,9 @@ describe('exporter', () => {
288270
file.addBlockSize(5)
289271
file.addBlockSize(5)
290272

291-
const fileNode = dagPb.prepare({
292-
Data: file.marshal(),
293-
Links: [{
294-
Name: '',
295-
Tsize: chunkNode1.Data != null ? chunkNode1.Data.length : 0,
296-
Hash: chunkCid1.toV0()
297-
}, {
298-
Name: '',
299-
Tsize: chunkNode2.Data != null ? chunkNode2.Data.length : 0,
300-
Hash: chunkCid2.toV0()
301-
}]
302-
})
273+
const fileNode = dagPb.createNode(file.marshal(), [
274+
dagPb.createLink('', chunkNode1.Data != null ? chunkNode1.Data.length : 0, chunkCid1.toV0()),
275+
dagPb.createLink('', chunkNode2.Data != null ? chunkNode2.Data.length : 0, chunkCid2.toV0())])
303276
const fileBlock = dagPb.encode(fileNode)
304277
const fileCid = CID.createV0(await sha256.digest(fileBlock))
305278
await block.put(fileCid, fileBlock)
@@ -321,11 +294,7 @@ describe('exporter', () => {
321294
const chunk = await dagPut({ content: uint8ArrayConcat(await all(randomBytes(100))) })
322295
const result = await dagPut({
323296
content: uint8ArrayConcat(await all(randomBytes(100))),
324-
links: [{
325-
Name: '',
326-
Tsize: chunk.node.Data != null ? chunk.node.Data.length : 0,
327-
Hash: chunk.cid
328-
}]
297+
links: [dagPb.createLink('', chunk.node.Data != null ? chunk.node.Data.length : 0, chunk.cid)]
329298
})
330299

331300
if (!result.file.data) {
@@ -1013,10 +982,7 @@ describe('exporter', () => {
1013982
})
1014983

1015984
it('errors when we export a non-unixfs dag-pb node', async () => {
1016-
const dagpbBlock = dagPb.encode({
1017-
Data: Uint8Array.from([0, 1, 2, 3, 4]),
1018-
Links: []
1019-
})
985+
const dagpbBlock = dagPb.encode(dagPb.createNode(Uint8Array.from([0, 1, 2, 3, 4])))
1020986
const dagpbCid = CID.createV0(await sha256.digest(dagpbBlock))
1021987
await block.put(dagpbCid, dagpbBlock)
1022988

@@ -1037,14 +1003,8 @@ describe('exporter', () => {
10371003
})
10381004
file.addBlockSize(100)
10391005

1040-
const dagpbBuffer = dagPb.encode({
1041-
Data: file.marshal(),
1042-
Links: [{
1043-
Name: '',
1044-
Tsize: cborBlock.length,
1045-
Hash: cborCid
1046-
}]
1047-
})
1006+
const dagpbBuffer = dagPb.encode(dagPb.createNode(file.marshal(),
1007+
[dagPb.createLink('', cborBlock.length, cborCid)]))
10481008
const dagpbCid = CID.createV0(await sha256.digest(dagpbBuffer))
10491009
await block.put(dagpbCid, dagpbBuffer)
10501010

packages/ipfs-unixfs-importer/package.json

+1-1
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,7 @@
4747
"util": "^0.12.3"
4848
},
4949
"dependencies": {
50-
"@ipld/dag-pb": "^2.0.2",
50+
"@ipld/dag-pb": "^2.1.0",
5151
"bl": "^5.0.0",
5252
"err-code": "^3.0.1",
5353
"hamt-sharding": "^2.0.0",

packages/ipfs-unixfs-importer/src/dag-builder/dir.js

+2-2
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22

33
const { UnixFS } = require('ipfs-unixfs')
44
const persist = require('../utils/persist')
5-
const { encode, prepare } = require('@ipld/dag-pb')
5+
const { encode, createNode } = require('@ipld/dag-pb')
66

77
/**
88
* @typedef {import('../types').Directory} Directory
@@ -18,7 +18,7 @@ const dirBuilder = async (item, blockstore, options) => {
1818
mode: item.mode
1919
})
2020

21-
const buffer = encode(prepare({ Data: unixfs.marshal() }))
21+
const buffer = encode(createNode(unixfs.marshal()))
2222
const cid = await persist(buffer, blockstore, options)
2323
const path = item.path
2424

packages/ipfs-unixfs-importer/src/dag-builder/file/buffer-importer.js

+1-4
Original file line numberDiff line numberDiff line change
@@ -37,10 +37,7 @@ async function * bufferImporter (file, block, options) {
3737
mode: file.mode
3838
})
3939

40-
buffer = dagPb.encode({
41-
Data: unixfs.marshal(),
42-
Links: []
43-
})
40+
buffer = dagPb.encode(dagPb.createNode(unixfs.marshal()))
4441
}
4542

4643
return {

packages/ipfs-unixfs-importer/src/dag-builder/file/index.js

+7-19
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
const errCode = require('err-code')
44
const { UnixFS } = require('ipfs-unixfs')
55
const persist = require('../../utils/persist')
6-
const { encode, prepare } = require('@ipld/dag-pb')
6+
const { encode, createNode, createLink } = require('@ipld/dag-pb')
77
const parallelBatch = require('it-parallel-batch')
88
const rawCodec = require('multiformats/codecs/raw')
99
const dagPb = require('@ipld/dag-pb')
@@ -87,7 +87,7 @@ const reduce = (file, blockstore, options) => {
8787
data: buffer
8888
})
8989

90-
buffer = encode(prepare({ Data: leaf.unixfs.marshal() }))
90+
buffer = encode(createNode(leaf.unixfs.marshal()))
9191

9292
// // TODO vmx 2021-03-26: This is what the original code does, it checks
9393
// // the multihash of the original leaf node and uses then the same
@@ -148,12 +148,7 @@ const reduce = (file, blockstore, options) => {
148148
if (leaf.cid.code === rawCodec.code) {
149149
// node is a leaf buffer
150150
f.addBlockSize(leaf.size)
151-
152-
return {
153-
Name: '',
154-
Tsize: leaf.size,
155-
Hash: leaf.cid
156-
}
151+
return createLink('', leaf.size, leaf.cid)
157152
}
158153

159154
if (!leaf.unixfs || !leaf.unixfs.data) {
@@ -164,25 +159,18 @@ const reduce = (file, blockstore, options) => {
164159
f.addBlockSize(leaf.unixfs.data.length)
165160
}
166161

167-
return {
168-
Name: '',
169-
Tsize: leaf.size,
170-
Hash: leaf.cid
171-
}
162+
return createLink('', leaf.size, leaf.cid)
172163
})
173164

174-
const node = {
175-
Data: f.marshal(),
176-
Links: links
177-
}
178-
const buffer = encode(prepare(node))
165+
const node = createNode(f.marshal(), links)
166+
const buffer = encode(node)
179167
const cid = await persist(buffer, blockstore, options)
180168

181169
return {
182170
cid,
183171
path: file.path,
184172
unixfs: f,
185-
size: buffer.length + node.Links.reduce((acc, curr) => acc + curr.Tsize, 0)
173+
size: buffer.length + node.Links.reduce((acc, curr) => acc + (curr.Tsize || 0), 0)
186174
}
187175
}
188176

packages/ipfs-unixfs-importer/src/dir-flat.js

+4-8
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
'use strict'
22

3-
const { encode, prepare } = require('@ipld/dag-pb')
3+
const { encode, createNode, createLink } = require('@ipld/dag-pb')
44
const { UnixFS } = require('ipfs-unixfs')
55
const Dir = require('./dir')
66
const persist = require('./utils/persist')
@@ -90,11 +90,7 @@ class DirFlat extends Dir {
9090
}
9191

9292
if (child.size != null && child.cid) {
93-
links.push({
94-
Name: children[i],
95-
Tsize: child.size,
96-
Hash: child.cid
97-
})
93+
links.push(createLink(children[i], child.size, child.cid))
9894
}
9995
}
10096

@@ -105,8 +101,8 @@ class DirFlat extends Dir {
105101
})
106102

107103
/** @type {PBNode} */
108-
const node = { Data: unixfs.marshal(), Links: links }
109-
const buffer = encode(prepare(node))
104+
const node = createNode(unixfs.marshal(), links)
105+
const buffer = encode(node)
110106
const cid = await persist(buffer, block, this.options)
111107
const size = buffer.length + node.Links.reduce(
112108
/**

packages/ipfs-unixfs-importer/src/dir-sharded.js

+6-22
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
'use strict'
22

3-
const { encode, prepare } = require('@ipld/dag-pb')
3+
const { encode, createNode, createLink } = require('@ipld/dag-pb')
44
const { UnixFS } = require('ipfs-unixfs')
55
const Dir = require('./dir')
66
const persist = require('./utils/persist')
@@ -116,11 +116,7 @@ async function * flush (bucket, blockstore, shardRoot, options) {
116116
throw new Error('Could not flush sharded directory, no subshard found')
117117
}
118118

119-
links.push({
120-
Name: labelPrefix,
121-
Tsize: shard.size,
122-
Hash: shard.cid
123-
})
119+
links.push(createLink(labelPrefix, shard.size, shard.cid))
124120
childrenSize += shard.size
125121
} else if (typeof child.value.flush === 'function') {
126122
const dir = child.value
@@ -133,12 +129,7 @@ async function * flush (bucket, blockstore, shardRoot, options) {
133129
}
134130

135131
const label = labelPrefix + child.key
136-
links.push({
137-
Name: label,
138-
Tsize: flushedDir.size,
139-
Hash: flushedDir.cid
140-
})
141-
132+
links.push(createLink(label, flushedDir.size, flushedDir.cid))
142133
childrenSize += flushedDir.size
143134
} else {
144135
const value = child.value
@@ -150,11 +141,7 @@ async function * flush (bucket, blockstore, shardRoot, options) {
150141
const label = labelPrefix + child.key
151142
const size = value.size
152143

153-
links.push({
154-
Name: label,
155-
Tsize: size,
156-
Hash: value.cid
157-
})
144+
links.push(createLink(label, size, value.cid))
158145
childrenSize += size
159146
}
160147
}
@@ -171,11 +158,8 @@ async function * flush (bucket, blockstore, shardRoot, options) {
171158
mode: shardRoot && shardRoot.mode
172159
})
173160

174-
const node = {
175-
Data: dir.marshal(),
176-
Links: links
177-
}
178-
const buffer = encode(prepare(node))
161+
const node = createNode(dir.marshal(), links)
162+
const buffer = encode(node)
179163
const cid = await persist(buffer, blockstore, options)
180164
const size = buffer.length + childrenSize
181165

0 commit comments

Comments
 (0)