This repository was archived by the owner on Mar 10, 2020. It is now read-only.

Commit 77b5c32

feat: Replacing chunks of files that do not increase the size of the file
1 parent 4d01ddb

File tree

11 files changed (+545, -137 lines)

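The headline change is that writing into an existing file at a given offset now replaces only the affected chunks instead of re-importing and growing the file. A minimal usage sketch of the call this enables, assuming `write` is the promisified function exported by the new src/core/write module below and that /example.txt already exists in MFS (the path and bytes are illustrative):

    // Illustrative only: overwrite 5 bytes at offset 10 of an existing MFS file.
    // Because the write stays inside the file's current bounds, the file size
    // should not change.
    const mfsWrite = require('./src/core/write')
    const write = mfsWrite(ipfs) // assumes `ipfs` is an initialised js-ipfs instance

    write('/example.txt', Buffer.from('hello'), { offset: 10, create: false })
      .then(() => console.log('chunk replaced without changing the file size'))
      .catch((err) => console.error(err))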

package.json

Lines changed: 3 additions & 1 deletion
@@ -52,7 +52,9 @@
     "ipfs-unixfs": "^0.1.14",
     "ipfs-unixfs-engine": "^0.28.1",
     "promisify-es6": "^1.0.3",
-    "pull-stream": "^3.6.7"
+    "pull-paramap": "^1.2.2",
+    "pull-stream": "^3.6.7",
+    "pull-traverse": "^1.0.3"
   },
   "pre-commit": [
     "lint",

src/core/read.js

Lines changed: 3 additions & 0 deletions
@@ -14,6 +14,7 @@ const {
   validatePath,
   traverseTo
 } = require('./utils')
+const log = require('debug')('mfs:read')
 
 const defaultOptions = {
   offset: 0,
@@ -35,6 +36,8 @@ module.exports = function mfsRead (ipfs) {
       return callback(error)
     }
 
+    log(`Reading ${path}`)
+
     waterfall([
       (done) => traverseTo(ipfs, path, {
         parents: false

src/core/utils.js

Lines changed: 5 additions & 6 deletions
@@ -116,19 +116,19 @@ const addLink = (ipfs, options, callback) => {
 
   waterfall([
     (done) => {
-      // remove the old link if necessary
+      // Remove the old link if necessary
       DAGNode.rmLink(options.parent, options.name, done)
     },
     (parent, done) => {
-      // add the new link
+      // Add the new link to the parent
      DAGNode.addLink(parent, new DAGLink(options.name, options.child.size, options.child.hash || options.child.multihash), done)
     },
     (parent, done) => {
       if (!options.flush) {
         return done()
       }
 
-      // add the new parent DAGNode
+      // Persist the new parent DAGNode
       ipfs.dag.put(parent, {
         cid: new CID(parent.hash || parent.multihash)
       }, (error) => done(error, parent))
@@ -167,14 +167,13 @@ const traverseTo = (ipfs, path, options, callback) => {
     node: rootNode,
     parent: null
   }, (parent, {pathSegment, index}, done) => {
-    const lastPathSegment = index === pathSegments.length - 1
     const existingLink = parent.node.links.find(link => link.name === pathSegment)
 
     log(`Looking for ${pathSegment} in ${parent.name}`)
 
     if (!existingLink) {
-      if (!lastPathSegment && !options.parents) {
-        return done(new Error(`Cannot create ${path} - intermediate directory '${pathSegment}' did not exist: Try again with the --parents flag`))
+      if (!options.parents) {
+        return done(new Error(`Cannot traverse to ${path} - '${pathSegment}' did not exist: Try again with the --parents flag`))
       }
 
       log(`Adding empty directory '${pathSegment}' to parent ${parent.name}`)

src/core/write.js

Lines changed: 0 additions & 125 deletions
This file was deleted.

src/core/write/find-file-size.js

Lines changed: 15 additions & 0 deletions
@@ -0,0 +1,15 @@
+'use strict'
+
+const unmarshal = require('ipfs-unixfs').unmarshal
+
+const findFileSize = (dagNode) => {
+  const meta = unmarshal(dagNode.data)
+
+  if (meta.blockSizes.length) {
+    return meta.blockSizes.reduce((acc, curr) => acc + curr, 0)
+  }
+
+  return meta.data.length
+}
+
+module.exports = findFileSize
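findFileSize reports a file's length by summing the sizes of its child blocks, falling back to the length of the inline data for files small enough to fit in a single node. A minimal sketch of the block-size arithmetic, using made-up chunk sizes rather than a real UnixFS node:

    // Illustrative values only: a file chunked into two full 262144-byte blocks
    // plus a 100-byte tail block.
    const blockSizes = [262144, 262144, 100]
    const size = blockSizes.reduce((acc, curr) => acc + curr, 0)
    console.log(size) // 524388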

src/core/write/import-node.js

Lines changed: 38 additions & 0 deletions
@@ -0,0 +1,38 @@
+'use strict'
+
+const {
+  waterfall
+} = require('async')
+const pull = require('pull-stream')
+const {
+  values,
+  collect
+} = pull
+const importer = require('ipfs-unixfs-engine').importer
+
+const importNode = (ipfs, parent, fileName, buffer, options, callback) => {
+  waterfall([
+    (done) => pull(
+      values([{
+        content: buffer
+      }]),
+      importer(ipfs._ipld, {
+        progress: options.progress,
+        hashAlg: options.hash,
+        cidVersion: options.cidVersion,
+        strategy: options.strategy
+      }),
+      collect(done)
+    ),
+    (results, done) => {
+      const imported = results[0]
+
+      return done(null, {
+        size: imported.size,
+        multihash: imported.multihash
+      })
+    }
+  ], callback)
+}
+
+module.exports = importNode
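importNode streams the supplied buffer through the UnixFS importer and hands back the size and multihash of the resulting DAG node. A hedged usage sketch, with an illustrative file name and buffer, assuming `ipfs` is an initialised js-ipfs instance and `containingFolder` is the directory node returned by traverseTo (the signature follows the code above):

    const importNode = require('./import-node')

    // Illustrative call: import the buffer as a new UnixFS node using the
    // balanced layout strategy, then log what was created.
    importNode(ipfs, containingFolder, 'example.txt', Buffer.from('hello world'), {
      progress: undefined,
      hash: undefined,
      cidVersion: undefined,
      strategy: 'balanced'
    }, (error, result) => {
      if (error) throw error
      console.log(result.size, result.multihash)
    })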

src/core/write/index.js

Lines changed: 129 additions & 0 deletions
@@ -0,0 +1,129 @@
+'use strict'
+
+const promisify = require('promisify-es6')
+const CID = require('cids')
+const {
+  waterfall
+} = require('async')
+const {
+  updateMfsRoot,
+  validatePath,
+  traverseTo,
+  addLink,
+  updateTree,
+  FILE_SEPARATOR
+} = require('../utils')
+const log = require('debug')('mfs:write')
+const bs58 = require('bs58')
+const importNode = require('./import-node')
+const updateNode = require('./update-node')
+
+const defaultOptions = {
+  offset: 0, // the offset in the file to begin writing
+  length: undefined, // how many bytes from the incoming buffer to write
+  create: false, // whether to create the file if it does not exist
+  truncate: false, // whether to truncate the file first
+  rawLeaves: false,
+  cidVersion: undefined,
+  hash: undefined,
+  parents: false,
+  progress: undefined,
+  strategy: 'balanced',
+  flush: true
+}
+
+module.exports = function mfsWrite (ipfs) {
+  return promisify((path, buffer, options, callback) => {
+    if (typeof options === 'function') {
+      callback = options
+      options = {}
+    }
+
+    options = Object.assign({}, defaultOptions, options)
+
+    try {
+      path = validatePath(path)
+    } catch (error) {
+      return callback(error)
+    }
+
+    if (options.offset < 0) {
+      return callback(new Error('cannot have negative write offset'))
+    }
+
+    if (options.length < 0) {
+      return callback(new Error('cannot have negative byte count'))
+    }
+
+    if (options.length === 0) {
+      return callback()
+    }
+
+    if (options.length) {
+      buffer = buffer.slice(0, options.length)
+    }
+
+    const parts = path
+      .split(FILE_SEPARATOR)
+      .filter(Boolean)
+    const fileName = parts.pop()
+
+    waterfall([
+      // walk the mfs tree to the containing folder node
+      (done) => traverseTo(ipfs, `${FILE_SEPARATOR}${parts.join(FILE_SEPARATOR)}`, options, done),
+      (containingFolder, done) => {
+        waterfall([
+          (next) => {
+            const existingChild = containingFolder.node.links.reduce((last, child) => {
+              if (child.name === fileName) {
+                return child
+              }
+
+              return last
+            }, null)
+
+            if (existingChild) {
+              log('Updating linked DAGNode', bs58.encode(existingChild.multihash))
+              // overwrite the existing file or part of it, possibly truncating what's left
+              updateNode(ipfs, new CID(existingChild.multihash), buffer, options, next)
+            } else {
+              if (!options.create) {
+                return next(new Error('file does not exist'))
+              }
+
+              if (options.offset) {
+                // pad the start of the file with zeros
+                buffer = Buffer.concat([Buffer.alloc(options.offset, 0), buffer])
+              }
+
+              log('Importing file', fileName, buffer.length, 'bytes')
+              importNode(ipfs, containingFolder, fileName, buffer, options, next)
+            }
+          },
+
+          // Add or replace the DAGLink in the containing directory
+          (child, next) => addLink(ipfs, {
+            parent: containingFolder.node,
+            name: fileName,
+            child: {
+              multihash: child.multihash || child.hash,
+              size: child.size
+            },
+            flush: options.flush
+          }, (error, newContaingFolder) => {
+            // Store new containing folder CID
+            containingFolder.node = newContaingFolder
+
+            next(error)
+          }),
+
+          // Update the MFS tree from the containingFolder upwards
+          (next) => updateTree(ipfs, containingFolder, next),
+
+          // Update the MFS record with the new CID for the root of the tree
+          (newRoot, next) => updateMfsRoot(ipfs, newRoot.node.multihash, next)
+        ], done)
+      }
+    ], callback)
+  })
+}
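One detail worth noting in the create branch above: when a brand new file is written at a non-zero offset, the start of the buffer is padded with zero bytes so the content lands at the requested position. A tiny self-contained sketch of that Buffer.concat call (values are illustrative):

    // Illustrative only: writing 'abc' at offset 3 into a file that does not
    // exist yet produces three leading zero bytes followed by the content.
    const offset = 3
    const padded = Buffer.concat([Buffer.alloc(offset, 0), Buffer.from('abc')])
    console.log(padded) // <Buffer 00 00 00 61 62 63>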

0 commit comments
