This repository was archived by the owner on Mar 10, 2020. It is now read-only.

Commit 2e94ac9: Add ipfs.files.get tests.
Parent: a86753a
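
For context, the interface exercised by these new tests is ipfs.files.get: it takes a multihash (base58-encoded string or raw Buffer) and calls back with a readable object stream of { path, content } entries, where content is itself a readable stream for file entries and is absent for directories. A minimal usage sketch, assuming a connected ipfs instance and the concat-stream helper this commit adds as a dependency:

const concat = require('concat-stream')

ipfs.files.get('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB', (err, stream) => {
  if (err) throw err
  // the stream emits one { path, content } object per file or directory
  stream.pipe(concat((files) => {
    // for plain files, content is a readable stream of the file's bytes
    files[0].content.pipe(concat((data) => {
      console.log(files[0].path, data.toString().slice(0, 40))
    }))
  }))
})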

2 files changed: +174, -25 lines


package.json: +1 line
@@ -31,6 +31,7 @@
     "bl": "^1.1.2",
     "bs58": "^3.0.0",
     "chai": "^3.5.0",
+    "concat-stream": "^1.5.1",
     "detect-node": "^2.0.3",
     "ipfs-merkle-dag": "^0.6.0",
     "readable-stream": "1.1.13"

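The new concat-stream dependency is the small buffering helper used throughout the tests below: it collects everything piped into it and hands the result to a single callback (a Buffer for byte streams, an array for object streams). A standalone sketch of that behaviour:

const fs = require('fs')
const concat = require('concat-stream')

fs.createReadStream(__filename)
  .pipe(concat((data) => {
    // data is one Buffer holding the entire stream's contents
    console.log('read', data.length, 'bytes')
  }))
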
src/files.js: +173, -25 lines
@@ -6,20 +6,28 @@ const bs58 = require('bs58')
 const Readable = require('readable-stream')
 const path = require('path')
 const fs = require('fs')
-const isNode = require('detect-node')
 const bl = require('bl')
+const concat = require('concat-stream')
+const through = require('through2')
 
 module.exports = (common) => {
-  describe('.files', () => {
+  describe.only('.files', () => {
     let smallFile
     let bigFile
+    let directoryContent
     let ipfs
 
     before((done) => {
-      smallFile = fs.readFileSync(path.join(__dirname, './data/testfile.txt')
-      )
-      bigFile = fs.readFileSync(path.join(__dirname, './data/15mb.random')
-      )
+      smallFile = fs.readFileSync(path.join(__dirname, './data/testfile.txt'))
+      bigFile = fs.readFileSync(path.join(__dirname, './data/15mb.random'))
+      directoryContent = {
+        'pp.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/pp.txt')),
+        'holmes.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/holmes.txt')),
+        'jungle.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/jungle.txt')),
+        'alice.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/alice.txt')),
+        'files/hello.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/files/hello.txt')),
+        'files/ipfs.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/files/ipfs.txt'))
+      }
 
       common.setup((err, _ipfs) => {
         expect(err).to.not.exist
@@ -100,15 +108,9 @@ module.exports = (common) => {
       })
 
       it('add a nested dir as array', (done) => {
-        if (!isNode) {
-          return done()
-          // can't run this test cause browserify
-          // can't shim readFileSync in runtime
-        }
-        const base = path.join(__dirname, 'data/test-folder')
         const content = (name) => ({
           path: `test-folder/${name}`,
-          content: fs.readFileSync(path.join(base, name))
+          content: directoryContent[name]
         })
         const emptyDir = (name) => ({
           path: `test-folder/${name}`
@@ -138,21 +140,13 @@ module.exports = (common) => {
 
     describe('.createAddStream', () => {
       it('stream of valid files and dirs', (done) => {
-        if (!isNode) {
-          return done()
-          // can't run this test cause browserify
-          // can't shim readFileSync in runtime
-        }
-
-        const base = path.join(__dirname, 'data/test-folder')
         const content = (name) => ({
           path: `test-folder/${name}`,
-          content: fs.readFileSync(path.join(base, name))
+          content: directoryContent[name]
         })
         const emptyDir = (name) => ({
           path: `test-folder/${name}`
         })
-
         const files = [
           content('pp.txt'),
           content('holmes.txt'),
@@ -241,7 +235,7 @@ module.exports = (common) => {
     })
 
     describe('.cat', () => {
-      it('with a bas58 multihash encoded string', () => {
+      it('with a base58 multihash encoded string', () => {
        const hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'
 
        return ipfs.cat(hash)
@@ -273,11 +267,165 @@ module.exports = (common) => {
           const hash = new Buffer(bs58.decode('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'))
           return ipfs.cat(hash)
             .then((stream) => {
-              stream.pipe(bl((err, bldata) => {
+              stream.pipe(bl((err, data) => {
                 expect(err).to.not.exist
-                expect(bldata.toString()).to.contain('Check out some of the other files in this directory:')
+                expect(data.toString()).to.contain('Check out some of the other files in this directory:')
+              }))
+            })
+        })
+      })
+    })
+
+    describe('.get', () => {
+      it('with a base58 encoded multihash', (done) => {
+        const hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'
+        ipfs.files.get(hash, (err, stream) => {
+          expect(err).to.not.exist
+          stream.pipe(concat((files) => {
+            expect(err).to.not.exist
+            expect(files).to.be.length(1)
+            expect(files[0].path).to.equal(hash)
+            files[0].content.pipe(concat((content) => {
+              expect(content.toString()).to.contain('Check out some of the other files in this directory:')
+              done()
+            }))
+          }))
+        })
+      })
+
+      it('with a multihash', (done) => {
+        const hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'
+        const mhBuf = new Buffer(bs58.decode(hash))
+        ipfs.files.get(mhBuf, (err, stream) => {
+          expect(err).to.not.exist
+          stream.pipe(concat((files) => {
+            expect(files).to.be.length(1)
+            expect(files[0].path).to.deep.equal(hash)
+            files[0].content.pipe(concat((content) => {
+              expect(content.toString()).to.contain('Check out some of the other files in this directory:')
+              done()
+            }))
+          }))
+        })
+      })
+
+      it('large file', (done) => {
+        const hash = 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq'
+        ipfs.files.get(hash, (err, stream) => {
+          expect(err).to.not.exist
+
+          // accumulate the files and their content
+          var files = []
+          stream.pipe(through.obj((file, enc, next) => {
+            file.content.pipe(concat((content) => {
+              files.push({
+                path: file.path,
+                content: content
+              })
+              next()
+            }))
+          }, () => {
+            expect(files.length).to.equal(1)
+            expect(files[0].path).to.equal(hash)
+            expect(files[0].content).to.deep.equal(bigFile)
+            done()
+          }))
+        })
+      })
+
+      it('directory', (done) => {
+        const hash = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP'
+        ipfs.files.get(hash, (err, stream) => {
+          expect(err).to.not.exist
+
+          // accumulate the files and their content
+          var files = []
+          stream.pipe(through.obj((file, enc, next) => {
+            if (file.content) {
+              file.content.pipe(concat((content) => {
+                files.push({
+                  path: file.path,
+                  content: content
+                })
+                next()
+              }))
+            } else {
+              files.push(file)
+              next()
+            }
+          }, () => {
+            // Check paths
+            var paths = files.map((file) => {
+              return file.path
+            })
+            expect(paths).to.deep.equal([
+              'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP',
+              'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt',
+              'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder',
+              'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files',
+              'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/empty',
+              'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/hello.txt',
+              'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/ipfs.txt',
+              'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt',
+              'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt',
+              'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt'
+            ])
+
+            // Check contents
+            var contents = files.map((file) => {
+              return file.content ? file.content : null
+            })
+            expect(contents).to.deep.equal([
+              null,
+              directoryContent['alice.txt'],
+              null,
+              null,
+              null,
+              directoryContent['files/hello.txt'],
+              directoryContent['files/ipfs.txt'],
+              directoryContent['holmes.txt'],
+              directoryContent['jungle.txt'],
+              directoryContent['pp.txt']
+            ])
+            done()
+          }))
+        })
+      })
+
+      describe('promise', () => {
+        it('with a base58 encoded string', (done) => {
+          const hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'
+          ipfs.files.get(hash)
+            .then((stream) => {
+              stream.pipe(concat((files) => {
+                expect(files).to.be.length(1)
+                expect(files[0].path).to.equal(hash)
+                files[0].content.pipe(concat((content) => {
+                  expect(content.toString()).to.contain('Check out some of the other files in this directory:')
+                  done()
+                }))
               }))
             })
+            .catch((err) => {
+              expect(err).to.not.exist
+            })
+        })
+
+        it('errors on invalid key', (done) => {
+          const hash = 'somethingNotMultihash'
+          ipfs.files.get(hash)
+            .then((stream) => {})
+            .catch((err) => {
+              expect(err).to.exist
+              const errString = err.toString()
+              if (errString === 'Error: invalid ipfs ref path') {
+                expect(err.toString()).to.contain('Error: invalid ipfs ref path')
+              }
+              if (errString === 'Error: Invalid Key') {
+                expect(err.toString()).to.contain('Error: Invalid Key')
+              }
+              done()
+            })
         })
       })
     })
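
The 'large file' and 'directory' tests above rely on a through2 object-stream pattern rather than piping straight into concat-stream, because each emitted entry carries its own content stream that must be drained before asking for the next entry. The same pattern in isolation, as a sketch assuming a connected ipfs instance and a hash pointing at a directory:

const through = require('through2')
const concat = require('concat-stream')

ipfs.files.get(hash, (err, stream) => {
  if (err) throw err
  const files = []
  stream.pipe(through.obj((file, enc, next) => {
    if (!file.content) {
      // directory entries have no content stream
      files.push({ path: file.path, content: null })
      return next()
    }
    // drain this file's content before signalling for the next entry
    file.content.pipe(concat((content) => {
      files.push({ path: file.path, content: content })
      next()
    }))
  }, (flush) => {
    // all entries consumed; files now holds { path, content } pairs
    console.log(files.map((file) => file.path))
    flush()
  }))
})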
