@@ -6,20 +6,28 @@ const bs58 = require('bs58')
 const Readable = require('readable-stream')
 const path = require('path')
 const fs = require('fs')
-const isNode = require('detect-node')
 const bl = require('bl')
+const concat = require('concat-stream')
+const through = require('through2')

 module.exports = (common) => {
-  describe('.files', () => {
+  describe.only('.files', () => {
     let smallFile
     let bigFile
+    let directoryContent
     let ipfs

     before((done) => {
-      smallFile = fs.readFileSync(path.join(__dirname, './data/testfile.txt')
-      )
-      bigFile = fs.readFileSync(path.join(__dirname, './data/15mb.random')
-      )
+      smallFile = fs.readFileSync(path.join(__dirname, './data/testfile.txt'))
+      bigFile = fs.readFileSync(path.join(__dirname, './data/15mb.random'))
+      directoryContent = {
+        'pp.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/pp.txt')),
+        'holmes.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/holmes.txt')),
+        'jungle.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/jungle.txt')),
+        'alice.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/alice.txt')),
+        'files/hello.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/files/hello.txt')),
+        'files/ipfs.txt': fs.readFileSync(path.join(__dirname, './data/test-folder/files/ipfs.txt'))
+      }

       common.setup((err, _ipfs) => {
         expect(err).to.not.exist
@@ -100,15 +108,9 @@ module.exports = (common) => {
       })

       it('add a nested dir as array', (done) => {
-        if (!isNode) {
-          return done()
-          // can't run this test cause browserify
-          // can't shim readFileSync in runtime
-        }
-        const base = path.join(__dirname, 'data/test-folder')
         const content = (name) => ({
           path: `test-folder/${name}`,
-          content: fs.readFileSync(path.join(base, name))
+          content: directoryContent[name]
         })
         const emptyDir = (name) => ({
           path: `test-folder/${name}`
@@ -138,21 +140,13 @@ module.exports = (common) => {

     describe('.createAddStream', () => {
       it('stream of valid files and dirs', (done) => {
-        if (!isNode) {
-          return done()
-          // can't run this test cause browserify
-          // can't shim readFileSync in runtime
-        }
-
-        const base = path.join(__dirname, 'data/test-folder')
         const content = (name) => ({
           path: `test-folder/${name}`,
-          content: fs.readFileSync(path.join(base, name))
+          content: directoryContent[name]
         })
         const emptyDir = (name) => ({
           path: `test-folder/${name}`
         })
-
         const files = [
           content('pp.txt'),
           content('holmes.txt'),
@@ -241,7 +235,7 @@ module.exports = (common) => {
     })

     describe('.cat', () => {
-      it('with a bas58 multihash encoded string', () => {
+      it('with a base58 multihash encoded string', () => {
         const hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'

         return ipfs.cat(hash)
@@ -273,11 +267,165 @@ module.exports = (common) => {
           const hash = new Buffer(bs58.decode('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'))
           return ipfs.cat(hash)
             .then((stream) => {
-              stream.pipe(bl((err, bldata) => {
+              stream.pipe(bl((err, data) => {
                 expect(err).to.not.exist
-                expect(bldata.toString()).to.contain('Check out some of the other files in this directory:')
+                expect(data.toString()).to.contain('Check out some of the other files in this directory:')
+              }))
+            })
+        })
+      })
+    })
+
+    describe('.get', () => {
+      it('with a base58 encoded multihash', (done) => {
+        const hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'
+        ipfs.files.get(hash, (err, stream) => {
+          expect(err).to.not.exist
+          stream.pipe(concat((files) => {
+            expect(err).to.not.exist
+            expect(files).to.be.length(1)
+            expect(files[0].path).to.equal(hash)
+            files[0].content.pipe(concat((content) => {
+              expect(content.toString()).to.contain('Check out some of the other files in this directory:')
+              done()
+            }))
+          }))
+        })
+      })
+
+      it('with a multihash', (done) => {
+        const hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'
+        const mhBuf = new Buffer(bs58.decode(hash))
+        ipfs.files.get(mhBuf, (err, stream) => {
+          expect(err).to.not.exist
+          stream.pipe(concat((files) => {
+            expect(files).to.be.length(1)
+            expect(files[0].path).to.deep.equal(hash)
+            files[0].content.pipe(concat((content) => {
+              expect(content.toString()).to.contain('Check out some of the other files in this directory:')
+              done()
+            }))
+          }))
+        })
+      })
+
+      it('large file', (done) => {
+        const hash = 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq'
+        ipfs.files.get(hash, (err, stream) => {
+          expect(err).to.not.exist
+
+          // accumulate the files and their content
+          var files = []
+          stream.pipe(through.obj((file, enc, next) => {
+            file.content.pipe(concat((content) => {
+              files.push({
+                path: file.path,
+                content: content
+              })
+              next()
+            }))
+          }, () => {
+            expect(files.length).to.equal(1)
+            expect(files[0].path).to.equal(hash)
+            expect(files[0].content).to.deep.equal(bigFile)
+            done()
+          }))
+        })
+      })
+
+      it('directory', (done) => {
+        const hash = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP'
+        ipfs.files.get(hash, (err, stream) => {
+          expect(err).to.not.exist
+
+          // accumulate the files and their content
+          var files = []
+          stream.pipe(through.obj((file, enc, next) => {
+            if (file.content) {
+              file.content.pipe(concat((content) => {
+                files.push({
+                  path: file.path,
+                  content: content
+                })
+                next()
+              }))
+            } else {
+              files.push(file)
+              next()
+            }
+          }, () => {
+            // Check paths
+            var paths = files.map((file) => {
+              return file.path
+            })
+            expect(paths).to.deep.equal([
+              'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP',
+              'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt',
+              'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder',
+              'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files',
+              'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/empty',
+              'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/hello.txt',
+              'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/ipfs.txt',
+              'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt',
+              'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt',
+              'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt'
+            ])
+
+            // Check contents
+            var contents = files.map((file) => {
+              return file.content ? file.content : null
+            })
+            expect(contents).to.deep.equal([
+              null,
+              directoryContent['alice.txt'],
+              null,
+              null,
+              null,
+              directoryContent['files/hello.txt'],
+              directoryContent['files/ipfs.txt'],
+              directoryContent['holmes.txt'],
+              directoryContent['jungle.txt'],
+              directoryContent['pp.txt']
+            ])
+            done()
+          }))
+        })
+      })
+
+      describe('promise', () => {
+        it('with a base58 encoded string', (done) => {
+          const hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'
+          ipfs.files.get(hash)
+            .then((stream) => {
+              stream.pipe(concat((files) => {
+                expect(files).to.be.length(1)
+                expect(files[0].path).to.equal(hash)
+                files[0].content.pipe(concat((content) => {
+                  expect(content.toString()).to.contain('Check out some of the other files in this directory:')
+                  done()
+                }))
               }))
             })
+            .catch((err) => {
+              expect(err).to.not.exist
+            })
+        })
+
+        it('errors on invalid key', (done) => {
+          const hash = 'somethingNotMultihash'
+          ipfs.files.get(hash)
+            .then((stream) => {})
+            .catch((err) => {
+              expect(err).to.exist
+              const errString = err.toString()
+              if (errString === 'Error: invalid ipfs ref path') {
+                expect(err.toString()).to.contain('Error: invalid ipfs ref path')
+              }
+              if (errString === 'Error: Invalid Key') {
+                expect(err.toString()).to.contain('Error: Invalid Key')
+              }
+              done()
+            })
+        })
       })
     })
   })