@@ -8,9 +8,10 @@ const path = require('path')
 const fs = require('fs')
 const isNode = require('detect-node')
 const concat = require('concat-stream')
+const through = require('through2')
 
 module.exports = (common) => {
-  describe.only('.files', () => {
+  describe('.files', () => {
     let smallFile
     let bigFile
     let ipfs
@@ -310,7 +311,7 @@ module.exports = (common) => {
         expect(err).to.not.exist
         stream.pipe(concat((files) => {
           expect(files).to.be.length(1)
-          expect(files[0].path).to.deep.equal(mhBuf)
+          expect(files[0].path).to.deep.equal(hash)
           files[0].content.pipe(concat((content) => {
             expect(content.toString()).to.contain('Check out some of the other files in this directory:')
             done()
@@ -323,13 +324,22 @@ module.exports = (common) => {
       const hash = 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq'
       ipfs.files.get(hash, (err, stream) => {
         expect(err).to.not.exist
-        stream.pipe(concat((files) => {
-          expect(files).to.be.length(1)
-          expect(files[0].path).to.equal(hash)
-          files[0].content.pipe(concat((content) => {
-            expect(content).to.deep.equal(bigFile)
-            done()
+
+        // accumulate the files and their content
+        var files = []
+        stream.pipe(through.obj((file, enc, next) => {
+          file.content.pipe(concat((content) => {
+            files.push({
+              path: file.path,
+              content: content
+            })
+            next()
           }))
+        }, () => {
+          expect(files.length).to.equal(1)
+          expect(files[0].path).to.equal(hash)
+          expect(files[0].content).to.deep.equal(bigFile)
+          done()
         }))
       })
     })
@@ -338,21 +348,35 @@ module.exports = (common) => {
       const hash = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP'
       ipfs.files.get(hash, (err, stream) => {
         expect(err).to.not.exist
-        stream.pipe(concat((files) => {
-          expect(files).to.be.length(8)
+
+        // accumulate the files and their content
+        var files = []
+        stream.pipe(through.obj((file, enc, next) => {
+          file.content.pipe(concat((content) => {
+            files.push({
+              path: file.path,
+              content: content
+            })
+            next()
+          }))
+        }, () => {
+          expect(files).to.be.length(10)
           var paths = files.map((file) => {
             return file.path
           })
           expect(paths).to.deep.equal([
+            'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP',
             'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt',
             'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder',
+            'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files',
+            'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/empty',
+            'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/hello.txt',
+            'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/ipfs.txt',
             'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt',
             'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt',
             'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt',
-            'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/empty',
-            'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/hello.txt',
-            'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/ipfs.txt'
           ])
+          done()
         }))
       })
     })
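
Note (not part of the diff): the pattern the changed tests rely on — buffering each file's content from the object stream in a through2 transform and running the assertions in the flush callback — can be sketched as a standalone helper. This is an illustrative sketch only; the collectFiles name is made up, and it assumes a stream of { path, content } objects like the one ipfs.files.get yields in these tests.

// Illustrative sketch, not the library's API: accumulate { path, content }
// entries from an object stream such as the one ipfs.files.get returns.
const through = require('through2')
const concat = require('concat-stream')

// `collectFiles` is a hypothetical helper name used only for this example.
function collectFiles (stream, callback) {
  const files = []
  stream.pipe(through.obj((file, enc, next) => {
    // Buffer this file's content fully before moving on to the next entry.
    file.content.pipe(concat((content) => {
      files.push({ path: file.path, content: content })
      next()
    }))
  }, () => {
    // The flush callback fires once every entry has been consumed,
    // which is why the assertions (and done()) move there in the diff.
    callback(null, files)
  }))
  stream.on('error', callback)
}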