@@ -7,10 +7,10 @@ const Readable = require('readable-stream')
 const path = require('path')
 const fs = require('fs')
 const isNode = require('detect-node')
-const bl = require('bl')
+const concat = require('concat-stream')

 module.exports = (common) => {
-  describe('.files', () => {
+  describe.only('.files', () => {
     let smallFile
     let bigFile
     let ipfs
@@ -200,18 +200,6 @@ module.exports = (common) => {
         })
       })

-      it('with a multihash', (done) => {
-        const mhBuf = new Buffer(bs58.decode('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'))
-        ipfs.cat(mhBuf, (err, stream) => {
-          expect(err).to.not.exist
-          stream.pipe(bl((err, data) => {
-            expect(err).to.not.exist
-            expect(data.toString()).to.contain('Check out some of the other files in this directory:')
-            done()
-          }))
-        })
-      })
-
       it('streams a large file', (done) => {
         const hash = 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq'
         ipfs.cat(hash, (err, stream) => {
@@ -222,6 +210,15 @@ module.exports = (common) => {
             done()
           }))
         })
+
+      it('with a multihash', (done) => {
+        const mhBuf = new Buffer(bs58.decode('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'))
+        ipfs.cat(mhBuf, (err, stream) => {
+          expect(err).to.not.exist
+          stream.pipe(concat((data) => {
+            expect(data.toString()).to.contain('Check out some of the other files in this directory:')
+            done()
+          }))
        })
      })
    })
@@ -249,8 +246,7 @@ module.exports = (common) => {
         const hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'
         ipfs.cat(hash)
           .then((stream) => {
-            stream.pipe(bl((err, data) => {
-              expect(err).to.not.exist
+            stream.pipe(concat((data) => {
               expect(data.toString()).to.contain('Check out some of the other files in this directory:')
               done()
             }))
@@ -281,8 +277,7 @@ module.exports = (common) => {
         const hash = new Buffer(bs58.decode('QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'))
         ipfs.cat(hash)
           .then((stream) => {
-            stream.pipe(bl((err, bldata) => {
-              expect(err).to.not.exist
+            stream.pipe(concat((bldata) => {
               expect(bldata.toString()).to.contain('Check out some of the other files in this directory:')
               done()
             }))
@@ -293,5 +288,91 @@ module.exports = (common) => {
         })
       })
     })
+
+    describe('.get', () => {
+      it('with a base58 encoded multihash', (done) => {
+        const hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'
+        ipfs.files.get(hash, (err, stream) => {
+          expect(err).to.not.exist
+          stream.pipe(concat((files) => {
+            expect(err).to.not.exist
+            expect(files).to.be.length(1)
+            expect(files[0].path).to.equal(hash)
+            files[0].content.pipe(concat((content) => {
+              expect(content.toString()).to.contain('Check out some of the other files in this directory:')
+              done()
+            }))
+          }))
+        })
+      })
+
+      it('with a multihash', (done) => {
+        const hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'
+        const mhBuf = new Buffer(bs58.decode(hash))
+        ipfs.files.get(mhBuf, (err, stream) => {
+          expect(err).to.not.exist
+          stream.pipe(concat((files) => {
+            expect(files).to.be.length(1)
+            expect(files[0].path).to.deep.equal(mhBuf)
+            files[0].content.pipe(concat((content) => {
+              expect(content.toString()).to.contain('Check out some of the other files in this directory:')
+              done()
+            }))
+          }))
+        })
+      })
+
+      it('large file', (done) => {
+        const hash = 'Qme79tX2bViL26vNjPsF3DP1R9rMKMvnPYJiKTTKPrXJjq'
+        ipfs.files.get(hash, (err, stream) => {
+          expect(err).to.not.exist
+          stream.pipe(concat((files) => {
+            expect(files).to.be.length(1)
+            expect(files[0].path).to.equal(hash)
+            files[0].content.pipe(concat((content) => {
+              expect(content).to.deep.equal(bigFile)
+              done()
+            }))
+          }))
+        })
+      })
+
+      describe('promise', () => {
+        it('with a base58 encoded string', (done) => {
+          const hash = 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB'
+          ipfs.files.get(hash)
+            .then((stream) => {
+              stream.pipe(concat((files) => {
+                expect(files).to.be.length(1)
+                expect(files[0].path).to.equal(hash)
+                files[0].content.pipe(concat((content) => {
+                  expect(content.toString()).to.contain('Check out some of the other files in this directory:')
+                  done()
+                }))
+              }))
+            })
+            .catch((err) => {
+              expect(err).to.not.exist
+            })
+        })
+
+        it('errors on invalid key', (done) => {
+          const hash = 'somethingNotMultihash'
+          ipfs.files.get(hash)
+            .then((stream) => {})
+            .catch((err) => {
+              expect(err).to.exist
+              const errString = err.toString()
+              if (errString === 'Error: invalid ipfs ref path') {
+                expect(err.toString()).to.contain('Error: invalid ipfs ref path')
+              }
+              if (errString === 'Error: Invalid Key') {
+                expect(err.toString()).to.contain('Error: Invalid Key')
+              }
+              done()
+            })
+        })
+      })
+    })
   })
 }