This repository was archived by the owner on Mar 10, 2020. It is now read-only.

Commit 8b955fe

refactor: local tests

Alan Shaw authored and committed
1 parent 4c50f94 · commit 8b955fe

14 files changed: +113 / -188 lines

README.md

Lines changed: 0 additions & 2 deletions

@@ -384,8 +384,6 @@ Aside from the default export, `ipfs-http-client` exports various types and util
 
 - [`isIPFS`](https://www.npmjs.com/package/is-ipfs)
 - [`Buffer`](https://www.npmjs.com/package/buffer)
-- [`PeerId`](https://www.npmjs.com/package/peer-id)
-- [`PeerInfo`](https://www.npmjs.com/package/peer-info)
 - [`multiaddr`](https://www.npmjs.com/package/multiaddr)
 - [`multibase`](https://www.npmjs.com/package/multibase)
 - [`multicodec`](https://www.npmjs.com/package/multicodec)

package.json

Lines changed: 4 additions & 3 deletions

@@ -48,7 +48,6 @@
     "cids": "~0.7.1",
     "debug": "^4.1.0",
     "err-code": "^2.0.0",
-    "explain-error": "^1.0.4",
     "form-data": "^3.0.0",
     "ipfs-block": "~0.8.1",
     "ipfs-utils": "github:ipfs/js-ipfs-utils#refactor/async-iterators",
@@ -78,11 +77,13 @@
     "go-ipfs-dep": "^0.4.22",
     "interface-ipfs-core": "^0.123.0",
     "ipfsd-ctl": "^0.47.1",
-    "ndjson": "^1.5.0",
+    "it-all": "^1.0.1",
+    "it-concat": "^1.0.0",
+    "it-pipe": "^1.1.0",
     "nock": "^11.4.0",
+    "pipe": "^1.1.0",
     "promisify-es6": "^1.0.3",
     "pull-stream": "^3.6.14",
-    "pump": "^3.0.0",
     "stream-equal": "^1.1.1"
   },
   "engines": {

src/pin/ls.js

Lines changed: 17 additions & 4 deletions

@@ -1,9 +1,12 @@
 'use strict'
 
+const ndjson = require('iterable-ndjson')
 const configure = require('../lib/configure')
+const toIterable = require('../lib/stream-to-iterable')
+const toCamel = require('../lib/object-to-camel')
 
 module.exports = configure(({ ky }) => {
-  return async (path, options) => {
+  return async function * ls (path, options) {
     if (path && path.type) {
       options = path
       path = null
@@ -14,16 +17,26 @@ module.exports = configure(({ ky }) => {
     options = options || {}
 
     const searchParams = new URLSearchParams(options.searchParams)
+    searchParams.set('stream', true)
     path.forEach(p => searchParams.append('arg', `${p}`))
     if (options.type) searchParams.set('type', options.type)
 
-    const { Keys } = await ky.post('pin/ls', {
+    const res = await ky.post('pin/ls', {
       timeout: options.timeout,
       signal: options.signal,
       headers: options.headers,
       searchParams
-    }).json()
+    })
 
-    return Object.keys(Keys).map(hash => ({ hash, type: Keys[hash].Type }))
+    for await (const pin of ndjson(toIterable(res.body))) {
+      // For nodes that do not understand the `stream option`
+      if (pin.Keys) {
+        for (const hash of Object.keys(pin.Keys)) {
+          yield { hash, type: pin.Keys[hash].Type }
+        }
+        return
+      }
+      yield toCamel(pin)
+    }
   }
 })
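After this change `pin.ls` is an async generator: pins are yielded as the ndjson response streams in, with a fallback for nodes that still answer with a single `Keys` object. A minimal consumer sketch (illustrative, not part of the commit), using the `it-all` helper added to the devDependencies above to recover the old array-style result:

```js
const all = require('it-all')

async function listPins (ipfs) {
  // Stream pins one at a time as the HTTP response arrives
  for await (const pin of ipfs.pin.ls({ type: 'recursive' })) {
    console.log(pin.hash, pin.type)
  }

  // Or buffer the whole list, mirroring the old promise-of-array behaviour
  const pins = await all(ipfs.pin.ls())
  return pins.length
}
```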

src/pubsub/subscribe.js

Lines changed: 2 additions & 2 deletions

@@ -1,7 +1,6 @@
 'use strict'
 
 const ndjson = require('iterable-ndjson')
-const explain = require('explain-error')
 const bs58 = require('bs58')
 const { Buffer } = require('buffer')
 const log = require('debug')('ipfs-http-client:pubsub:subscribe')
@@ -71,7 +70,8 @@ async function readMessages (msgStream, { onMessage, onEnd, onError }) {
           topicIDs: msg.topicIDs
         })
       } catch (err) {
-        onError(explain(err, 'Failed to parse pubsub message'), false, msg) // Not fatal
+        err.message = `Failed to parse pubsub message: ${err.message}`
+        onError(err, false, msg) // Not fatal
       }
     }
   } catch (err) {
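Dropping `explain-error` in favour of prefixing the original error's message keeps the original stack trace and error type. A small sketch of the same pattern in isolation (illustrative, not code from this commit):

```js
// Add context to an error without wrapping it in a new Error,
// so `err.stack` and `instanceof` checks still refer to the original.
function withContext (err, context) {
  err.message = `${context}: ${err.message}`
  return err
}

try {
  JSON.parse('not json')
} catch (err) {
  // err is still a SyntaxError, now with a more useful message
  console.error(withContext(err, 'Failed to parse pubsub message'))
}
```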

test/exports.spec.js

Lines changed: 0 additions & 4 deletions

@@ -7,8 +7,6 @@ const multiaddr = require('multiaddr')
 const multibase = require('multibase')
 const multicodec = require('multicodec')
 const multihash = require('multihashes')
-const PeerId = require('peer-id')
-const PeerInfo = require('peer-info')
 const { expect } = require('interface-ipfs-core/src/utils/mocha')
 
 const IpfsHttpClient = require('../')
@@ -22,7 +20,5 @@ describe('exports', () => {
     expect(IpfsHttpClient.multibase).to.equal(multibase)
     expect(IpfsHttpClient.multicodec).to.equal(multicodec)
     expect(IpfsHttpClient.multihash).to.equal(multihash)
-    expect(IpfsHttpClient.PeerId).to.equal(PeerId)
-    expect(IpfsHttpClient.PeerInfo).to.equal(PeerInfo)
   })
 })

test/files-mfs.spec.js

Lines changed: 36 additions & 47 deletions

@@ -6,13 +6,12 @@ const { expect } = require('interface-ipfs-core/src/utils/mocha')
 const loadFixture = require('aegir/fixtures')
 const mh = require('multihashes')
 const CID = require('cids')
-const values = require('pull-stream/sources/values')
-const pull = require('pull-stream/pull')
-const collect = require('pull-stream/sinks/collect')
+const all = require('it-all')
+const pipe = require('it-pipe')
+const { TimeoutError } = require('ky-universal')
 
 const ipfsClient = require('../src')
 const f = require('./utils/factory')
-const expectTimeout = require('./utils/expect-timeout')
 
 const testfile = loadFixture('test/fixtures/testfile.txt')
 
@@ -53,7 +52,7 @@ describe('.files (the MFS API part)', function () {
   })
 
   it('.add file for testing', async () => {
-    const res = await ipfs.add(testfile)
+    const res = await all(ipfs.add(testfile))
 
     expect(res).to.have.length(1)
     expect(res[0].hash).to.equal(expectedMultihash)
@@ -66,7 +65,7 @@ describe('.files (the MFS API part)', function () {
     const expectedBufferMultihash = 'QmWfVY9y3xjsixTgbd9AorQxH7VtMpzfx2HaWtsoUYecaX'
     const file = Buffer.from('hello')
 
-    const res = await ipfs.add(file)
+    const res = await all(ipfs.add(file))
 
     expect(res).to.have.length(1)
     expect(res[0].hash).to.equal(expectedBufferMultihash)
@@ -77,7 +76,7 @@ describe('.files (the MFS API part)', function () {
     const expectedHash = 'QmWfVY9y3xjsixTgbd9AorQxH7VtMpzfx2HaWtsoUYecaX'
     const content = Buffer.from('hello')
 
-    const res = await ipfs.add([{ path: '', content }])
+    const res = await all(ipfs.add([{ path: '', content }]))
 
     expect(res).to.have.length(1)
     expect(res[0].hash).to.equal(expectedHash)
@@ -88,7 +87,7 @@ describe('.files (the MFS API part)', function () {
     const expectedCid = 'bafybeifogzovjqrcxvgt7g36y7g63hvwvoakledwk4b2fr2dl4wzawpnny'
     const options = { cidVersion: 1, rawLeaves: false }
 
-    const res = await ipfs.add(testfile, options)
+    const res = await all(ipfs.add(testfile, options))
 
     expect(res).to.have.length(1)
     expect(res[0].hash).to.equal(expectedCid)
@@ -98,15 +97,16 @@ describe('.files (the MFS API part)', function () {
   it('.add with only-hash=true', async () => {
     const content = String(Math.random() + Date.now())
 
-    const files = await ipfs.add(Buffer.from(content), { onlyHash: true })
+    const files = await all(ipfs.add(Buffer.from(content), { onlyHash: true }))
     expect(files).to.have.length(1)
 
     // 'ipfs.object.get(<hash>)' should timeout because content wasn't actually added
-    await expectTimeout(ipfs.object.get(files[0].hash), 4000)
+    return expect(ipfs.object.get(files[0].hash, { timeout: 2000 }))
+      .to.be.rejectedWith(TimeoutError)
   })
 
   it('.add with options', async () => {
-    const res = await ipfs.add(testfile, { pin: false })
+    const res = await all(ipfs.add(testfile, { pin: false }))
 
     expect(res).to.have.length(1)
     expect(res[0].hash).to.equal(expectedMultihash)
@@ -116,23 +116,23 @@ describe('.files (the MFS API part)', function () {
   it('.add pins by default', async () => {
     const newContent = Buffer.from(String(Math.random()))
 
-    const initialPins = await ipfs.pin.ls()
+    const initialPins = await all(ipfs.pin.ls())
 
-    await ipfs.add(newContent)
+    await all(ipfs.add(newContent))
 
-    const pinsAfterAdd = await ipfs.pin.ls()
+    const pinsAfterAdd = await all(ipfs.pin.ls())
 
     expect(pinsAfterAdd.length).to.eql(initialPins.length + 1)
   })
 
   it('.add with pin=false', async () => {
     const newContent = Buffer.from(String(Math.random()))
 
-    const initialPins = await ipfs.pin.ls()
+    const initialPins = await all(ipfs.pin.ls())
 
-    await ipfs.add(newContent, { pin: false })
+    await all(ipfs.add(newContent, { pin: false }))
 
-    const pinsAfterAdd = await ipfs.pin.ls()
+    const pinsAfterAdd = await all(ipfs.pin.ls())
 
     expect(pinsAfterAdd.length).to.eql(initialPins.length)
   })
@@ -146,7 +146,7 @@ describe('.files (the MFS API part)', function () {
       }
      const options = { hashAlg: name, rawLeaves: false }
 
-      const res = await ipfs.add([file], options)
+      const res = await all(ipfs.add([file], options))
 
      expect(res).to.have.length(1)
      const cid = new CID(res[0].hash)
@@ -163,7 +163,7 @@ describe('.files (the MFS API part)', function () {
      progress = p
    }
 
-    const res = await ipfs.add(testfile, { progress: progressHandler })
+    const res = await all(ipfs.add(testfile, { progress: progressHandler }))
 
    expect(res).to.have.length(1)
    expect(progress).to.be.equal(testfile.byteLength)
@@ -180,7 +180,7 @@ describe('.files (the MFS API part)', function () {
    }
 
    // TODO: needs to be using a big file
-    const res = await ipfs.add(testfile, { progress: progressHandler })
+    const res = await all(ipfs.add(testfile, { progress: progressHandler }))
 
    expect(res).to.have.length(1)
    expect(progress).to.be.equal(testfile.byteLength)
@@ -197,15 +197,15 @@ describe('.files (the MFS API part)', function () {
    }
 
    // TODO: needs to be using a directory
-    const res = await ipfs.add(testfile, { progress: progressHandler })
+    const res = await all(ipfs.add(testfile, { progress: progressHandler }))
 
    expect(res).to.have.length(1)
    expect(progress).to.be.equal(testfile.byteLength)
    expect(progressCount).to.be.equal(1)
  })
 
  it('.add without progress options', async () => {
-    const res = await ipfs.add(testfile)
+    const res = await all(ipfs.add(testfile))
 
    expect(res).to.have.length(1)
  })
@@ -219,44 +219,33 @@ describe('.files (the MFS API part)', function () {
      }
      const options = { hashAlg: name, rawLeaves: false }
 
-      const res = await ipfs.add([file], options)
+      const res = await all(ipfs.add([file], options))
 
      expect(res).to.have.length(1)
      const cid = new CID(res[0].hash)
      expect(mh.decode(cid.multihash).name).to.equal(name)
    })
  })
 
-  it('.addPullStream with object chunks and pull stream content', (done) => {
+  it('.add with object chunks and iterable content', async () => {
    const expectedCid = 'QmRf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm'
 
-    pull(
-      values([{ content: values([Buffer.from('test')]) }]),
-      ipfs.addPullStream(),
-      collect((err, res) => {
-        expect(err).to.not.exist()
-
-        expect(res).to.have.length(1)
-        expect(res[0]).to.deep.equal({ path: expectedCid, hash: expectedCid, size: 12 })
-        done()
-      })
+    const res = await pipe(
+      [{ content: [Buffer.from('test')] }],
+      ipfs.add,
+      all
    )
-  })
-
-  it('.add with pull stream', async () => {
-    const expectedCid = 'QmRf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm'
-    const res = await ipfs.add(values([Buffer.from('test')]))
 
    expect(res).to.have.length(1)
    expect(res[0]).to.deep.equal({ path: expectedCid, hash: expectedCid, size: 12 })
  })
 
-  it('.add with array of objects with pull stream content', async () => {
+  it('.add with iterable', async () => {
    const expectedCid = 'QmRf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm'
-    const res = await ipfs.add([{ content: values([Buffer.from('test')]) }])
+    const res = await all(ipfs.add([Buffer.from('test')]))
 
    expect(res).to.have.length(1)
-    expect(res[0]).to.eql({ path: expectedCid, hash: expectedCid, size: 12 })
+    expect(res[0]).to.deep.equal({ path: expectedCid, hash: expectedCid, size: 12 })
  })
 
  it('files.mkdir', async () => {
@@ -327,7 +316,7 @@ describe('.files (the MFS API part)', function () {
    await ipfs.files.write(file, Buffer.from('Hello, world'), {
      create: true
    })
-    const files = await ipfs.files.ls(folder)
+    const files = await all(ipfs.files.ls(folder))
 
    expect(files.length).to.equal(1)
  })
@@ -336,7 +325,7 @@ describe('.files (the MFS API part)', function () {
    const folder = `test-folder-${Math.random()}`
 
    await ipfs.files.mkdir(`/${folder}`)
-    const files = await ipfs.files.ls()
+    const files = await all(ipfs.files.ls())
 
    expect(files.find(file => file.name === folder)).to.be.ok()
  })
@@ -346,15 +335,15 @@ describe('.files (the MFS API part)', function () {
      create: true
    })
 
-    const buf = await ipfs.files.read('/test-folder/test-file-2.txt')
+    const buf = Buffer.concat(await all(ipfs.files.read('/test-folder/test-file-2.txt')))
 
    expect(buf.toString()).to.be.equal('hello world')
  })
 
  it('files.write without options', async () => {
    await ipfs.files.write('/test-folder/test-file-2.txt', Buffer.from('hello world'))
 
-    const buf = await ipfs.files.read('/test-folder/test-file-2.txt')
+    const buf = Buffer.concat(await all(ipfs.files.read('/test-folder/test-file-2.txt')))
 
    expect(buf.toString()).to.be.equal('hello world')
  })
@@ -395,7 +384,7 @@ describe('.files (the MFS API part)', function () {
    await ipfs.files.write(file, testfile, {
      create: true
    })
-    const buf = await ipfs.files.read(file)
+    const buf = Buffer.concat(await all(ipfs.files.read(file)))
 
    expect(Buffer.from(buf)).to.deep.equal(testfile)
  })
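In the updated tests, `ipfs.files.read` is consumed as an async iterable of buffers, collected with `all` and joined with `Buffer.concat`. The newly added `it-concat` devDependency can do the same in one step; a short sketch under that assumption (illustrative, not code from this commit):

```js
const all = require('it-all')
const concat = require('it-concat')

async function readToBuffer (ipfs, path) {
  // Collect every chunk, then join into a single Buffer
  const viaAll = Buffer.concat(await all(ipfs.files.read(path)))

  // Or let it-concat gather the chunks into a BufferList, then slice to a Buffer
  const viaConcat = (await concat(ipfs.files.read(path))).slice()

  return { viaAll, viaConcat }
}
```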
