From 2fb718ccfe524fc996bbd6a72f51e6a3942cefbd Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Wed, 20 Nov 2019 23:25:41 +0000 Subject: [PATCH 01/45] chore: bitswap async/await refactor --- src/bitswap/stat.js | 16 ++++------------ src/bitswap/utils.js | 35 ++++++++++++----------------------- src/bitswap/wantlist.js | 11 ++++------- 3 files changed, 20 insertions(+), 42 deletions(-) diff --git a/src/bitswap/stat.js b/src/bitswap/stat.js index 024612474..23daaf218 100644 --- a/src/bitswap/stat.js +++ b/src/bitswap/stat.js @@ -30,17 +30,9 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should get bitswap stats', (done) => { - ipfs.bitswap.stat((err, res) => { - expectIsBitswap(err, res) - done() - }) - }) - - it('should get bitswap stats (promised)', () => { - return ipfs.bitswap.stat().then((res) => { - expectIsBitswap(null, res) - }) + it('should get bitswap stats', async () => { + const res = await ipfs.bitswap.stat() + expectIsBitswap(null, res) }) it('should not get bitswap stats when offline', function (done) { @@ -52,7 +44,7 @@ module.exports = (createCommon, options) => { (node, cb) => node.stop((err) => cb(err, node)) ], (err, node) => { expect(err).to.not.exist() - node.bitswap.wantlist((err) => { + node.bitswap.stat((err) => { expect(err).to.exist() done() }) diff --git a/src/bitswap/utils.js b/src/bitswap/utils.js index c5f2de344..0ec5b481b 100644 --- a/src/bitswap/utils.js +++ b/src/bitswap/utils.js @@ -1,35 +1,24 @@ 'use strict' -const until = require('async/until') +const pWhilst = require('p-whilst') -function waitForWantlistKey (ipfs, key, opts, cb) { - if (typeof opts === 'function') { - cb = opts - opts = {} - } - - opts = opts || {} - opts.timeout = opts.timeout || 1000 +function waitForWantlistKey (ipfs, key, opts = {}) { + opts.timeout = opts.timeout || 10000 let list = { Keys: [] } - let timedOut = false - setTimeout(() => { timedOut = true }, opts.timeout) + const start = Date.now() + const test = () => !list.Keys.some(k => k['/'] === key) + + const iteratee = async () => { + if (Date.now() - start > opts.timeout) { + throw new Error(`Timed out waiting for ${key} in wantlist`) + } - const test = () => timedOut ? 
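// Editorial aside — a minimal sketch of the p-whilst polling pattern this
// hunk adopts. `checkOnce` is a hypothetical async probe; p-whilst keeps
// re-running the action while the condition returns true, and any throw
// rejects the promise it returns.
const pWhilst = require('p-whilst')

async function pollUntilFound (checkOnce, timeout = 10000) {
  const start = Date.now()
  let found = false

  await pWhilst(
    () => !found, // loop until the probe reports success
    async () => {
      if (Date.now() - start > timeout) {
        throw new Error('Timed out polling')
      }
      found = await checkOnce()
    }
  )
}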
true : list.Keys.some(k => k['/'] === key) - const iteratee = (cb) => { - ipfs.bitswap.wantlist(opts.peerId, (err, nextList) => { - if (err) return cb(err) - list = nextList - cb() - }) + list = await ipfs.bitswap.wantlist(opts.peerId) } - until(test, iteratee, (err) => { - if (err) return cb(err) - if (timedOut) return cb(new Error(`Timed out waiting for ${key} in wantlist`)) - cb() - }) + return pWhilst(test, iteratee) } module.exports.waitForWantlistKey = waitForWantlistKey diff --git a/src/bitswap/wantlist.js b/src/bitswap/wantlist.js index cdc11c570..13ec39e81 100644 --- a/src/bitswap/wantlist.js +++ b/src/bitswap/wantlist.js @@ -45,15 +45,12 @@ module.exports = (createCommon, options) => { common.teardown(done) }) - it('should get the wantlist', (done) => { - waitForWantlistKey(ipfsB, key, done) + it('should get the wantlist', () => { + return waitForWantlistKey(ipfsB, key) }) - it('should get the wantlist by peer ID for a diffreent node', (done) => { - ipfsB.id((err, info) => { - expect(err).to.not.exist() - waitForWantlistKey(ipfsA, key, { peerId: info.id }, done) - }) + it('should get the wantlist by peer ID for a diffreent node', () => { + return waitForWantlistKey(ipfsA, key, { peerId: ipfsB.peerId.id }) }) it('should not get the wantlist when offline', function (done) { From 14aa336119bd57b3f6b17b13636464e3031cf161 Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Wed, 20 Nov 2019 23:26:03 +0000 Subject: [PATCH 02/45] chore: block async/await refactor --- src/block/get.js | 86 ++++++++++++++++++----------------------------- src/block/put.js | 59 +++++++++++++------------------- src/block/stat.js | 26 +++++--------- 3 files changed, 66 insertions(+), 105 deletions(-) diff --git a/src/block/get.js b/src/block/get.js index 0cac58251..885518d07 100644 --- a/src/block/get.js +++ b/src/block/get.js @@ -34,86 +34,66 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should get by CID object', (done) => { + it('should get by CID object', async () => { const cid = new CID(hash) + const block = await ipfs.block.get(cid) - ipfs.block.get(cid, (err, block) => { - expect(err).to.not.exist() - expect(block.data).to.eql(Buffer.from('blorb')) - expect(block.cid.multihash).to.eql(cid.multihash) - done() - }) + expect(block.data).to.eql(Buffer.from('blorb')) + expect(block.cid.multihash).to.eql(cid.multihash) }) - it('should get by CID in string', (done) => { - ipfs.block.get(multihash.toB58String(hash), (err, block) => { - expect(err).to.not.exist() - expect(block.data).to.eql(Buffer.from('blorb')) - expect(block.cid.multihash).to.eql(hash) - done() - }) + it('should get by CID in string', async () => { + const block = await ipfs.block.get(multihash.toB58String(hash)) + + expect(block.data).to.eql(Buffer.from('blorb')) + expect(block.cid.multihash).to.eql(hash) }) - it('should get an empty block', (done) => { - ipfs.block.put(Buffer.alloc(0), { + it('should get an empty block', async () => { + const res = await ipfs.block.put(Buffer.alloc(0), { format: 'dag-pb', mhtype: 'sha2-256', version: 0 - }, (err, block) => { - expect(err).to.not.exist() - - ipfs.block.get(block.cid, (err, block) => { - expect(err).to.not.exist() - expect(block.data).to.eql(Buffer.alloc(0)) - done() - }) }) + + const block = await ipfs.block.get(res.cid) + + expect(block.data).to.eql(Buffer.alloc(0)) }) - it('should get a block added as CIDv0 with a CIDv1', done => { + it('should get a block added as CIDv0 with a CIDv1', async () => { const input = 
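// Editorial aside — the CIDv0/CIDv1 conversion the two round-trip tests here
// exercise, shown with the same `cids` package this file already imports.
// The example CID is the dag-pb/sha2-256 hash used elsewhere in this suite.
const CID = require('cids')

const v0 = new CID('QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ')
const v1 = v0.toV1()              // same multihash, re-encoded as CIDv1
console.log(v1.toV0().equals(v0)) // true — a block is reachable by either form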
Buffer.from(`TEST${Date.now()}`) - ipfs.block.put(input, { version: 0 }, (err, res) => { - expect(err).to.not.exist() + const res = await ipfs.block.put(input, { version: 0 }) - const cidv0 = res.cid - expect(cidv0.version).to.equal(0) + const cidv0 = res.cid + expect(cidv0.version).to.equal(0) - const cidv1 = cidv0.toV1() + const cidv1 = cidv0.toV1() - ipfs.block.get(cidv1, (err, output) => { - expect(err).to.not.exist() - expect(output.data).to.eql(input) - done() - }) - }) + const block = await ipfs.block.get(cidv1) + expect(block.data).to.eql(input) }) - it('should get a block added as CIDv1 with a CIDv0', done => { + it('should get a block added as CIDv1 with a CIDv0', async () => { const input = Buffer.from(`TEST${Date.now()}`) - ipfs.block.put(input, { version: 1 }, (err, res) => { - expect(err).to.not.exist() + const res = await ipfs.block.put(input, { version: 1 }) - const cidv1 = res.cid - expect(cidv1.version).to.equal(1) + const cidv1 = res.cid + expect(cidv1.version).to.equal(1) - const cidv0 = cidv1.toV0() + const cidv0 = cidv1.toV0() - ipfs.block.get(cidv0, (err, output) => { - expect(err).to.not.exist() - expect(output.data).to.eql(input) - done() - }) - }) + const block = await ipfs.block.get(cidv0) + expect(block.data).to.eql(input) }) it('should return an error for an invalid CID', () => { - return ipfs.block.get('invalid') - .then( - () => expect.fail('should have returned an error for invalid argument'), - (err) => expect(err).to.be.an.instanceof(Error) - ) + return expect(ipfs.block.get('invalid')).to.eventually.be.rejected + .and.be.an.instanceOf(Error) + .and.have.property('message') + .that.include('Non-base58 character') }) }) } diff --git a/src/block/put.js b/src/block/put.js index 4100e3668..9c46dd282 100644 --- a/src/block/put.js +++ b/src/block/put.js @@ -31,68 +31,57 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should put a buffer, using defaults', (done) => { + it('should put a buffer, using defaults', async () => { const expectedHash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ' const blob = Buffer.from('blorb') - ipfs.block.put(blob, (err, block) => { - expect(err).to.not.exist() - expect(block.data).to.be.eql(blob) - expect(block.cid.multihash).to.eql(multihash.fromB58String(expectedHash)) - done() - }) + const block = await ipfs.block.put(blob) + + expect(block.data).to.be.eql(blob) + expect(block.cid.multihash).to.eql(multihash.fromB58String(expectedHash)) }) - it('should put a buffer, using CID', (done) => { + it('should put a buffer, using CID', async () => { const expectedHash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ' const cid = new CID(expectedHash) const blob = Buffer.from('blorb') - ipfs.block.put(blob, { cid: cid }, (err, block) => { - expect(err).to.not.exist() - expect(block.data).to.be.eql(blob) - expect(block.cid.multihash).to.eql(multihash.fromB58String(expectedHash)) - done() - }) + const block = await ipfs.block.put(blob, { cid: cid }) + + expect(block.data).to.be.eql(blob) + expect(block.cid.multihash).to.eql(multihash.fromB58String(expectedHash)) }) - it('should put a buffer, using options', (done) => { + it('should put a buffer, using options', async () => { const blob = Buffer.from(`TEST${Date.now()}`) - ipfs.block.put(blob, { + const block = await ipfs.block.put(blob, { format: 'raw', mhtype: 'sha2-512', version: 1 - }, (err, block) => { - expect(err).to.not.exist() - expect(block.data).to.be.eql(blob) - expect(block.cid.version).to.equal(1) - 
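// Editorial aside — what the hashing assertions in this hunk inspect:
// `multihashes` (imported as `multihash` in this file) can decode a CID's
// multihash to reveal which algorithm produced it.
const multihash = require('multihashes')

const mh = multihash.fromB58String('QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ')
console.log(multihash.decode(mh).name) // 'sha2-256'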
expect(block.cid.codec).to.equal('raw') - expect(multihash.decode(block.cid.multihash).name).to.equal('sha2-512') - done() }) + + expect(block.data).to.be.eql(blob) + expect(block.cid.version).to.equal(1) + expect(block.cid.codec).to.equal('raw') + expect(multihash.decode(block.cid.multihash).name).to.equal('sha2-512') }) - it('should put a Block instance', (done) => { + it('should put a Block instance', async () => { const expectedHash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ' const cid = new CID(expectedHash) const b = new Block(Buffer.from('blorb'), cid) - ipfs.block.put(b, (err, block) => { - expect(err).to.not.exist() - expect(block.data).to.eql(Buffer.from('blorb')) - expect(block.cid.multihash).to.eql(multihash.fromB58String(expectedHash)) - done() - }) + const block = await ipfs.block.put(b) + + expect(block.data).to.eql(Buffer.from('blorb')) + expect(block.cid.multihash).to.eql(multihash.fromB58String(expectedHash)) }) - it('should error with array of blocks', (done) => { + it('should error with array of blocks', () => { const blob = Buffer.from('blorb') - ipfs.block.put([blob, blob], (err) => { - expect(err).to.be.an.instanceof(Error) - done() - }) + return expect(ipfs.block.put([blob, blob])).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) }) } diff --git a/src/block/stat.js b/src/block/stat.js index d5bf3b11c..7143036bc 100644 --- a/src/block/stat.js +++ b/src/block/stat.js @@ -33,31 +33,23 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should stat by CID', (done) => { + it('should stat by CID', async () => { const cid = new CID(hash) - ipfs.block.stat(cid, (err, stats) => { - expect(err).to.not.exist() - expect(stats).to.have.property('key') - expect(stats).to.have.property('size') - done() - }) + const stats = await ipfs.block.stat(cid) + + expect(stats).to.have.property('key') + expect(stats).to.have.property('size') }) it('should return error for missing argument', () => { - return ipfs.block.stat(null) - .then( - () => expect.fail('should have thrown for missing parameter'), - (err) => expect(err).to.be.an.instanceof(Error) - ) + return expect(ipfs.block.stat(null)).to.eventually.be.rejected + .and.be.an.instanceOf(Error) }) it('should return error for invalid argument', () => { - return ipfs.block.stat('invalid') - .then( - () => expect.fail('should have thrown for invalid parameter'), - (err) => expect(err).to.be.an.instanceof(Error) - ) + return expect(ipfs.block.stat('invalid')).to.eventually.be.rejected + .and.be.an.instanceOf(Error) }) }) } From a22f248da8fbc4f8f236ac770f0e8bdd0280bc9d Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Wed, 20 Nov 2019 23:26:25 +0000 Subject: [PATCH 03/45] chore: bootstrap async/await refactor --- src/bootstrap/add.js | 36 +++++++++++----------------- src/bootstrap/list.js | 12 ++++------ src/bootstrap/rm.js | 55 +++++++++++++++++++++---------------------- 3 files changed, 46 insertions(+), 57 deletions(-) diff --git a/src/bootstrap/add.js b/src/bootstrap/add.js index 61c5ff560..cad035fa5 100644 --- a/src/bootstrap/add.js +++ b/src/bootstrap/add.js @@ -33,32 +33,24 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should return an error when called with an invalid arg', (done) => { - ipfs.bootstrap.add(invalidArg, (err) => { - expect(err).to.be.an.instanceof(Error) - done() - }) + it('should return an error when called with an invalid arg', () => { + return expect(ipfs.bootstrap.add(invalidArg)).to.eventually.be.rejected + 
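// Editorial aside — the rejection-assertion chain this refactor standardises
// on, sketched in isolation. It assumes chai is loaded with the
// chai-as-promised and dirty-chai plugins, as this suite's shared test utils
// already do.
const chai = require('chai')
chai.use(require('chai-as-promised'))
chai.use(require('dirty-chai'))
const { expect } = chai

async function demo () {
  // Awaiting (or returning) the assertion lets mocha wait on the promise.
  await expect(Promise.reject(new Error('bad arg')))
    .to.eventually.be.rejected()
    .and.be.an.instanceOf(Error)
    .and.have.property('message')
    .that.include('bad arg')
}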
.and.be.an.instanceOf(Error) }) - it('should return a list containing the bootstrap peer when called with a valid arg (ip4)', (done) => { - ipfs.bootstrap.add(validIp4, (err, res) => { - expect(err).to.not.exist() - expect(res).to.be.eql({ Peers: [validIp4] }) - const peers = res.Peers - expect(peers).to.exist() - expect(peers.length).to.eql(1) - done() - }) + it('should return a list containing the bootstrap peer when called with a valid arg (ip4)', async () => { + const res = await ipfs.bootstrap.add(validIp4) + + expect(res).to.be.eql({ Peers: [validIp4] }) + const peers = res.Peers + expect(peers).to.have.property('length').that.is.equal(1) }) - it('should return a list of bootstrap peers when called with the default option', (done) => { - ipfs.bootstrap.add(null, { default: true }, (err, res) => { - expect(err).to.not.exist() - const peers = res.Peers - expect(peers).to.exist() - expect(peers.length).to.above(1) - done() - }) + it('should return a list of bootstrap peers when called with the default option', async () => { + const res = await ipfs.bootstrap.add(null, { default: true }) + + const peers = res.Peers + expect(peers).to.have.property('length').that.is.gt(1) }) it('should prevent duplicate inserts of bootstrap peers', async () => { diff --git a/src/bootstrap/list.js b/src/bootstrap/list.js index 31bfe3f64..376c19dc1 100644 --- a/src/bootstrap/list.js +++ b/src/bootstrap/list.js @@ -30,13 +30,11 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should return a list of peers', (done) => { - ipfs.bootstrap.list((err, res) => { - expect(err).to.not.exist() - const peers = res.Peers - expect(peers).to.exist() - done() - }) + it('should return a list of peers', async () => { + const res = await ipfs.bootstrap.list() + + const peers = res.Peers + expect(peers).to.exist() }) }) } diff --git a/src/bootstrap/rm.js b/src/bootstrap/rm.js index 24a77b6c6..2d5a40aa0 100644 --- a/src/bootstrap/rm.js +++ b/src/bootstrap/rm.js @@ -9,6 +9,7 @@ module.exports = (createCommon, options) => { const common = createCommon() const invalidArg = 'this/Is/So/Invalid/' + const validIp4 = '/ip4/104.236.176.52/tcp/4001/ipfs/QmSoLnSGccFuZQJzRadHn95W2CrSFmZuTdDWP8HXaHca9z' describe('.bootstrap.rm', function () { this.timeout(100 * 1000) @@ -32,40 +33,38 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should return an error when called with an invalid arg', (done) => { - ipfs.bootstrap.rm(invalidArg, (err) => { - expect(err).to.be.an.instanceof(Error) - done() - }) + it('should return an error when called with an invalid arg', () => { + return expect(ipfs.bootstrap.rm(invalidArg)).to.eventually.be.rejected + .and.be.an.instanceOf(Error) }) - it('should return an empty list because no peers removed when called without an arg or options', (done) => { - ipfs.bootstrap.rm(null, (err, res) => { - expect(err).to.not.exist() - const peers = res.Peers - expect(peers).to.exist() - expect(peers.length).to.eql(0) - done() - }) + it('should return an empty list because no peers removed when called without an arg or options', async () => { + const res = await ipfs.bootstrap.rm(null) + + const peers = res.Peers + expect(peers).to.have.property('length').that.is.equal(0) }) - it('should return a list containing the peer removed when called with a valid arg (ip4)', (done) => { - ipfs.bootstrap.rm(null, (err, res) => { - expect(err).to.not.exist() - const peers = res.Peers - expect(peers).to.exist() - expect(peers.length).to.eql(0) - 
done() - }) + it('should return a list containing the peer removed when called with a valid arg (ip4)', async () => { + const addRes = await ipfs.bootstrap.add(validIp4) + expect(addRes).to.be.eql({ Peers: [validIp4] }) + + const rmRes = await ipfs.bootstrap.rm(validIp4) + expect(rmRes).to.be.eql({ Peers: [validIp4] }) + + const peers = rmRes.Peers + expect(peers).to.have.property('length').that.is.equal(1) }) - it('should return a list of all peers removed when all option is passed', (done) => { - ipfs.bootstrap.rm(null, { all: true }, (err, res) => { - expect(err).to.not.exist() - const peers = res.Peers - expect(peers).to.exist() - done() - }) + it('should return a list of all peers removed when all option is passed', async () => { + const addRes = await ipfs.bootstrap.add(null, { default: true }) + const addedPeers = addRes.Peers + + const rmRes = await ipfs.bootstrap.rm(null, { all: true }) + const removedPeers = rmRes.Peers + + // TODO: fix bootstrap.rm as it's not returning all the nodes when all option is passed + // expect(removedPeers).to.eql(addedPeers) }) }) } From 70f8bc78a992e88e04c0417e4a5640423acdeb0c Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Wed, 20 Nov 2019 23:27:03 +0000 Subject: [PATCH 04/45] chore: config async/await refactor --- src/config/get.js | 50 ++++++++------------------- src/config/replace.js | 28 ++++++---------- src/config/set.js | 78 ++++++++++++++----------------------------- 3 files changed, 49 insertions(+), 107 deletions(-) diff --git a/src/config/get.js b/src/config/get.js index bedb50017..2fe08aaed 100644 --- a/src/config/get.js +++ b/src/config/get.js @@ -30,51 +30,29 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should retrieve the whole config', (done) => { - ipfs.config.get((err, config) => { - expect(err).to.not.exist() - expect(config).to.be.an('object') - expect(isPlainObject(config)).to.equal(true) - done() - }) - }) + it('should retrieve the whole config', async () => { + const config = await ipfs.config.get() - it('should retrieve the whole config (promised)', () => { - return ipfs.config.get() - .then((config) => { - expect(config).to.be.an('object') - expect(isPlainObject(config)).to.equal(true) - }) + expect(config).to.be.an('object') + expect(isPlainObject(config)).to.equal(true) }) - it('should retrieve a value through a key', (done) => { - ipfs.config.get('Identity.PeerID', (err, peerId) => { - expect(err).to.not.exist() - expect(peerId).to.exist() - done() - }) + it('should retrieve a value through a key', async () => { + const peerId = await ipfs.config.get('Identity.PeerID') + expect(peerId).to.exist() }) - it('should retrieve a value through a nested key', (done) => { - ipfs.config.get('Addresses.Swarm', (err, swarmAddrs) => { - expect(err).to.not.exist() - expect(swarmAddrs).to.exist() - done() - }) + it('should retrieve a value through a nested key', async () => { + const swarmAddrs = await ipfs.config.get('Addresses.Swarm') + expect(swarmAddrs).to.exist() }) - it('should fail on non valid key', (done) => { - ipfs.config.get(1234, (err, peerId) => { - expect(err).to.exist() - done() - }) + it('should fail on non valid key', () => { + return expect(ipfs.config.get(1234)).to.eventually.be.rejected() }) - it('should fail on non existent key', (done) => { - ipfs.config.get('Bananas', (err, peerId) => { - expect(err).to.exist() - done() - }) + it('should fail on non existent key', () => { + return expect(ipfs.config.get('Bananas')).to.eventually.be.rejected() }) }) } diff --git 
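// Editorial aside — the three lookup shapes config.get supports per the
// tests above, assuming an `ipfs` instance like the one this suite spawns.
async function readConfig (ipfs) {
  const all = await ipfs.config.get()                     // whole config object
  const peerId = await ipfs.config.get('Identity.PeerID') // single key
  const swarm = await ipfs.config.get('Addresses.Swarm')  // nested key path
  return { all, peerId, swarm }
}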
a/src/config/replace.js b/src/config/replace.js index 8620ea099..aaa51c54d 100644 --- a/src/config/replace.js +++ b/src/config/replace.js @@ -33,26 +33,18 @@ module.exports = (createCommon, options) => { Fruit: 'Bananas' } - it('should replace the whole config', (done) => { - ipfs.config.replace(config, (err) => { - expect(err).to.not.exist() - ipfs.config.get((err, _config) => { - expect(err).to.not.exist() - expect(_config).to.deep.equal(config) - done() - }) - }) + it('should replace the whole config', async () => { + await ipfs.config.replace(config) + + const _config = await ipfs.config.get() + expect(_config).to.deep.equal(config) }) - it('should replace to empty config', (done) => { - ipfs.config.replace({}, (err) => { - expect(err).to.not.exist() - ipfs.config.get((err, _config) => { - expect(err).to.not.exist() - expect(_config).to.deep.equal({}) - done() - }) - }) + it('should replace to empty config', async () => { + await ipfs.config.replace({}) + + const _config = await ipfs.config.get() + expect(_config).to.deep.equal({}) }) }) } diff --git a/src/config/set.js b/src/config/set.js index 7843b60b4..2c0e54b47 100644 --- a/src/config/set.js +++ b/src/config/set.js @@ -29,47 +29,28 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should set a new key', (done) => { - ipfs.config.set('Fruit', 'banana', (err) => { - expect(err).to.not.exist() - ipfs.config.get('Fruit', (err, fruit) => { - expect(err).to.not.exist() - expect(fruit).to.equal('banana') - done() - }) - }) - }) + it('should set a new key', async () => { + await ipfs.config.set('Fruit', 'banana') - it('should set a new key (promised)', () => { - return ipfs.config.set('Fruit', 'banana') - .then(() => ipfs.config.get('Fruit')) - .then((fruit) => { - expect(fruit).to.equal('banana') - }) + const fruit = await ipfs.config.get('Fruit') + expect(fruit).to.equal('banana') }) - it('should set an already existing key', (done) => { - ipfs.config.set('Fruit', 'morango', (err) => { - expect(err).to.not.exist() - ipfs.config.get('Fruit', (err, fruit) => { - expect(err).to.not.exist() - expect(fruit).to.equal('morango') - done() - }) - }) + it('should set an already existing key', async () => { + await ipfs.config.set('Fruit', 'morango') + + const fruit = await ipfs.config.get('Fruit') + expect(fruit).to.equal('morango') }) - it('should set a number', (done) => { + it('should set a number', async () => { const key = 'Discovery.MDNS.Interval' const val = 11 - ipfs.config.set(key, val, function (err) { - expect(err).to.not.exist() - ipfs.config.get(key, function (err, result) { - expect(err).to.not.exist() - expect(result).to.equal(val) - done() - }) - }) + + await ipfs.config.set(key, val) + + const result = await ipfs.config.get(key) + expect(result).to.equal(val) }) it('should set a boolean', async () => { @@ -88,31 +69,22 @@ module.exports = (createCommon, options) => { expect(await ipfs.config.get(key)).to.equal(value) }) - it('should set a JSON object', (done) => { + it('should set a JSON object', async () => { const key = 'API.HTTPHeaders.Access-Control-Allow-Origin' const val = ['http://example.io'] - ipfs.config.set(key, val, function (err) { - expect(err).to.not.exist() - ipfs.config.get(key, function (err, result) { - expect(err).to.not.exist() - expect(result).to.deep.equal(val) - done() - }) - }) + + await ipfs.config.set(key, val) + + const result = await ipfs.config.get(key) + expect(result).to.deep.equal(val) }) - it('should fail on non valid key', (done) => { - 
ipfs.config.set(Buffer.from('heeey'), '', (err) => { - expect(err).to.exist() - done() - }) + it('should fail on non valid key', () => { + return expect(ipfs.config.set(Buffer.from('heeey'), '')).to.eventually.be.rejected() }) - it('should fail on non valid value', (done) => { - ipfs.config.set('Fruit', Buffer.from('abc'), (err) => { - expect(err).to.exist() - done() - }) + it('should fail on non valid value', () => { + return expect(ipfs.config.set('Fruit', Buffer.from('abc'))).to.eventually.be.rejected() }) }) } From fd72f40ef5e978f9445f99ee89ecb33f3777a6d8 Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Wed, 20 Nov 2019 23:27:23 +0000 Subject: [PATCH 05/45] chore: dag async/await refactor --- src/dag/get.js | 302 ++++++++++++++++-------------------------------- src/dag/put.js | 77 +++++------- src/dag/tree.js | 129 +++++++-------------- 3 files changed, 172 insertions(+), 336 deletions(-) diff --git a/src/dag/get.js b/src/dag/get.js index 9a03ffcca..ae70417c7 100644 --- a/src/dag/get.js +++ b/src/dag/get.js @@ -1,7 +1,7 @@ /* eslint-env mocha */ 'use strict' -const { series, eachSeries } = require('async') +const pEachSeries = require('p-each-series') const dagPB = require('ipld-dag-pb') const DAGNode = dagPB.DAGNode const dagCBOR = require('ipld-dag-cbor') @@ -43,284 +43,184 @@ module.exports = (createCommon, options) => { let cidPb let cidCbor - before((done) => { - series([ - (cb) => { - const someData = Buffer.from('some other data') - - try { - pbNode = new DAGNode(someData) - } catch (err) { - return cb(err) - } - - cborNode = { - data: someData - } - - cb() - }, - (cb) => { - try { - nodePb = new DAGNode(Buffer.from('I am inside a Protobuf')) - } catch (err) { - return cb(err) - } - - cb() - }, - (cb) => { - dagPB.util.cid(nodePb.serialize()) - .then(cid => { - cidPb = cid - cb() - }, cb) - }, - (cb) => { - nodeCbor = { - someData: 'I am inside a Cbor object', - pb: cidPb - } - - dagCBOR.util.cid(dagCBOR.util.serialize(nodeCbor)) - .then(cid => { - cidCbor = cid - cb() - }, cb) - }, - (cb) => { - eachSeries([ - { node: nodePb, multicodec: 'dag-pb', hashAlg: 'sha2-256' }, - { node: nodeCbor, multicodec: 'dag-cbor', hashAlg: 'sha2-256' } - ], (el, cb) => { - ipfs.dag.put(el.node, { - format: el.multicodec, - hashAlg: el.hashAlg - }, cb) - }, cb) - } - ], done) + before(async () => { + const someData = Buffer.from('some other data') + pbNode = new DAGNode(someData) + cborNode = { + data: someData + } + + nodePb = new DAGNode(Buffer.from('I am inside a Protobuf')) + cidPb = await dagPB.util.cid(nodePb.serialize()) + nodeCbor = { + someData: 'I am inside a Cbor object', + pb: cidPb + } + + cidCbor = await dagCBOR.util.cid(dagCBOR.util.serialize(nodeCbor)) + + await pEachSeries([ + { node: nodePb, multicodec: 'dag-pb', hashAlg: 'sha2-256' }, + { node: nodeCbor, multicodec: 'dag-cbor', hashAlg: 'sha2-256' } + ], (el) => ipfs.dag.put(el.node, { + format: el.multicodec, + hashAlg: el.hashAlg + })) }) - it('should get a dag-pb node', (done) => { - ipfs.dag.put(pbNode, { + it('should get a dag-pb node', async () => { + const cid = await ipfs.dag.put(pbNode, { format: 'dag-pb', hashAlg: 'sha2-256' - }, (err, cid) => { - expect(err).to.not.exist() - ipfs.dag.get(cid, (err, result) => { - expect(err).to.not.exist() - const node = result.value - expect(pbNode.toJSON()).to.eql(node.toJSON()) - done() - }) }) + + const result = await ipfs.dag.get(cid) + + const node = result.value + expect(pbNode.toJSON()).to.eql(node.toJSON()) }) - it('should get a dag-cbor node', (done) => { - 
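// Editorial aside — p-each-series, which replaces async/eachSeries in the
// before() hook above: the async iteratee is awaited for each item in order.
const pEachSeries = require('p-each-series')

async function demoSeries () {
  await pEachSeries(['a', 'b', 'c'], async (item, index) => {
    // each call finishes before the next one starts
    console.log(index, item)
  })
}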
ipfs.dag.put(cborNode, { + it('should get a dag-cbor node', async () => { + const cid = await ipfs.dag.put(cborNode, { format: 'dag-cbor', hashAlg: 'sha2-256' - }, (err, cid) => { - expect(err).to.not.exist() - ipfs.dag.get(cid, (err, result) => { - expect(err).to.not.exist() - - const node = result.value - expect(cborNode).to.eql(node) - done() - }) }) + + const result = await ipfs.dag.get(cid) + + const node = result.value + expect(cborNode).to.eql(node) }) - it('should get a dag-pb node with path', (done) => { - ipfs.dag.get(cidPb, '/', (err, result) => { - expect(err).to.not.exist() + it('should get a dag-pb node with path', async () => { + const result = await ipfs.dag.get(cidPb, '/') - const node = result.value + const node = result.value - dagPB.util.cid(node.serialize()) - .then(cid => { - expect(cid).to.eql(cidPb) - done() - }) - .catch(done) - }) + const cid = await dagPB.util.cid(node.serialize()) + expect(cid).to.eql(cidPb) }) - it('should get a dag-pb node local value', function (done) { - ipfs.dag.get(cidPb, 'Data', (err, result) => { - expect(err).to.not.exist() - expect(result.value).to.eql(Buffer.from('I am inside a Protobuf')) - done() - }) + it('should get a dag-pb node local value', async function () { + const result = await ipfs.dag.get(cidPb, 'Data') + expect(result.value).to.eql(Buffer.from('I am inside a Protobuf')) }) it.skip('should get a dag-pb node value one level deep', (done) => {}) it.skip('should get a dag-pb node value two levels deep', (done) => {}) - it('should get a dag-cbor node with path', (done) => { - ipfs.dag.get(cidCbor, '/', (err, result) => { - expect(err).to.not.exist() + it('should get a dag-cbor node with path', async () => { + const result = await ipfs.dag.get(cidCbor, '/') - const node = result.value + const node = result.value - dagCBOR.util.cid(dagCBOR.util.serialize(node)) - .then(cid => { - expect(cid).to.eql(cidCbor) - done() - }) - .catch(done) - }) + const cid = await dagCBOR.util.cid(dagCBOR.util.serialize(node)) + expect(cid).to.eql(cidCbor) }) - it('should get a dag-cbor node local value', (done) => { - ipfs.dag.get(cidCbor, 'someData', (err, result) => { - expect(err).to.not.exist() - expect(result.value).to.eql('I am inside a Cbor object') - done() - }) + it('should get a dag-cbor node local value', async () => { + const result = await ipfs.dag.get(cidCbor, 'someData') + expect(result.value).to.eql('I am inside a Cbor object') }) it.skip('should get dag-cbor node value one level deep', (done) => {}) it.skip('should get dag-cbor node value two levels deep', (done) => {}) it.skip('should get dag-cbor value via dag-pb node', (done) => {}) - it('should get dag-pb value via dag-cbor node', function (done) { - ipfs.dag.get(cidCbor, 'pb/Data', (err, result) => { - expect(err).to.not.exist() - expect(result.value).to.eql(Buffer.from('I am inside a Protobuf')) - done() - }) + it('should get dag-pb value via dag-cbor node', async function () { + const result = await ipfs.dag.get(cidCbor, 'pb/Data') + expect(result.value).to.eql(Buffer.from('I am inside a Protobuf')) }) - it('should get by CID string', (done) => { + it('should get by CID string', async () => { const cidCborStr = cidCbor.toBaseEncodedString() - ipfs.dag.get(cidCborStr, (err, result) => { - expect(err).to.not.exist() + const result = await ipfs.dag.get(cidCborStr) - const node = result.value + const node = result.value - dagCBOR.util.cid(dagCBOR.util.serialize(node)) - .then(cid => { - expect(cid).to.eql(cidCbor) - done() - }) - .catch(done) - }) + const cid = await 
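// Editorial aside — how these tests derive an expected CID out-of-band:
// serialize the node with ipld-dag-cbor, then hash the bytes into a CID,
// exactly as the surrounding assertions do.
const dagCBOR = require('ipld-dag-cbor')

async function cidOf (obj) {
  return dagCBOR.util.cid(dagCBOR.util.serialize(obj))
}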
dagCBOR.util.cid(dagCBOR.util.serialize(node)) + expect(cid).to.eql(cidCbor) }) - it('should get by CID string + path', function (done) { + it('should get by CID string + path', async function () { const cidCborStr = cidCbor.toBaseEncodedString() - ipfs.dag.get(cidCborStr + '/pb/Data', (err, result) => { - expect(err).to.not.exist() - expect(result.value).to.eql(Buffer.from('I am inside a Protobuf')) - done() - }) + const result = await ipfs.dag.get(cidCborStr + '/pb/Data') + expect(result.value).to.eql(Buffer.from('I am inside a Protobuf')) }) - it('should get only a CID, due to resolving locally only', function (done) { - ipfs.dag.get(cidCbor, 'pb/Data', { localResolve: true }, (err, result) => { - expect(err).to.not.exist() - expect(result.value.equals(cidPb)).to.be.true() - done() - }) + it('should get only a CID, due to resolving locally only', async function () { + const result = await ipfs.dag.get(cidCbor, 'pb/Data', { localResolve: true }) + expect(result.value.equals(cidPb)).to.be.true() }) - it('should get a node added as CIDv0 with a CIDv1', done => { + it('should get a node added as CIDv0 with a CIDv1', async () => { const input = Buffer.from(`TEST${Date.now()}`) const node = new DAGNode(input) - ipfs.dag.put(node, { format: 'dag-pb', hashAlg: 'sha2-256' }, (err, cid) => { - expect(err).to.not.exist() - expect(cid.version).to.equal(0) + const cid = await ipfs.dag.put(node, { format: 'dag-pb', hashAlg: 'sha2-256' }) + expect(cid.version).to.equal(0) - const cidv1 = cid.toV1() + const cidv1 = cid.toV1() - ipfs.dag.get(cidv1, (err, output) => { - expect(err).to.not.exist() - expect(output.value.Data).to.eql(input) - done() - }) - }) + const output = await ipfs.dag.get(cidv1) + expect(output.value.Data).to.eql(input) }) - it('should get a node added as CIDv1 with a CIDv0', done => { + it('should get a node added as CIDv1 with a CIDv0', async () => { const input = Buffer.from(`TEST${Date.now()}`) - ipfs.add(input, { cidVersion: 1, rawLeaves: false }, (err, res) => { - expect(err).to.not.exist() + const res = await ipfs.add(input, { cidVersion: 1, rawLeaves: false }) - const cidv1 = new CID(res[0].hash) - expect(cidv1.version).to.equal(1) + const cidv1 = new CID(res[0].hash) + expect(cidv1.version).to.equal(1) - const cidv0 = cidv1.toV0() + const cidv0 = cidv1.toV0() - ipfs.dag.get(cidv0, (err, output) => { - expect(err).to.not.exist() - expect(Unixfs.unmarshal(output.value.Data).data).to.eql(input) - done() - }) - }) + const output = await ipfs.dag.get(cidv0) + expect(Unixfs.unmarshal(output.value.Data).data).to.eql(input) }) - it('should be able to get part of a dag-cbor node', (done) => { + it('should be able to get part of a dag-cbor node', async () => { const cbor = { foo: 'dag-cbor-bar' } - ipfs.dag.put(cbor, { format: 'dag-cbor', hashAlg: 'sha2-256' }, (err, cid) => { - expect(err).to.not.exist() - expect(cid.codec).to.equal('dag-cbor') - cid = cid.toBaseEncodedString('base32') - expect(cid).to.equal('bafyreic6f672hnponukaacmk2mmt7vs324zkagvu4hcww6yba6kby25zce') - ipfs.dag.get(cid, 'foo', (err, result) => { - expect(err).to.not.exist() - expect(result.value).to.equal('dag-cbor-bar') - done() - }) - }) + + let cid = await ipfs.dag.put(cbor, { format: 'dag-cbor', hashAlg: 'sha2-256' }) + expect(cid.codec).to.equal('dag-cbor') + cid = cid.toBaseEncodedString('base32') + expect(cid).to.equal('bafyreic6f672hnponukaacmk2mmt7vs324zkagvu4hcww6yba6kby25zce') + + const result = await ipfs.dag.get(cid, 'foo') + expect(result.value).to.equal('dag-cbor-bar') }) - it('should be able to 
traverse from one dag-cbor node to another', (done) => { + it('should be able to traverse from one dag-cbor node to another', async () => { const cbor1 = { foo: 'dag-cbor-bar' } - ipfs.dag.put(cbor1, { format: 'dag-cbor', hashAlg: 'sha2-256' }, (err, cid1) => { - expect(err).to.not.exist() - - const cbor2 = { other: cid1 } + const cid1 = await ipfs.dag.put(cbor1, { format: 'dag-cbor', hashAlg: 'sha2-256' }) + const cbor2 = { other: cid1 } - ipfs.dag.put(cbor2, { format: 'dag-cbor', hashAlg: 'sha2-256' }, (err, cid2) => { - expect(err).to.not.exist() + const cid2 = await ipfs.dag.put(cbor2, { format: 'dag-cbor', hashAlg: 'sha2-256' }) - ipfs.dag.get(cid2, 'other/foo', (err, result) => { - expect(err).to.not.exist() - expect(result.value).to.equal('dag-cbor-bar') - done() - }) - }) - }) + const result = await ipfs.dag.get(cid2, 'other/foo') + expect(result.value).to.equal('dag-cbor-bar') }) - it('should be able to get a DAG node with format raw', (done) => { + it('should be able to get a DAG node with format raw', async () => { const buf = Buffer.from([0, 1, 2, 3]) - ipfs.dag.put(buf, { + const cid = await ipfs.dag.put(buf, { format: 'raw', hashAlg: 'sha2-256' - }, (err, cid) => { - expect(err).to.not.exist() - - ipfs.dag.get(cid, (err, result) => { - expect(err).to.not.exist() - expect(result.value).to.deep.equal(buf) - done() - }) }) + + const result = await ipfs.dag.get(cid) + expect(result.value).to.deep.equal(buf) }) }) } diff --git a/src/dag/put.js b/src/dag/put.js index f1532cbdf..746146a4b 100644 --- a/src/dag/put.js +++ b/src/dag/put.js @@ -54,86 +54,63 @@ module.exports = (createCommon, options) => { done() }) - it('should put dag-pb with default hash func (sha2-256)', (done) => { - ipfs.dag.put(pbNode, { + it('should put dag-pb with default hash func (sha2-256)', () => { + return ipfs.dag.put(pbNode, { format: 'dag-pb', hashAlg: 'sha2-256' - }, done) + }) }) - it('should put dag-pb with custom hash func (sha3-512)', (done) => { - ipfs.dag.put(pbNode, { + it('should put dag-pb with custom hash func (sha3-512)', () => { + return ipfs.dag.put(pbNode, { format: 'dag-pb', hashAlg: 'sha3-512' - }, done) + }) }) - it('should put dag-cbor with default hash func (sha2-256)', (done) => { - ipfs.dag.put(cborNode, { + it('should put dag-cbor with default hash func (sha2-256)', () => { + return ipfs.dag.put(cborNode, { format: 'dag-cbor', hashAlg: 'sha2-256' - }, done) + }) }) - it('should put dag-cbor with custom hash func (sha3-512)', (done) => { - ipfs.dag.put(cborNode, { + it('should put dag-cbor with custom hash func (sha3-512)', () => { + return ipfs.dag.put(cborNode, { format: 'dag-cbor', hashAlg: 'sha3-512' - }, done) + }) }) - it('should return the cid', (done) => { - ipfs.dag.put(cborNode, { + it('should return the cid', async () => { + const cid = await ipfs.dag.put(cborNode, { format: 'dag-cbor', hashAlg: 'sha2-256' - }, (err, cid) => { - expect(err).to.not.exist() - expect(cid).to.exist() - expect(CID.isCID(cid)).to.equal(true) - dagCBOR.util.cid(dagCBOR.util.serialize(cborNode)) - .then(_cid => { - expect(cid.buffer).to.eql(_cid.buffer) - done() - }) - .catch(done) }) - }) + expect(cid).to.exist() + expect(CID.isCID(cid)).to.equal(true) - it('should not fail when calling put without options', (done) => { - ipfs.dag.put(cborNode, done) + const _cid = await dagCBOR.util.cid(dagCBOR.util.serialize(cborNode)) + expect(cid.buffer).to.eql(_cid.buffer) }) - it('should not fail when calling put without options (promised)', () => { + it('should not fail when calling put without options', 
() => { return ipfs.dag.put(cborNode) }) - it('should set defaults when calling put without options', (done) => { - ipfs.dag.put(cborNode, (err, cid) => { - expect(err).to.not.exist() - expect(cid.codec).to.equal('dag-cbor') - expect(multihash.decode(cid.multihash).name).to.equal('sha2-256') - done() - }) - }) - - it('should set defaults when calling put without options (promised)', () => { - return ipfs.dag.put(cborNode) - .then((cid) => { - expect(cid.codec).to.equal('dag-cbor') - expect(multihash.decode(cid.multihash).name).to.equal('sha2-256') - }) + it('should set defaults when calling put without options', async () => { + const cid = await ipfs.dag.put(cborNode) + expect(cid.codec).to.equal('dag-cbor') + expect(multihash.decode(cid.multihash).name).to.equal('sha2-256') }) - it('should override hash algoritm default and resolve with it', (done) => { - ipfs.dag.put(cborNode, { + it('should override hash algoritm default and resolve with it', async () => { + const cid = await ipfs.dag.put(cborNode, { format: 'dag-cbor', hashAlg: 'sha3-512' - }, (err, cid) => { - expect(err).to.not.exist() - expect(cid.codec).to.equal('dag-cbor') - expect(multihash.decode(cid.multihash).name).to.equal('sha3-512') - done() }) + expect(cid.codec).to.equal('dag-cbor') + expect(multihash.decode(cid.multihash).name).to.equal('sha3-512') }) it.skip('should put by passing the cid instead of format and hashAlg', (done) => {}) diff --git a/src/dag/tree.js b/src/dag/tree.js index 4eb6ccf2b..a74c3a66a 100644 --- a/src/dag/tree.js +++ b/src/dag/tree.js @@ -1,8 +1,7 @@ /* eslint-env mocha */ 'use strict' -const series = require('async/series') -const eachSeries = require('async/eachSeries') +const pEachSeries = require('p-each-series') const dagPB = require('ipld-dag-pb') const DAGNode = dagPB.DAGNode const dagCBOR = require('ipld-dag-cbor') @@ -40,101 +39,61 @@ module.exports = (createCommon, options) => { let cidPb let cidCbor - before(function (done) { - series([ - (cb) => { - try { - nodePb = new DAGNode(Buffer.from('I am inside a Protobuf')) - } catch (err) { - return cb(err) - } - - cb() - }, - (cb) => { - dagPB.util.cid(nodePb.serialize()) - .then(cid => { - cidPb = cid - cb() - }, cb) - }, - (cb) => { - nodeCbor = { - someData: 'I am inside a Cbor object', - pb: cidPb - } - - dagCBOR.util.cid(dagCBOR.util.serialize(nodeCbor)) - .then(cid => { - cidCbor = cid - cb() - }, cb) - }, - (cb) => { - eachSeries([ - { node: nodePb, multicodec: 'dag-pb', hashAlg: 'sha2-256' }, - { node: nodeCbor, multicodec: 'dag-cbor', hashAlg: 'sha2-256' } - ], (el, cb) => { - ipfs.dag.put(el.node, { - format: el.multicodec, - hashAlg: el.hashAlg - }, cb) - }, cb) - } - ], done) + before(async function () { + nodePb = new DAGNode(Buffer.from('I am inside a Protobuf')) + cidPb = await dagPB.util.cid(nodePb.serialize()) + + nodeCbor = { + someData: 'I am inside a Cbor object', + pb: cidPb + } + cidCbor = await dagCBOR.util.cid(dagCBOR.util.serialize(nodeCbor)) + + await pEachSeries([ + { node: nodePb, multicodec: 'dag-pb', hashAlg: 'sha2-256' }, + { node: nodeCbor, multicodec: 'dag-cbor', hashAlg: 'sha2-256' } + ], (el) => ipfs.dag.put(el.node, { + format: el.multicodec, + hashAlg: el.hashAlg + })) }) - it('should get tree with CID', (done) => { - ipfs.dag.tree(cidCbor, (err, paths) => { - expect(err).to.not.exist() - expect(paths).to.eql([ - 'pb', - 'someData' - ]) - done() - }) + it('should get tree with CID', async () => { + const paths = await ipfs.dag.tree(cidCbor) + expect(paths).to.eql([ + 'pb', + 'someData' + ]) }) - it('should 
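// Editorial aside — dag.tree enumerates the paths under a node; with
// { recursive: true } it also walks across linked blocks, even when a link
// crosses formats (dag-cbor into dag-pb), as the recursive cases below show.
async function listPaths (ipfs, cid) {
  return ipfs.dag.tree(cid, { recursive: true })
  // e.g. ['pb', 'someData', 'pb/Links', 'pb/Data'] for the cidCbor fixture
}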
get tree with CID and path', (done) => { - ipfs.dag.tree(cidCbor, 'someData', (err, paths) => { - expect(err).to.not.exist() - expect(paths).to.eql([]) - done() - }) + it('should get tree with CID and path', async () => { + const paths = await ipfs.dag.tree(cidCbor, 'someData') + expect(paths).to.eql([]) }) - it('should get tree with CID and path as String', (done) => { + it('should get tree with CID and path as String', async () => { const cidCborStr = cidCbor.toBaseEncodedString() - ipfs.dag.tree(cidCborStr + '/someData', (err, paths) => { - expect(err).to.not.exist() - expect(paths).to.eql([]) - done() - }) + const paths = await ipfs.dag.tree(cidCborStr + '/someData') + expect(paths).to.eql([]) }) - it('should get tree with CID recursive (accross different formats)', (done) => { - ipfs.dag.tree(cidCbor, { recursive: true }, (err, paths) => { - expect(err).to.not.exist() - expect(paths).to.have.members([ - 'pb', - 'someData', - 'pb/Links', - 'pb/Data' - ]) - done() - }) + it('should get tree with CID recursive (accross different formats)', async () => { + const paths = await ipfs.dag.tree(cidCbor, { recursive: true }) + expect(paths).to.have.members([ + 'pb', + 'someData', + 'pb/Links', + 'pb/Data' + ]) }) - it('should get tree with CID and path recursive', (done) => { - ipfs.dag.tree(cidCbor, 'pb', { recursive: true }, (err, paths) => { - expect(err).to.not.exist() - expect(paths).to.have.members([ - 'Links', - 'Data' - ]) - done() - }) + it('should get tree with CID and path recursive', async () => { + const paths = await ipfs.dag.tree(cidCbor, 'pb', { recursive: true }) + expect(paths).to.have.members([ + 'Links', + 'Data' + ]) }) }) } From a2c6b122d8e8fa5ae87fb644a1e91a9cc34c99cc Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Wed, 20 Nov 2019 23:27:52 +0000 Subject: [PATCH 06/45] chore: dht async/await refactor --- src/dht/find-peer.js | 25 +++++--------- src/dht/find-provs.js | 75 +++++++++++++++------------------------- src/dht/get.js | 25 +++++--------- src/dht/provide.js | 79 +++++++++++++------------------------------ src/dht/put.js | 8 ++--- src/dht/query.js | 30 ++++++++-------- 6 files changed, 84 insertions(+), 158 deletions(-) diff --git a/src/dht/find-peer.js b/src/dht/find-peer.js index 028ebb155..fe56d90b1 100644 --- a/src/dht/find-peer.js +++ b/src/dht/find-peer.js @@ -41,26 +41,19 @@ module.exports = (createCommon, options) => { common.teardown(done) }) - it('should find other peers', (done) => { - nodeA.dht.findPeer(nodeB.peerId.id, (err, res) => { - expect(err).to.not.exist() + it('should find other peers', async () => { + const res = await nodeA.dht.findPeer(nodeB.peerId.id) - const id = res.id.toB58String() - const nodeAddresses = nodeB.peerId.addresses.map((addr) => addr.split('/ipfs/')[0]) // remove '/ipfs/' - const peerAddresses = res.multiaddrs.toArray().map((ma) => ma.toString().split('/ipfs/')[0]) + const id = res.id.toB58String() + const nodeAddresses = nodeB.peerId.addresses.map((addr) => addr.split('/ipfs/')[0]) // remove '/ipfs/' + const peerAddresses = res.multiaddrs.toArray().map((ma) => ma.toString().split('/ipfs/')[0]) - expect(id).to.be.eql(nodeB.peerId.id) - expect(nodeAddresses).to.include(peerAddresses[0]) - done() - }) + expect(id).to.be.eql(nodeB.peerId.id) + expect(nodeAddresses).to.include(peerAddresses[0]) }) - it('should fail to find other peer if peer does not exist', (done) => { - nodeA.dht.findPeer('Qmd7qZS4T7xXtsNFdRoK1trfMs5zU94EpokQ9WFtxdPxsZ', (err, peer) => { - expect(err).to.exist() - expect(peer).to.not.exist() - done() - }) + 
it('should fail to find other peer if peer does not exist', () => { + return expect(nodeA.dht.findPeer('Qmd7qZS4T7xXtsNFdRoK1trfMs5zU94EpokQ9WFtxdPxsZ')).to.eventually.be.rejected() }) }) } diff --git a/src/dht/find-provs.js b/src/dht/find-provs.js index 478eb0d20..4af69f4d5 100644 --- a/src/dht/find-provs.js +++ b/src/dht/find-provs.js @@ -2,21 +2,18 @@ 'use strict' const multihashing = require('multihashing-async') -const waterfall = require('async/waterfall') const parallel = require('async/parallel') const CID = require('cids') const { spawnNodesWithId } = require('../utils/spawn') const { getDescribe, getIt, expect } = require('../utils/mocha') const { connect } = require('../utils/swarm') -function fakeCid (cb) { +async function fakeCid () { const bytes = Buffer.from(`TEST${Date.now()}`) - multihashing(bytes, 'sha2-256', (err, mh) => { - if (err) { - cb(err) - } - cb(null, new CID(0, 'dag-pb', mh)) - }) + + const mh = await multihashing(bytes, 'sha2-256') + + return new CID(0, 'dag-pb', mh) } module.exports = (createCommon, options) => { @@ -53,54 +50,38 @@ module.exports = (createCommon, options) => { }) let providedCid - before('add providers for the same cid', function (done) { - this.timeout(10 * 1000) - parallel([ - (cb) => nodeB.object.new('unixfs-dir', cb), - (cb) => nodeC.object.new('unixfs-dir', cb) - ], (err, cids) => { - if (err) return done(err) - providedCid = cids[0] - parallel([ - (cb) => nodeB.dht.provide(providedCid, cb), - (cb) => nodeC.dht.provide(providedCid, cb) - ], done) - }) - }) + before('add providers for the same cid', async function () { + const cids = await Promise.all([ + nodeB.object.new('unixfs-dir'), + nodeC.object.new('unixfs-dir') + ]) - after(function (done) { - this.timeout(50 * 1000) + providedCid = cids[0] - common.teardown(done) + await Promise.all([ + nodeB.dht.provide(providedCid), + nodeC.dht.provide(providedCid) + ]) }) - it('should be able to find providers', function (done) { - this.timeout(20 * 1000) - - waterfall([ - (cb) => nodeA.dht.findProvs(providedCid, cb), - (provs, cb) => { - const providerIds = provs.map((p) => p.id.toB58String()) - expect(providerIds).to.have.members([ - nodeB.peerId.id, - nodeC.peerId.id - ]) - cb() - } - ], done) + it('should be able to find providers', async function () { + const provs = await nodeA.dht.findProvs(providedCid) + const providerIds = provs.map((p) => p.id.toB58String()) + + expect(providerIds).to.have.members([ + nodeB.peerId.id, + nodeC.peerId.id + ]) }) - it('should take options to override timeout config', function (done) { + it('should take options to override timeout config', async function () { const options = { timeout: 1 } - waterfall([ - (cb) => fakeCid(cb), - (cidV0, cb) => nodeA.dht.findProvs(cidV0, options, (err) => { - expect(err).to.exist() - cb(null) - }) - ], done) + + const cidV0 = await fakeCid() + + await expect(nodeA.dht.findProvs(cidV0, options)).to.be.rejected() }) }) } diff --git a/src/dht/get.js b/src/dht/get.js index c3d053a50..7bdb20852 100644 --- a/src/dht/get.js +++ b/src/dht/get.js @@ -2,7 +2,6 @@ 'use strict' const hat = require('hat') -const waterfall = require('async/waterfall') const { spawnNodesWithId } = require('../utils/spawn') const { getDescribe, getIt, expect } = require('../utils/mocha') const { connect } = require('../utils/swarm') @@ -43,27 +42,19 @@ module.exports = (createCommon, options) => { common.teardown(done) }) - it('should error when getting a non-existent key from the DHT', (done) => { - nodeA.dht.get('non-existing', { timeout: 100 }, (err, 
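// Editorial aside — the DHT round trip the put/get test below performs,
// minimally: a record put on one node becomes readable from a connected peer.
async function roundTrip (nodeA, nodeB) {
  const key = Buffer.from('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn')
  const value = Buffer.from('hello')

  await nodeB.dht.put(key, value)
  return nodeA.dht.get(key) // resolves to a Buffer equal to `value`
}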
value) => { - expect(err).to.be.an.instanceof(Error) - done() - }) + it('should error when getting a non-existent key from the DHT', () => { + return expect(nodeA.dht.get('non-existing', { timeout: 100 })).to.eventually.be.rejected + .and.be.an.instanceOf(Error) }) - it('should get a value after it was put on another node', function (done) { - this.timeout(80 * 1000) - + it('should get a value after it was put on another node', async () => { const key = Buffer.from(hat()) const value = Buffer.from(hat()) - waterfall([ - cb => nodeB.dht.put(key, value, cb), - cb => nodeA.dht.get(key, cb), - (result, cb) => { - expect(result).to.eql(value) - cb() - } - ], done) + await nodeB.dht.put(key, value) + const result = await nodeA.dht.get(key) + + expect(result).to.eql(value) }) }) } diff --git a/src/dht/provide.js b/src/dht/provide.js index ca927a2f2..3b08526ca 100644 --- a/src/dht/provide.js +++ b/src/dht/provide.js @@ -38,80 +38,47 @@ module.exports = (createCommon, options) => { common.teardown(done) }) - it('should provide local CID', (done) => { - ipfs.add(Buffer.from('test'), (err, res) => { - if (err) return done(err) + it('should provide local CID', async () => { + const res = await ipfs.add(Buffer.from('test')) - ipfs.dht.provide(new CID(res[0].hash), (err) => { - expect(err).to.not.exist() - done() - }) - }) + await ipfs.dht.provide(new CID(res[0].hash)) }) - it('should not provide if block not found locally', (done) => { + it('should not provide if block not found locally', () => { const cid = new CID('Qmd7qZS4T7xXtsNFdRoK1trfMs5zU94EpokQ9WFtxdPxsZ') - ipfs.dht.provide(cid, (err) => { - expect(err).to.exist() - expect(err.message).to.include('not found locally') - done() - }) + return expect(ipfs.dht.provide(cid)).to.eventually.be.rejected + .and.be.an.instanceOf(Error) + .and.have.property('message') + .that.include('not found locally') }) - it('should allow multiple CIDs to be passed', (done) => { - ipfs.add([ + it('should allow multiple CIDs to be passed', async () => { + const res = await ipfs.add([ { content: Buffer.from('t0') }, { content: Buffer.from('t1') } - ], (err, res) => { - if (err) return done(err) + ]) - ipfs.dht.provide([ - new CID(res[0].hash), - new CID(res[1].hash) - ], (err) => { - expect(err).to.not.exist() - done() - }) - }) + await ipfs.dht.provide([ + new CID(res[0].hash), + new CID(res[1].hash) + ]) }) - it('should provide a CIDv1', (done) => { - ipfs.add(Buffer.from('test'), { cidVersion: 1 }, (err, res) => { - if (err) return done(err) + it('should provide a CIDv1', async () => { + const res = await ipfs.add(Buffer.from('test'), { cidVersion: 1 }) - const cid = new CID(res[0].hash) + const cid = new CID(res[0].hash) - ipfs.dht.provide(cid, (err) => { - expect(err).to.not.exist() - done() - }) - }) + await ipfs.dht.provide(cid) }) - it('should provide a CIDv1 string', (done) => { - ipfs.add(Buffer.from('test'), { cidVersion: 1 }, (err, res) => { - if (err) return done(err) - - const cid = res[0].hash - ipfs.dht.provide(cid, (err) => { - expect(err).to.not.exist() - done() - }) - }) - }) - it('should error on non CID arg', (done) => { - ipfs.dht.provide({}, (err) => { - expect(err).to.exist() - done() - }) + it('should error on non CID arg', () => { + return expect(ipfs.dht.provide({})).to.eventually.be.rejected() }) - it('should error on array containing non CID arg', (done) => { - ipfs.dht.provide([{}], (err) => { - expect(err).to.exist() - done() - }) + it('should error on array containing non CID arg', () => { + return 
expect(ipfs.dht.provide([{}])).to.eventually.be.rejected() }) }) } diff --git a/src/dht/put.js b/src/dht/put.js index a1c39794e..f6e1510d2 100644 --- a/src/dht/put.js +++ b/src/dht/put.js @@ -36,15 +36,11 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should put a value to the DHT', (done) => { - this.timeout(80 * 1000) + it('should put a value to the DHT', async () => { const key = Buffer.from('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn') const data = Buffer.from('data') - nodeA.dht.put(key, data, (err) => { - expect(err).to.not.exist() - done() - }) + await nodeA.dht.put(key, data) }) }) } diff --git a/src/dht/query.js b/src/dht/query.js index 05bfd8ddb..dffa483cb 100644 --- a/src/dht/query.js +++ b/src/dht/query.js @@ -1,6 +1,7 @@ /* eslint-env mocha */ 'use strict' +const pTimeout = require('p-timeout') const { spawnNodesWithId } = require('../utils/spawn') const { getDescribe, getIt, expect } = require('../utils/mocha') const { connect } = require('../utils/swarm') @@ -41,27 +42,24 @@ module.exports = (createCommon, options) => { common.teardown(done) }) - it('should return the other node in the query', function (done) { + it('should return the other node in the query', async function () { const timeout = 150 * 1000 this.timeout(timeout) - let skipped = false + try { + const peers = await pTimeout(nodeA.dht.query(nodeB.peerId.id), timeout - 1000) - // This test is meh. DHT works best with >= 20 nodes. Therefore a - // failure might happen, but we don't want to report it as such. - // Hence skip the test before the timeout is reached - const timeoutId = setTimeout(function () { - skipped = true - this.skip() - }.bind(this), timeout - 1000) - - nodeA.dht.query(nodeB.peerId.id, (err, peers) => { - if (skipped) return - clearTimeout(timeoutId) - expect(err).to.not.exist() expect(peers.map((p) => p.id.toB58String())).to.include(nodeB.peerId.id) - done() - }) + } catch (err) { + if (err.name === 'TimeoutError') { + // This test is meh. DHT works best with >= 20 nodes. Therefore a + // failure might happen, but we don't want to report it as such. 
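// Editorial aside — the p-timeout wrapper used in this test: it rejects with
// an error whose name is 'TimeoutError' once the deadline passes, which is
// what lets the test tell a slow DHT apart from a real failure.
const pTimeout = require('p-timeout')

async function queryWithDeadline (node, peerId, ms) {
  try {
    return await pTimeout(node.dht.query(peerId), ms)
  } catch (err) {
    if (err.name === 'TimeoutError') return null // hypothetical: inconclusive
    throw err
  }
}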
+ // Hence skip the test before the timeout is reached + this.skip() + } else { + throw err + } + } }) }) } From 8bc5fc9bca94c1641e3970e4561e511ac41e2e59 Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Wed, 20 Nov 2019 23:28:19 +0000 Subject: [PATCH 07/45] chore: files-mfs async/await refactor --- src/files-mfs/cp.js | 39 +++----- src/files-mfs/flush.js | 28 ++---- src/files-mfs/ls-pull-stream.js | 93 +++++++----------- src/files-mfs/ls-readable-stream.js | 92 +++++++----------- src/files-mfs/ls.js | 79 ++++++---------- src/files-mfs/mkdir.js | 21 ++--- src/files-mfs/mv.js | 45 +++------ src/files-mfs/read-pull-stream.js | 39 +++----- src/files-mfs/read-readable-stream.js | 35 +++---- src/files-mfs/read.js | 30 +++--- src/files-mfs/rm.js | 40 +++----- src/files-mfs/stat.js | 131 +++++++++++--------------- src/files-mfs/write.js | 33 ++----- 13 files changed, 256 insertions(+), 449 deletions(-) diff --git a/src/files-mfs/cp.js b/src/files-mfs/cp.js index 5eea87531..d1a2b44fa 100644 --- a/src/files-mfs/cp.js +++ b/src/files-mfs/cp.js @@ -1,7 +1,6 @@ /* eslint-env mocha */ 'use strict' -const series = require('async/series') const hat = require('hat') const { fixtures } = require('../files-regular/utils') const { getDescribe, getIt, expect } = require('../utils/mocha') @@ -33,47 +32,31 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should copy file, expect error', (done) => { + it('should copy file, expect error', () => { const testDir = `/test-${hat()}` - ipfs.files.cp(`${testDir}/c`, `${testDir}/b`, (err) => { - expect(err).to.exist() - done() - }) + return expect(ipfs.files.cp(`${testDir}/c`, `${testDir}/b`)).to.eventually.be.rejected() }) - it('should copy file, expect no error', (done) => { + it('should copy file, expect no error', async () => { const testDir = `/test-${hat()}` - series([ - (cb) => ipfs.files.mkdir(testDir, { parents: true }, cb), - (cb) => ipfs.files.write(`${testDir}/a`, Buffer.from('TEST'), { create: true }, cb), - (cb) => ipfs.files.cp(`${testDir}/a`, `${testDir}/b`, cb) - ], (err) => { - expect(err).to.not.exist() - done() - }) + await ipfs.files.mkdir(testDir, { p: true }) + await ipfs.files.write(`${testDir}/a`, Buffer.from('TEST'), { create: true }) + await ipfs.files.cp(`${testDir}/a`, `${testDir}/b`) }) - it('should copy dir, expect error', (done) => { + it('should copy dir, expect error', () => { const testDir = `/test-${hat()}` - ipfs.files.cp(`${testDir}/lv1/lv3`, `${testDir}/lv1/lv4`, (err) => { - expect(err).to.exist() - done() - }) + return expect(ipfs.files.cp(`${testDir}/lv1/lv3`, `${testDir}/lv1/lv4`)).to.eventually.be.rejected() }) - it('should copy dir, expect no error', (done) => { + it('should copy dir, expect no error', async () => { const testDir = `/test-${hat()}` - series([ - (cb) => ipfs.files.mkdir(`${testDir}/lv1/lv2`, { parents: true }, cb), - (cb) => ipfs.files.cp(`${testDir}/lv1/lv2`, `${testDir}/lv1/lv3`, cb) - ], (err) => { - expect(err).to.not.exist() - done() - }) + await ipfs.files.mkdir(`${testDir}/lv1/lv2`, { p: true }) + await ipfs.files.cp(`${testDir}/lv1/lv2`, `${testDir}/lv1/lv3`) }) it('should copy from outside of mfs', async () => { diff --git a/src/files-mfs/flush.js b/src/files-mfs/flush.js index 647f0ec46..16a9e938c 100644 --- a/src/files-mfs/flush.js +++ b/src/files-mfs/flush.js @@ -1,7 +1,6 @@ /* eslint-env mocha */ 'use strict' -const series = require('async/series') const hat = require('hat') const { getDescribe, getIt, expect } = require('../utils/mocha') @@ -32,32 
+31,23 @@ module.exports = (createCommon, options) => {
   after((done) => common.teardown(done))

-  it('should not flush not found file/dir, expect error', (done) => {
+  it('should not flush not found file/dir, expect error', () => {
     const testDir = `/test-${hat()}`

-    ipfs.files.flush(`${testDir}/404`, (err) => {
-      expect(err).to.exist()
-      done()
-    })
+    return expect(ipfs.files.flush(`${testDir}/404`)).to.eventually.be.rejected()
   })

-  it('should flush root', (done) => {
-    ipfs.files.flush((err) => {
-      expect(err).to.not.exist()
-      done()
-    })
-  })
+  it('should flush root', () => ipfs.files.flush())

-  it('should flush specific dir', (done) => {
+  it('should flush specific dir', async () => {
     const testDir = `/test-${hat()}`

-    series([
-      (cb) => ipfs.files.mkdir(testDir, { parents: true }, cb),
-      (cb) => ipfs.files.flush(testDir, cb)
-    ], (err) => {
-      expect(err).to.not.exist()
-      done()
-    })
+    await ipfs.files.mkdir(testDir, { p: true })
+    await ipfs.files.flush(testDir)
   })
 })
}
diff --git a/src/files-mfs/ls-pull-stream.js b/src/files-mfs/ls-pull-stream.js
index e9977c127..b91339d18 100644
--- a/src/files-mfs/ls-pull-stream.js
+++ b/src/files-mfs/ls-pull-stream.js
@@ -1,12 +1,9 @@
 /* eslint-env mocha */
 'use strict'

-const series = require('async/series')
 const hat = require('hat')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
-const pull = require('pull-stream/pull')
-const onEnd = require('pull-stream/sinks/on-end')
-const collect = require('pull-stream/sinks/collect')
+const pullToPromise = require('pull-to-promise')

 module.exports = (createCommon, options) => {
   const describe = getDescribe(options)
   const it = getIt(options)
@@ -35,73 +32,51 @@ module.exports = (createCommon, options) => {
   after((done) => common.teardown(done))

-  it('should not ls not found file/dir, expect error', (done) => {
+  it('should not ls not found file/dir, expect error', () => {
     const testDir = `/test-${hat()}`

-    pull(
-      ipfs.files.lsPullStream(`${testDir}/404`),
-      onEnd((err) => {
-        expect(err).to.exist()
-        expect(err.message).to.include('does not exist')
-        done()
-      })
-    )
+    return expect(pullToPromise.any(ipfs.files.lsPullStream(`${testDir}/404`))).to.eventually.be.rejected
+      .and.be.an.instanceOf(Error)
+      .and.have.property('message')
+      .that.include('does not exist')
   })

-  it('should ls directory', (done) => {
+  it('should ls directory', async () => {
     const testDir = `/test-${hat()}`

-    series([
-      (cb) => ipfs.files.mkdir(`${testDir}/lv1`, { parents: true }, cb),
-      (cb) => ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }, cb)
-    ], (err) => {
-      expect(err).to.not.exist()
+    await ipfs.files.mkdir(`${testDir}/lv1`, { p: true })
+    await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true })

-      pull(
-        ipfs.files.lsPullStream(testDir),
-        collect((err, entries) => {
-          expect(err).to.not.exist()
-          expect(entries.sort((a, b) => a.name.localeCompare(b.name))).to.eql([
-            { name: 'b', type: 0, size: 0, hash: '' },
-            { name: 'lv1', type: 0, size: 0, hash: '' }
-          ])
-          done()
-        })
-      )
-    })
+    const entries = await pullToPromise.any(ipfs.files.lsPullStream(testDir))
+
+    expect(entries.sort((a, b) => a.name.localeCompare(b.name))).to.eql([
+      { name: 'b', type: 0, size: 0, hash: '' },
+      { name: 'lv1', type: 0, size: 0, hash: '' }
+    ])
   })

-  it('should ls directory with long option', (done) => {
+  it('should ls directory with long option', async () => {
     const testDir = `/test-${hat()}`

-    series([
-      (cb) => ipfs.files.mkdir(`${testDir}/lv1`, { parents: true }, cb),
-      (cb) =>
ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }, cb) - ], (err) => { - expect(err).to.not.exist() + await ipfs.files.mkdir(`${testDir}/lv1`, { p: true }) + await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }) - pull( - ipfs.files.lsPullStream(testDir, { long: true }), - collect((err, entries) => { - expect(err).to.not.exist() - expect(entries.sort((a, b) => a.name.localeCompare(b.name))).to.eql([ - { - name: 'b', - type: 0, - size: 13, - hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T' - }, - { - name: 'lv1', - type: 1, - size: 0, - hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn' - } - ]) - done() - }) - ) - }) + const entries = await pullToPromise.any(ipfs.files.lsPullStream(testDir, { long: true })) + + expect(entries.sort((a, b) => a.name.localeCompare(b.name))).to.eql([ + { + name: 'b', + type: 0, + size: 13, + hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T' + }, + { + name: 'lv1', + type: 1, + size: 0, + hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn' + } + ]) }) }) } diff --git a/src/files-mfs/ls-readable-stream.js b/src/files-mfs/ls-readable-stream.js index af489a6fd..311bf7220 100644 --- a/src/files-mfs/ls-readable-stream.js +++ b/src/files-mfs/ls-readable-stream.js @@ -1,9 +1,9 @@ /* eslint-env mocha */ 'use strict' -const series = require('async/series') const hat = require('hat') const { getDescribe, getIt, expect } = require('../utils/mocha') +const getStream = require('get-stream') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -32,75 +32,55 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should not ls not found file/dir, expect error', (done) => { + it('should not ls not found file/dir, expect error', () => { const testDir = `/test-${hat()}` - const stream = ipfs.files.lsReadableStream(`${testDir}/404`) - stream.on('data', () => {}) - stream.once('error', (err) => { - expect(err).to.exist() - expect(err.message).to.include('does not exist') - done() - }) + return expect(getStream(stream)).to.eventually.be.rejected + .and.be.an.instanceOf(Error) + .and.have.property('message') + .that.include('does not exist') }) - it('should ls directory', (done) => { + it('should ls directory', async () => { const testDir = `/test-${hat()}` - series([ - (cb) => ipfs.files.mkdir(`${testDir}/lv1`, { parents: true }, cb), - (cb) => ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }, cb) - ], (err) => { - expect(err).to.not.exist() + await ipfs.files.mkdir(`${testDir}/lv1`, { p: true }) + await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }) - const stream = ipfs.files.lsReadableStream(testDir) - const entries = [] + const stream = ipfs.files.lsReadableStream(testDir) - stream.on('data', entry => entries.push(entry)) + const entries = await getStream.array(stream) - stream.once('end', () => { - expect(entries.sort((a, b) => a.name.localeCompare(b.name))).to.eql([ - { name: 'b', type: 0, size: 0, hash: '' }, - { name: 'lv1', type: 0, size: 0, hash: '' } - ]) - done() - }) - }) + expect(entries.sort((a, b) => a.name.localeCompare(b.name))).to.eql([ + { name: 'b', type: 0, size: 0, hash: '' }, + { name: 'lv1', type: 0, size: 0, hash: '' } + ]) }) - it('should ls directory with long option', (done) => { + it('should ls directory with long option', async () => { const testDir = `/test-${hat()}` - series([ - (cb) => ipfs.files.mkdir(`${testDir}/lv1`, { parents: 
true }, cb), - (cb) => ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }, cb) - ], (err) => { - expect(err).to.not.exist() - - const stream = ipfs.files.lsReadableStream(testDir, { long: true }) - const entries = [] - - stream.on('data', entry => entries.push(entry)) - - stream.once('end', () => { - expect(entries.sort((a, b) => a.name.localeCompare(b.name))).to.eql([ - { - name: 'b', - type: 0, - size: 13, - hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T' - }, - { - name: 'lv1', - type: 1, - size: 0, - hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn' - } - ]) - done() - }) - }) + await ipfs.files.mkdir(`${testDir}/lv1`, { p: true }) + await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }) + + const stream = ipfs.files.lsReadableStream(testDir, { long: true }) + const entries = await getStream.array(stream) + + expect(entries.sort((a, b) => a.name.localeCompare(b.name))).to.eql([ + { + name: 'b', + type: 0, + size: 13, + hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T' + }, + { + name: 'lv1', + type: 1, + size: 0, + hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn' + } + ]) }) }) } diff --git a/src/files-mfs/ls.js b/src/files-mfs/ls.js index 469f16b68..46fe52560 100644 --- a/src/files-mfs/ls.js +++ b/src/files-mfs/ls.js @@ -1,7 +1,6 @@ /* eslint-env mocha */ 'use strict' -const series = require('async/series') const hat = require('hat') const { fixtures } = require('../files-regular/utils') const { getDescribe, getIt, expect } = require('../utils/mocha') @@ -33,64 +32,48 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should not ls not found file/dir, expect error', (done) => { + it('should not ls not found file/dir, expect error', () => { const testDir = `/test-${hat()}` - ipfs.files.ls(`${testDir}/404`, (err, info) => { - expect(err).to.exist() - expect(info).to.not.exist() - done() - }) + return expect(ipfs.files.ls(`${testDir}/404`)).to.eventually.be.rejected() }) - it('should ls directory', (done) => { + it('should ls directory', async () => { const testDir = `/test-${hat()}` - series([ - (cb) => ipfs.files.mkdir(`${testDir}/lv1`, { parents: true }, cb), - (cb) => ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }, cb) - ], (err) => { - expect(err).to.not.exist() + await ipfs.files.mkdir(`${testDir}/lv1`, { p: true }) + await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }) - ipfs.files.ls(testDir, (err, info) => { - expect(err).to.not.exist() - expect(info.sort((a, b) => a.name.localeCompare(b.name))).to.eql([ - { name: 'b', type: 0, size: 0, hash: '' }, - { name: 'lv1', type: 0, size: 0, hash: '' } - ]) - done() - }) - }) + const info = await ipfs.files.ls(testDir) + + expect(info.sort((a, b) => a.name.localeCompare(b.name))).to.eql([ + { name: 'b', type: 0, size: 0, hash: '' }, + { name: 'lv1', type: 0, size: 0, hash: '' } + ]) }) - it('should ls directory with long option', (done) => { + it('should ls directory with long option', async () => { const testDir = `/test-${hat()}` - series([ - (cb) => ipfs.files.mkdir(`${testDir}/lv1`, { parents: true }, cb), - (cb) => ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }, cb) - ], (err) => { - expect(err).to.not.exist() - - ipfs.files.ls(testDir, { long: true }, (err, info) => { - expect(err).to.not.exist() - expect(info.sort((a, b) => a.name.localeCompare(b.name))).to.eql([ - { - name: 'b', - type: 0, - size: 13, - 
hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T' - }, - { - name: 'lv1', - type: 1, - size: 0, - hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn' - } - ]) - done() - }) - }) + await ipfs.files.mkdir(`${testDir}/lv1`, { p: true }) + await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }) + + const info = await ipfs.files.ls(testDir, { long: true }) + + expect(info.sort((a, b) => a.name.localeCompare(b.name))).to.eql([ + { + name: 'b', + type: 0, + size: 13, + hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T' + }, + { + name: 'lv1', + type: 1, + size: 0, + hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn' + } + ]) }) it('should ls from outside of mfs', async () => { diff --git a/src/files-mfs/mkdir.js b/src/files-mfs/mkdir.js index 81190a42c..a6081f439 100644 --- a/src/files-mfs/mkdir.js +++ b/src/files-mfs/mkdir.js @@ -31,29 +31,20 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should make directory on root', (done) => { + it('should make directory on root', () => { const testDir = `/test-${hat()}` - ipfs.files.mkdir(testDir, (err) => { - expect(err).to.not.exist() - done() - }) + return ipfs.files.mkdir(testDir) }) - it('should make directory and its parents', (done) => { + it('should make directory and its parents', () => { const testDir = `/test-${hat()}` - ipfs.files.mkdir(`${testDir}/lv1/lv2`, { parents: true }, (err) => { - expect(err).to.not.exist() - done() - }) + return ipfs.files.mkdir(`${testDir}/lv1/lv2`, { p: true }) }) - it('should not make already existent directory', (done) => { - ipfs.files.mkdir('/', (err) => { - expect(err).to.exist() - done() - }) + it('should not make already existent directory', () => { + return expect(ipfs.files.mkdir('/')).to.eventually.be.rejected() }) }) } diff --git a/src/files-mfs/mv.js b/src/files-mfs/mv.js index f98b71a3d..83835a4f9 100644 --- a/src/files-mfs/mv.js +++ b/src/files-mfs/mv.js @@ -1,7 +1,6 @@ /* eslint-env mocha */ 'use strict' -const series = require('async/series') const hat = require('hat') const { getDescribe, getIt, expect } = require('../utils/mocha') @@ -30,51 +29,33 @@ module.exports = (createCommon, options) => { }) }) - before((done) => { - series([ - (cb) => ipfs.files.mkdir('/test/lv1/lv2', { parents: true }, cb), - (cb) => ipfs.files.write('/test/a', Buffer.from('Hello, world!'), { create: true }, cb) - ], done) + before(async () => { + await ipfs.files.mkdir('/test/lv1/lv2', { p: true }) + await ipfs.files.write('/test/a', Buffer.from('Hello, world!'), { create: true }) }) - after((done) => common.teardown(done)) + after(() => common.teardown()) - it('should not move not found file/dir, expect error', (done) => { + it('should not move not found file/dir, expect error', () => { const testDir = `/test-${hat()}` - ipfs.files.mv(`${testDir}/404`, `${testDir}/a`, (err) => { - expect(err).to.exist() - done() - }) + return expect(ipfs.files.mv(`${testDir}/404`, `${testDir}/a`)).to.eventually.be.rejected() }) - it('should move file, expect no error', (done) => { + it('should move file, expect no error', async () => { const testDir = `/test-${hat()}` - series([ - (cb) => ipfs.files.mkdir(`${testDir}/lv1/lv2`, { parents: true }, cb), - (cb) => ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), { create: true }, cb) - ], (err) => { - expect(err).to.not.exist() + await ipfs.files.mkdir(`${testDir}/lv1/lv2`, { p: true }) + await ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), { create: true }) - 
ipfs.files.mv(`${testDir}/a`, `${testDir}/c`, (err) => { - expect(err).to.not.exist() - done() - }) - }) + await ipfs.files.mv(`${testDir}/a`, `${testDir}/c`) }) - it('should move dir, expect no error', (done) => { + it('should move dir, expect no error', async () => { const testDir = `/test-${hat()}` - ipfs.files.mkdir(`${testDir}/lv1/lv2`, { parents: true }, (err) => { - expect(err).to.not.exist() - - ipfs.files.mv('/test/lv1/lv2', '/test/lv1/lv4', (err) => { - expect(err).to.not.exist() - done() - }) - }) + await ipfs.files.mkdir(`${testDir}/lv1/lv2`, { p: true }) + await ipfs.files.mv('/test/lv1/lv2', '/test/lv1/lv4') }) }) } diff --git a/src/files-mfs/read-pull-stream.js b/src/files-mfs/read-pull-stream.js index 34cf6c289..b4c4b718e 100644 --- a/src/files-mfs/read-pull-stream.js +++ b/src/files-mfs/read-pull-stream.js @@ -1,11 +1,9 @@ /* eslint-env mocha */ 'use strict' -const series = require('async/series') const hat = require('hat') const { getDescribe, getIt, expect } = require('../utils/mocha') -const pull = require('pull-stream/pull') -const collect = require('pull-stream/sinks/collect') +const pullToPromise = require('pull-to-promise') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -34,37 +32,24 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should not read not found, expect error', (done) => { + it('should not read not found, expect error', () => { const testDir = `/test-${hat()}` - pull( - ipfs.files.readPullStream(`${testDir}/404`), - collect((err) => { - expect(err).to.exist() - expect(err.message).to.contain('does not exist') - done() - }) - ) + return expect(pullToPromise.any(ipfs.files.readPullStream(`${testDir}/404`))).to.eventually.be.rejected + .and.be.an.instanceOf(Error) + .and.have.property('message') + .that.include('does not exist') }) - it('should read file', (done) => { + it('should read file', async () => { const testDir = `/test-${hat()}` - series([ - (cb) => ipfs.files.mkdir(testDir, cb), - (cb) => ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), { create: true }, cb) - ], (err) => { - expect(err).to.not.exist() + await ipfs.files.mkdir(testDir) + await ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), { create: true }) - pull( - ipfs.files.readPullStream(`${testDir}/a`), - collect((err, bufs) => { - expect(err).to.not.exist() - expect(bufs).to.eql([Buffer.from('Hello, world!')]) - done() - }) - ) - }) + const bufs = await pullToPromise.any(ipfs.files.readPullStream(`${testDir}/a`)) + + expect(bufs).to.eql([Buffer.from('Hello, world!')]) }) }) } diff --git a/src/files-mfs/read-readable-stream.js b/src/files-mfs/read-readable-stream.js index d4f525339..6a6491898 100644 --- a/src/files-mfs/read-readable-stream.js +++ b/src/files-mfs/read-readable-stream.js @@ -1,10 +1,9 @@ /* eslint-env mocha */ 'use strict' -const series = require('async/series') const hat = require('hat') const { getDescribe, getIt, expect } = require('../utils/mocha') -const bl = require('bl') +const getStream = require('get-stream') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -33,36 +32,26 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should not read not found, expect error', (done) => { + it('should not read not found, expect error', () => { const testDir = `/test-${hat()}` - const stream = ipfs.files.readReadableStream(`${testDir}/404`) - stream.on('data', () => {}) - 
stream.once('error', (err) => {
-      expect(err).to.exist()
-      expect(err.message).to.contain('does not exist')
-      done()
-    })
+    return expect(getStream(stream)).to.eventually.be.rejected
+      .and.be.an.instanceOf(Error)
+      .and.have.property('message')
+      .that.include('does not exist')
   })

-  it('should read file', (done) => {
+  it('should read file', async () => {
     const testDir = `/test-${hat()}`

-    series([
-      (cb) => ipfs.files.mkdir(testDir, cb),
-      (cb) => ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), { create: true }, cb)
-    ], (err) => {
-      expect(err).to.not.exist()
+    await ipfs.files.mkdir(testDir)
+    await ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), { create: true })

-      const stream = ipfs.files.readReadableStream(`${testDir}/a`)
+    const stream = ipfs.files.readReadableStream(`${testDir}/a`)

-      stream.pipe(bl((err, buf) => {
-        expect(err).to.not.exist()
-        expect(buf).to.eql(Buffer.from('Hello, world!'))
-        done()
-      }))
-    })
+    const buf = await getStream(stream)
+    expect(buf).to.eql('Hello, world!')
   })
 })
}
diff --git a/src/files-mfs/read.js b/src/files-mfs/read.js
index 012803493..0a306ea7a 100644
--- a/src/files-mfs/read.js
+++ b/src/files-mfs/read.js
@@ -1,7 +1,6 @@
 /* eslint-env mocha */
 'use strict'

-const series = require('async/series')
 const hat = require('hat')
 const { fixtures } = require('../files-regular/utils')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
@@ -33,31 +32,24 @@ module.exports = (createCommon, options) => {
   after((done) => common.teardown(done))

-  it('should not read not found, expect error', (done) => {
+  it('should not read not found, expect error', () => {
     const testDir = `/test-${hat()}`

-    ipfs.files.read(`${testDir}/404`, (err) => {
-      expect(err).to.exist()
-      expect(err.message).to.contain('does not exist')
-      done()
-    })
+    return expect(ipfs.files.read(`${testDir}/404`)).to.eventually.be.rejected
+      .and.be.an.instanceOf(Error)
+      .and.have.property('message')
+      .that.include('does not exist')
   })

-  it('should read file', (done) => {
+  it('should read file', async () => {
     const testDir = `/test-${hat()}`

-    series([
-      (cb) => ipfs.files.mkdir(testDir, cb),
-      (cb) => ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), { create: true }, cb)
-    ], (err) => {
-      expect(err).to.not.exist()
+    await ipfs.files.mkdir(testDir)
+    await ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), { create: true })

-      ipfs.files.read(`${testDir}/a`, (err, buf) => {
-        expect(err).to.not.exist()
-        expect(buf).to.eql(Buffer.from('Hello, world!'))
-        done()
-      })
-    })
+    const buf = await ipfs.files.read(`${testDir}/a`)
+
+    expect(buf).to.eql(Buffer.from('Hello, world!'))
   })

   it('should read from outside of mfs', async () => {
diff --git a/src/files-mfs/rm.js b/src/files-mfs/rm.js
index 649a79268..abe456f85 100644
--- a/src/files-mfs/rm.js
+++ b/src/files-mfs/rm.js
@@ -1,7 +1,6 @@
 /* eslint-env mocha */
 'use strict'

-const series = require('async/series')
 const hat = require('hat')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
@@ -32,42 +31,33 @@ module.exports = (createCommon, options) => {
   after((done) => common.teardown(done))

-  it('should not remove not found file/dir, expect error', (done) => {
+  it('should not remove not found file/dir, expect error', () => {
     const testDir = `/test-${hat()}`

-    ipfs.files.rm(`${testDir}/a`, (err) => {
-      expect(err).to.exist()
-      done()
-    })
+    return expect(ipfs.files.rm(`${testDir}/a`)).to.eventually.be.rejected()
   })

-  it('should remove file, expect no
error', (done) => { + it('should remove file, expect no error', async () => { const testDir = `/test-${hat()}` - series([ - (cb) => ipfs.files.mkdir(testDir, { parents: true }, cb), - (cb) => ipfs.files.write(`${testDir}/c`, Buffer.from('Hello, world!'), { create: true }, cb) - ], (err) => { - expect(err).to.not.exist() + await ipfs.files.mkdir(testDir, { p: true }) + await ipfs.files.write(`${testDir}/c`, Buffer.from('Hello, world!'), { create: true }) - ipfs.files.rm(`${testDir}/c`, (err) => { - expect(err).to.not.exist() - done() - }) - }) + await ipfs.files.rm(`${testDir}/c`) + + const contents = await ipfs.files.ls(testDir) + expect(contents).to.be.an('array').and.to.be.empty() }) - it('should remove dir, expect no error', (done) => { + it('should remove dir, expect no error', async () => { const testDir = `/test-${hat()}` - ipfs.files.mkdir(`${testDir}/lv1/lv2`, { parents: true }, (err) => { - expect(err).to.not.exist() + await ipfs.files.mkdir(`${testDir}/lv1/lv2`, { p: true }) - ipfs.files.rm(`${testDir}/lv1/lv2`, { recursive: true }, (err) => { - expect(err).to.not.exist() - done() - }) - }) + await ipfs.files.rm(`${testDir}/lv1/lv2`, { recursive: true }) + + const lv1Contents = await ipfs.files.ls(`${testDir}/lv1`) + expect(lv1Contents).to.be.an('array').and.to.be.empty() }) }) } diff --git a/src/files-mfs/stat.js b/src/files-mfs/stat.js index 42c23305e..87713181b 100644 --- a/src/files-mfs/stat.js +++ b/src/files-mfs/stat.js @@ -1,7 +1,6 @@ /* eslint-env mocha */ 'use strict' -const series = require('async/series') const hat = require('hat') const { fixtures } = require('../files-regular/utils') const { getDescribe, getIt, expect } = require('../utils/mocha') @@ -31,109 +30,91 @@ module.exports = (createCommon, options) => { }) }) - before((done) => ipfs.add(fixtures.smallFile.data, done)) + before(async () => { await ipfs.add(fixtures.smallFile.data) }) after((done) => common.teardown(done)) - it('should not stat not found file/dir, expect error', function (done) { + it('should not stat not found file/dir, expect error', function () { const testDir = `/test-${hat()}` - ipfs.files.stat(`${testDir}/404`, (err) => { - expect(err).to.exist() - done() - }) + return expect(ipfs.files.stat(`${testDir}/404`)).to.eventually.be.rejected() }) - it('should stat file', function (done) { + it('should stat file', async function () { const testDir = `/test-${hat()}` - series([ - (cb) => ipfs.files.mkdir(testDir, { parents: true }, cb), - (cb) => ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }, cb) - ], (err) => { - expect(err).to.not.exist() + await ipfs.files.mkdir(testDir, { p: true }) + await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }) - ipfs.files.stat(`${testDir}/b`, (err, stat) => { - expect(err).to.not.exist() - expect(stat).to.include({ - type: 'file', - blocks: 1, - size: 13, - hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T', - cumulativeSize: 71, - withLocality: false - }) - expect(stat.local).to.be.undefined() - expect(stat.sizeLocal).to.be.undefined() - done() - }) + const stat = await ipfs.files.stat(`${testDir}/b`) + + expect(stat).to.include({ + type: 'file', + blocks: 1, + size: 13, + hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T', + cumulativeSize: 71, + withLocality: false }) + expect(stat.local).to.be.undefined() + expect(stat.sizeLocal).to.be.undefined() }) - it('should stat dir', function (done) { + it('should stat dir', async function () { const testDir = `/test-${hat()}` - series([ - (cb) 
=> ipfs.files.mkdir(testDir, { parents: true }, cb), - (cb) => ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), { create: true }, cb) - ], (err) => { - expect(err).to.not.exist() + await ipfs.files.mkdir(testDir, { p: true }) + await ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), { create: true }) - ipfs.files.stat(testDir, (err, stat) => { - expect(err).to.not.exist() - expect(stat).to.include({ - type: 'directory', - blocks: 1, - size: 0, - hash: 'QmQGn7EvzJZRbhcwHrp4UeMeS56WsLmrey9JhfkymjzXQu', - cumulativeSize: 118, - withLocality: false - }) - expect(stat.local).to.be.undefined() - expect(stat.sizeLocal).to.be.undefined() - done() - }) + const stat = await ipfs.files.stat(testDir) + + expect(stat).to.include({ + type: 'directory', + blocks: 1, + size: 0, + hash: 'QmQGn7EvzJZRbhcwHrp4UeMeS56WsLmrey9JhfkymjzXQu', + cumulativeSize: 118, + withLocality: false }) + expect(stat.local).to.be.undefined() + expect(stat.sizeLocal).to.be.undefined() }) // TODO enable this test when this feature gets released on go-ipfs - it.skip('should stat withLocal file', function (done) { - ipfs.files.stat('/test/b', { withLocal: true }, (err, stat) => { - expect(err).to.not.exist() - expect(stat).to.eql({ - type: 'file', - blocks: 1, - size: 13, - hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T', - cumulativeSize: 71, - withLocality: true, - local: true, - sizeLocal: 71 - }) - done() + it.skip('should stat withLocal file', async function () { + const stat = await ipfs.files.stat('/test/b', { withLocal: true }) + + expect(stat).to.eql({ + type: 'file', + blocks: 1, + size: 13, + hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T', + cumulativeSize: 71, + withLocality: true, + local: true, + sizeLocal: 71 }) }) // TODO enable this test when this feature gets released on go-ipfs - it.skip('should stat withLocal dir', function (done) { - ipfs.files.stat('/test', { withLocal: true }, (err, stat) => { - expect(err).to.not.exist() - expect(stat).to.eql({ - type: 'directory', - blocks: 2, - size: 0, - hash: 'QmVrkkNurBCeJvPRohW5JTvJG4AxGrFg7FnmsZZUS6nJto', - cumulativeSize: 216, - withLocality: true, - local: true, - sizeLocal: 216 - }) - done() + it.skip('should stat withLocal dir', async function () { + const stat = await ipfs.files.stat('/test', { withLocal: true }) + + expect(stat).to.eql({ + type: 'directory', + blocks: 2, + size: 0, + hash: 'QmVrkkNurBCeJvPRohW5JTvJG4AxGrFg7FnmsZZUS6nJto', + cumulativeSize: 216, + withLocality: true, + local: true, + sizeLocal: 216 }) }) it('should stat outside of mfs', async () => { const stat = await ipfs.files.stat('/ipfs/' + fixtures.smallFile.cid) + expect(stat).to.include({ type: 'file', blocks: 0, diff --git a/src/files-mfs/write.js b/src/files-mfs/write.js index 7b9a9f4a8..a8352a025 100644 --- a/src/files-mfs/write.js +++ b/src/files-mfs/write.js @@ -31,41 +31,28 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should not write to non existent file, expect error', function (done) { + it('should not write to non existent file, expect error', function () { const testDir = `/test-${hat()}` - ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), (err) => { - expect(err).to.exist() - done() - }) + return expect(ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'))).to.eventually.be.rejected() }) - it('should write to non existent file with create flag', function (done) { + it('should write to non existent file with create flag', async function () { const testPath = 
`/test-${hat()}` - ipfs.files.write(testPath, Buffer.from('Hello, world!'), { create: true }, (err) => { - expect(err).to.not.exist() + await ipfs.files.write(testPath, Buffer.from('Hello, world!'), { create: true }) - ipfs.files.stat(testPath, (err, stats) => { - expect(err).to.not.exist() - expect(stats.type).to.equal('file') - done() - }) - }) + const stats = await ipfs.files.stat(testPath) + expect(stats.type).to.equal('file') }) - it('should write to deeply nested non existent file with create and parents flags', function (done) { + it('should write to deeply nested non existent file with create and parents flags', async function () { const testPath = `/foo/bar/baz/test-${hat()}` - ipfs.files.write(testPath, Buffer.from('Hello, world!'), { create: true, parents: true }, (err) => { - expect(err).to.not.exist() + await ipfs.files.write(testPath, Buffer.from('Hello, world!'), { create: true, parents: true }) - ipfs.files.stat(testPath, (err, stats) => { - expect(err).to.not.exist() - expect(stats.type).to.equal('file') - done() - }) - }) + const stats = await ipfs.files.stat(testPath) + expect(stats.type).to.equal('file') }) }) } From 2154ebef26dcd4578df6db214d0265994de9d884 Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Wed, 20 Nov 2019 23:28:54 +0000 Subject: [PATCH 08/45] chore: files-regular async/await refactor --- src/files-regular/add-from-fs.js | 67 ++-- src/files-regular/add-from-stream.js | 11 +- src/files-regular/add-from-url.js | 128 ++++---- src/files-regular/add-pull-stream.js | 37 +-- src/files-regular/add-readable-stream.js | 20 +- src/files-regular/add.js | 302 +++++++----------- src/files-regular/cat-pull-stream.js | 33 +- src/files-regular/cat-readable-stream.js | 20 +- src/files-regular/cat.js | 210 +++++------- src/files-regular/get-pull-stream.js | 26 +- src/files-regular/get-readable-stream.js | 29 +- src/files-regular/get.js | 293 +++++++---------- src/files-regular/ls-pull-stream.js | 127 ++++---- src/files-regular/ls-readable-stream.js | 122 ++++--- src/files-regular/ls.js | 206 ++++++------ src/files-regular/refs-local-pull-stream.js | 9 +- .../refs-local-readable-stream.js | 9 +- src/files-regular/refs-local-tests.js | 18 +- src/files-regular/refs-local.js | 2 +- src/files-regular/refs-pull-stream.js | 11 +- src/files-regular/refs-readable-stream.js | 11 +- src/files-regular/refs-tests.js | 153 ++++----- 22 files changed, 744 insertions(+), 1100 deletions(-) diff --git a/src/files-regular/add-from-fs.js b/src/files-regular/add-from-fs.js index b2f35cfa9..b8d27bdf3 100644 --- a/src/files-regular/add-from-fs.js +++ b/src/files-regular/add-from-fs.js @@ -35,66 +35,55 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should add a directory from the file system', (done) => { + it('should add a directory from the file system', async () => { const filesPath = path.join(fixturesPath, 'test-folder') - ipfs.addFromFs(filesPath, { recursive: true }, (err, result) => { - expect(err).to.not.exist() - expect(result.length).to.be.above(8) - done() - }) + + const result = await ipfs.addFromFs(filesPath, { recursive: true }) + expect(result.length).to.be.above(8) }) - it('should add a directory from the file system with an odd name', (done) => { + it('should add a directory from the file system with an odd name', async () => { const filesPath = path.join(fixturesPath, 'weird name folder [v0]') - ipfs.addFromFs(filesPath, { recursive: true }, (err, result) => { - expect(err).to.not.exist() - expect(result.length).to.be.above(8) - 
done() - }) + + const result = await ipfs.addFromFs(filesPath, { recursive: true }) + expect(result.length).to.be.above(8) }) - it('should ignore a directory from the file system', (done) => { + it('should ignore a directory from the file system', async () => { const filesPath = path.join(fixturesPath, 'test-folder') - ipfs.addFromFs(filesPath, { recursive: true, ignore: ['files/**'] }, (err, result) => { - expect(err).to.not.exist() - expect(result.length).to.be.below(9) - done() - }) + + const result = await ipfs.addFromFs(filesPath, { recursive: true, ignore: ['files/**'] }) + expect(result.length).to.be.below(9) }) - it('should add a file from the file system', (done) => { + it('should add a file from the file system', async () => { const filePath = path.join(fixturesPath, 'testfile.txt') - ipfs.addFromFs(filePath, (err, result) => { - expect(err).to.not.exist() - expect(result.length).to.equal(1) - expect(result[0].path).to.equal('testfile.txt') - done() - }) + + const result = await ipfs.addFromFs(filePath) + expect(result.length).to.equal(1) + expect(result[0].path).to.equal('testfile.txt') }) - it('should add a hidden file in a directory from the file system', (done) => { + it('should add a hidden file in a directory from the file system', async () => { const filesPath = path.join(fixturesPath, 'hidden-files-folder') - ipfs.addFromFs(filesPath, { recursive: true, hidden: true }, (err, result) => { - expect(err).to.not.exist() - expect(result.length).to.be.above(10) - expect(result.map(object => object.path)).to.include('hidden-files-folder/.hiddenTest.txt') - expect(result.map(object => object.hash)).to.include('QmdbAjVmLRdpFyi8FFvjPfhTGB2cVXvWLuK7Sbt38HXrtt') - done() - }) + + const result = await ipfs.addFromFs(filesPath, { recursive: true, hidden: true }) + expect(result.length).to.be.above(10) + expect(result.map(object => object.path)).to.include('hidden-files-folder/.hiddenTest.txt') + expect(result.map(object => object.hash)).to.include('QmdbAjVmLRdpFyi8FFvjPfhTGB2cVXvWLuK7Sbt38HXrtt') }) - it('should add a file from the file system with only-hash=true', function () { + it('should add a file from the file system with only-hash=true', async function () { this.slow(10 * 1000) const content = String(Math.random() + Date.now()) const filepath = path.join(os.tmpdir(), `${content}.txt`) fs.writeFileSync(filepath, content) - return ipfs.addFromFs(filepath, { onlyHash: true }) - .then(out => { - fs.unlinkSync(filepath) - return expectTimeout(ipfs.object.get(out[0].hash), 4000) - }) + const out = await ipfs.addFromFs(filepath, { onlyHash: true }) + + fs.unlinkSync(filepath) + await expectTimeout(ipfs.object.get(out[0].hash), 4000) }) }) } diff --git a/src/files-regular/add-from-stream.js b/src/files-regular/add-from-stream.js index 96ae0938d..f7a61676b 100644 --- a/src/files-regular/add-from-stream.js +++ b/src/files-regular/add-from-stream.js @@ -32,7 +32,7 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should add from a stream', (done) => { + it('should add from a stream', async () => { const stream = new Readable({ read () { this.push(fixtures.bigFile.data) @@ -40,12 +40,9 @@ module.exports = (createCommon, options) => { } }) - ipfs.addFromStream(stream, (err, result) => { - expect(err).to.not.exist() - expect(result.length).to.equal(1) - expect(result[0].hash).to.equal(fixtures.bigFile.cid) - done() - }) + const result = await ipfs.addFromStream(stream) + expect(result.length).to.equal(1) + 
expect(result[0].hash).to.equal(fixtures.bigFile.cid)
   })
 })
}
diff --git a/src/files-regular/add-from-url.js b/src/files-regular/add-from-url.js
index 87d3e530d..9190c2600 100644
--- a/src/files-regular/add-from-url.js
+++ b/src/files-regular/add-from-url.js
@@ -1,8 +1,8 @@
 /* eslint-env mocha */
 'use strict'

+const pTimeout = require('p-timeout')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
-const parallel = require('async/parallel')
 const { echoUrl, redirectUrl } = require('../utils/echo-http-server')

 module.exports = (createCommon, options) => {
@@ -31,100 +31,90 @@ module.exports = (createCommon, options) => {
   after((done) => common.teardown(done))

-  it('should add from a HTTP URL', (done) => {
+  it('should add from a HTTP URL', async () => {
     const text = `TEST${Date.now()}`
     const url = echoUrl(text)
-    parallel({
-      result: (cb) => ipfs.addFromURL(url, cb),
-      expectedResult: (cb) => ipfs.add(Buffer.from(text), cb)
-    }, (err, { result, expectedResult }) => {
-      expect(err).to.not.exist()
-      expect(result.err).to.not.exist()
-      expect(expectedResult.err).to.not.exist()
-      expect(result[0].hash).to.equal(expectedResult[0].hash)
-      expect(result[0].size).to.equal(expectedResult[0].size)
-      expect(result[0].path).to.equal(text)
-      done()
-    })
+
+    const [result, expectedResult] = await Promise.all([
+      ipfs.addFromURL(url),
+      ipfs.add(Buffer.from(text))
+    ])
+
+    expect(result.err).to.not.exist()
+    expect(expectedResult.err).to.not.exist()
+    expect(result[0].hash).to.equal(expectedResult[0].hash)
+    expect(result[0].size).to.equal(expectedResult[0].size)
+    expect(result[0].path).to.equal(text)
   })

-  it('should add from a HTTP URL with redirection', (done) => {
+  it('should add from a HTTP URL with redirection', async () => {
     const text = `TEST${Date.now()}`
     const url = echoUrl(text) + '?foo=bar#buzz'
-    parallel({
-      result: (cb) => ipfs.addFromURL(redirectUrl(url), cb),
-      expectedResult: (cb) => ipfs.add(Buffer.from(text), cb)
-    }, (err, { result, expectedResult }) => {
-      expect(err).to.not.exist()
-      expect(result.err).to.not.exist()
-      expect(expectedResult.err).to.not.exist()
-      expect(result[0].hash).to.equal(expectedResult[0].hash)
-      expect(result[0].size).to.equal(expectedResult[0].size)
-      expect(result[0].path).to.equal(text)
-      done()
-    })
+    const [result, expectedResult] = await Promise.all([
+      ipfs.addFromURL(redirectUrl(url)),
+      ipfs.add(Buffer.from(text))
+    ])
+
+    expect(result.err).to.not.exist()
+    expect(expectedResult.err).to.not.exist()
+    expect(result[0].hash).to.equal(expectedResult[0].hash)
+    expect(result[0].size).to.equal(expectedResult[0].size)
+    expect(result[0].path).to.equal(text)
   })

-  it('should add from a URL with only-hash=true', (done) => {
+  it('should add from a URL with only-hash=true', async function () {
     const text = `TEST${Date.now()}`
     const url = echoUrl(text)
-    ipfs.addFromURL(url, { onlyHash: true }, (err, res) => {
-      expect(err).to.not.exist()
+    const res = await ipfs.addFromURL(url, { onlyHash: true })
+
+    try {
       // A successful object.get for this size data took my laptop ~14ms
-      let didTimeout = false
-      const timeoutId = setTimeout(() => {
-        didTimeout = true
-        done()
-      }, 500)
+      await pTimeout(ipfs.object.get(res[0].hash), 500)
+    } catch (err) {
+      if (err.name === 'TimeoutError') {
+        // Timing out is the expected outcome: with onlyHash=true the content
+        // is never stored, so object.get should not be able to retrieve it.
+        // This mirrors the previous callback implementation, which also
+        // passed on timeout.
+        return Promise.resolve()
+      }

-      ipfs.object.get(res[0].hash, () => {
-
clearTimeout(timeoutId) - if (didTimeout) return - expect(new Error('did not timeout')).to.not.exist() - }) - }) + throw err + } }) - it('should add from a URL with wrap-with-directory=true', (done) => { + it('should add from a URL with wrap-with-directory=true', async () => { const filename = `TEST${Date.now()}.txt` // also acts as data const url = echoUrl(filename) + '?foo=bar#buzz' const addOpts = { wrapWithDirectory: true } - parallel({ - result: (cb) => ipfs.addFromURL(url, addOpts, cb), - expectedResult: (cb) => ipfs.add([{ path: filename, content: Buffer.from(filename) }], addOpts, cb) - }, (err, { result, expectedResult }) => { - expect(err).to.not.exist() - expect(result.err).to.not.exist() - expect(expectedResult.err).to.not.exist() - expect(result).to.deep.equal(expectedResult) - done() - }) + + const [result, expectedResult] = await Promise.all([ + ipfs.addFromURL(url, addOpts), + ipfs.add([{ path: filename, content: Buffer.from(filename) }], addOpts) + ]) + expect(result.err).to.not.exist() + expect(expectedResult.err).to.not.exist() + expect(result).to.deep.equal(expectedResult) }) - it('should add from a URL with wrap-with-directory=true and URL-escaped file name', (done) => { + it('should add from a URL with wrap-with-directory=true and URL-escaped file name', async () => { const filename = `320px-Domažlice,_Jiráskova_43_(${Date.now()}).jpg` // also acts as data const url = echoUrl(filename) + '?foo=bar#buzz' const addOpts = { wrapWithDirectory: true } - parallel({ - result: (cb) => ipfs.addFromURL(url, addOpts, cb), - expectedResult: (cb) => ipfs.add([{ path: filename, content: Buffer.from(filename) }], addOpts, cb) - }, (err, { result, expectedResult }) => { - expect(err).to.not.exist() - expect(result.err).to.not.exist() - expect(expectedResult.err).to.not.exist() - expect(result).to.deep.equal(expectedResult) - done() - }) + + const [result, expectedResult] = await Promise.all([ + ipfs.addFromURL(url, addOpts), + ipfs.add([{ path: filename, content: Buffer.from(filename) }], addOpts) + ]) + + expect(result.err).to.not.exist() + expect(expectedResult.err).to.not.exist() + expect(result).to.deep.equal(expectedResult) }) - it('should not add from an invalid url', (done) => { - ipfs.addFromURL('123http://invalid', (err, result) => { - expect(err).to.exist() - expect(result).to.not.exist() - done() - }) + it('should not add from an invalid url', () => { + return expect(ipfs.addFromURL('123http://invalid')).to.eventually.be.rejected() }) }) } diff --git a/src/files-regular/add-pull-stream.js b/src/files-regular/add-pull-stream.js index e51eb3eb0..351f187bd 100644 --- a/src/files-regular/add-pull-stream.js +++ b/src/files-regular/add-pull-stream.js @@ -4,6 +4,7 @@ const { fixtures } = require('./utils') const pull = require('pull-stream') const { getDescribe, getIt, expect } = require('../utils/mocha') +const pullToPromise = require('pull-to-promise') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -32,7 +33,7 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should add pull stream of valid files and dirs', function (done) { + it('should add pull stream of valid files and dirs', async function () { const content = (name) => ({ path: `test-folder/${name}`, content: fixtures.directory.files[name] @@ -53,35 +54,21 @@ module.exports = (createCommon, options) => { const stream = ipfs.addPullStream() - pull( - pull.values(files), - stream, - pull.collect((err, filesAdded) => { - 
expect(err).to.not.exist() + const filesAdded = await pullToPromise.any(pull(pull.values(files), stream)) + const testFolderIndex = filesAdded.length - 1 - filesAdded.forEach((file) => { - if (file.path === 'test-folder') { - expect(file.hash).to.equal(fixtures.directory.cid) - done() - } - }) - }) - ) + expect(filesAdded).to.have.nested.property(`[${testFolderIndex}].path`, 'test-folder') + expect(filesAdded).to.have.nested.property(`[${testFolderIndex}].hash`, fixtures.directory.cid) }) - it('should add with object chunks and pull stream content', (done) => { + it('should add with object chunks and pull stream content', async () => { const expectedCid = 'QmRf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm' + const data = [{ content: pull.values([Buffer.from('test')]) }] + const stream = ipfs.addPullStream() - pull( - pull.values([{ content: pull.values([Buffer.from('test')]) }]), - ipfs.addPullStream(), - pull.collect((err, res) => { - if (err) return done(err) - expect(res).to.have.length(1) - expect(res[0]).to.deep.equal({ path: expectedCid, hash: expectedCid, size: 12 }) - done() - }) - ) + const res = await pullToPromise.any(pull(pull.values(data), stream)) + expect(res).to.have.property('length', 1) + expect(res[0]).to.deep.equal({ path: expectedCid, hash: expectedCid, size: 12 }) }) }) } diff --git a/src/files-regular/add-readable-stream.js b/src/files-regular/add-readable-stream.js index 291875485..5410572b6 100644 --- a/src/files-regular/add-readable-stream.js +++ b/src/files-regular/add-readable-stream.js @@ -3,6 +3,7 @@ const { fixtures } = require('./utils') const { getDescribe, getIt, expect } = require('../utils/mocha') +const getStream = require('get-stream') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -31,7 +32,7 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should add readable stream of valid files and dirs', function (done) { + it('should add readable stream of valid files and dirs', async function () { const content = (name) => ({ path: `test-folder/${name}`, content: fixtures.directory.files[name] @@ -52,20 +53,13 @@ module.exports = (createCommon, options) => { const stream = ipfs.addReadableStream() - stream.on('error', (err) => { - expect(err).to.not.exist() - }) - - stream.on('data', (file) => { - if (file.path === 'test-folder') { - expect(file.hash).to.equal(fixtures.directory.cid) - } - }) - - stream.on('end', done) - files.forEach((file) => stream.write(file)) stream.end() + + const filesArray = await getStream.array(stream) + const file = filesArray[filesArray.length - 1] + + expect(file.hash).to.equal(fixtures.directory.cid) }) }) } diff --git a/src/files-regular/add.js b/src/files-regular/add.js index 4d3c34d9d..155009140 100644 --- a/src/files-regular/add.js +++ b/src/files-regular/add.js @@ -35,19 +35,14 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should add a File', function (done) { - if (supportsFileReader) { - ipfs.add(new self.File(['should add a File'], 'filename.txt', { type: 'text/plain' }), (err, filesAdded) => { - expect(err).to.not.exist() - expect(filesAdded[0].hash).to.be.eq('QmTVfLxf3qXiJgr4KwG6UBckcNvTqBp93Rwy5f7h3mHsVC') - done() - }) - } else { - this.skip('skip in node') - } + it('should add a File', async function () { + if (!supportsFileReader) return this.skip('skip in node') + + const filesAdded = await ipfs.add(new self.File(['should add a File'], 'filename.txt', { type: 'text/plain' })) + 
expect(filesAdded[0].hash).to.be.eq('QmTVfLxf3qXiJgr4KwG6UBckcNvTqBp93Rwy5f7h3mHsVC') }) - it('should add a File as tuple', function (done) { + it('should add a File as tuple', async function () { if (!supportsFileReader) return this.skip('skip in node') const tuple = { @@ -55,14 +50,11 @@ module.exports = (createCommon, options) => { content: new self.File(['should add a File'], 'filename.txt', { type: 'text/plain' }) } - ipfs.add(tuple, (err, filesAdded) => { - expect(err).to.not.exist() - expect(filesAdded[0].hash).to.be.eq('QmTVfLxf3qXiJgr4KwG6UBckcNvTqBp93Rwy5f7h3mHsVC') - done() - }) + const filesAdded = await ipfs.add(tuple) + expect(filesAdded[0].hash).to.be.eq('QmTVfLxf3qXiJgr4KwG6UBckcNvTqBp93Rwy5f7h3mHsVC') }) - it('should add a File as array of tuple', function (done) { + it('should add a File as array of tuple', async function () { if (!supportsFileReader) return this.skip('skip in node') const tuple = { @@ -70,51 +62,33 @@ module.exports = (createCommon, options) => { content: new self.File(['should add a File'], 'filename.txt', { type: 'text/plain' }) } - ipfs.add([tuple], (err, filesAdded) => { - expect(err).to.not.exist() - expect(filesAdded[0].hash).to.be.eq('QmTVfLxf3qXiJgr4KwG6UBckcNvTqBp93Rwy5f7h3mHsVC') - done() - }) + const filesAdded = await ipfs.add([tuple]) + expect(filesAdded[0].hash).to.be.eq('QmTVfLxf3qXiJgr4KwG6UBckcNvTqBp93Rwy5f7h3mHsVC') }) - it('should add a Buffer', (done) => { - ipfs.add(fixtures.smallFile.data, (err, filesAdded) => { - expect(err).to.not.exist() + it('should add a Buffer', async () => { + const filesAdded = await ipfs.add(fixtures.smallFile.data) + expect(filesAdded).to.have.length(1) - expect(filesAdded).to.have.length(1) - const file = filesAdded[0] - expect(file.hash).to.equal(fixtures.smallFile.cid) - expect(file.path).to.equal(fixtures.smallFile.cid) - // file.size counts the overhead by IPLD nodes and unixfs protobuf - expect(file.size).greaterThan(fixtures.smallFile.data.length) - done() - }) + const file = filesAdded[0] + expect(file.hash).to.equal(fixtures.smallFile.cid) + expect(file.path).to.equal(fixtures.smallFile.cid) + // file.size counts the overhead by IPLD nodes and unixfs protobuf + expect(file.size).greaterThan(fixtures.smallFile.data.length) }) - it('should add a Buffer (promised)', () => { - return ipfs.add(fixtures.smallFile.data) - .then((filesAdded) => { - const file = filesAdded[0] - expect(file.hash).to.equal(fixtures.smallFile.cid) - expect(file.path).to.equal(fixtures.smallFile.cid) - }) - }) - - it('should add a BIG Buffer', (done) => { - ipfs.add(fixtures.bigFile.data, (err, filesAdded) => { - expect(err).to.not.exist() + it('should add a BIG Buffer', async () => { + const filesAdded = await ipfs.add(fixtures.bigFile.data) + expect(filesAdded).to.have.length(1) - expect(filesAdded).to.have.length(1) - const file = filesAdded[0] - expect(file.hash).to.equal(fixtures.bigFile.cid) - expect(file.path).to.equal(fixtures.bigFile.cid) - // file.size counts the overhead by IPLD nodes and unixfs protobuf - expect(file.size).greaterThan(fixtures.bigFile.data.length) - done() - }) + const file = filesAdded[0] + expect(file.hash).to.equal(fixtures.bigFile.cid) + expect(file.path).to.equal(fixtures.bigFile.cid) + // file.size counts the overhead by IPLD nodes and unixfs protobuf + expect(file.size).greaterThan(fixtures.bigFile.data.length) }) - it('should add a BIG Buffer with progress enabled', (done) => { + it('should add a BIG Buffer with progress enabled', async () => { let progCalled = false let accumProgress = 0 
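// Aside: a hedged sketch of the progress option exercised in this test. It
// assumes, per the assertions below, that the callback receives a cumulative
// byte count while a single file is added; addReportingProgress is a
// hypothetical helper, not part of this suite:
async function addReportingProgress (ipfs, data) {
  let latest = 0
  // the last value reported should equal data.length once add() resolves
  const result = await ipfs.add(data, { progress: (bytes) => { latest = bytes } })
  return { result, latest }
}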
function handler (p) { @@ -122,89 +96,70 @@ module.exports = (createCommon, options) => { accumProgress = p } - ipfs.add(fixtures.bigFile.data, { progress: handler }, (err, filesAdded) => { - expect(err).to.not.exist() - - expect(filesAdded).to.have.length(1) - const file = filesAdded[0] - expect(file.hash).to.equal(fixtures.bigFile.cid) - expect(file.path).to.equal(fixtures.bigFile.cid) + const filesAdded = await ipfs.add(fixtures.bigFile.data, { progress: handler }) + expect(filesAdded).to.have.length(1) - expect(progCalled).to.be.true() - expect(accumProgress).to.equal(fixtures.bigFile.data.length) - done() - }) + const file = filesAdded[0] + expect(file.hash).to.equal(fixtures.bigFile.cid) + expect(file.path).to.equal(fixtures.bigFile.cid) + expect(progCalled).to.be.true() + expect(accumProgress).to.equal(fixtures.bigFile.data.length) }) - it('should add a Buffer as tuple', (done) => { + it('should add a Buffer as tuple', async () => { const tuple = { path: 'testfile.txt', content: fixtures.smallFile.data } - ipfs.add([ - tuple - ], (err, filesAdded) => { - expect(err).to.not.exist() - - expect(filesAdded).to.have.length(1) - const file = filesAdded[0] - expect(file.hash).to.equal(fixtures.smallFile.cid) - expect(file.path).to.equal('testfile.txt') + const filesAdded = await ipfs.add([tuple]) + expect(filesAdded).to.have.length(1) - done() - }) + const file = filesAdded[0] + expect(file.hash).to.equal(fixtures.smallFile.cid) + expect(file.path).to.equal('testfile.txt') }) - it('should add a string', (done) => { + it('should add a string', async () => { const data = 'a string' const expectedCid = 'QmQFRCwEpwQZ5aQMqCsCaFbdjNLLHoyZYDjr92v1F7HeqX' - ipfs.add(data, (err, filesAdded) => { - expect(err).to.not.exist() + const filesAdded = await ipfs.add(data) + expect(filesAdded).to.be.length(1) - expect(filesAdded).to.be.length(1) - const { path, size, hash } = filesAdded[0] - expect(path).to.equal(expectedCid) - expect(size).to.equal(16) - expect(hash).to.equal(expectedCid) - done() - }) + const { path, size, hash } = filesAdded[0] + expect(path).to.equal(expectedCid) + expect(size).to.equal(16) + expect(hash).to.equal(expectedCid) }) - it('should add a TypedArray', (done) => { + it('should add a TypedArray', async () => { const data = Uint8Array.from([1, 3, 8]) const expectedCid = 'QmRyUEkVCuHC8eKNNJS9BDM9jqorUvnQJK1DM81hfngFqd' - ipfs.add(data, (err, filesAdded) => { - expect(err).to.not.exist() + const filesAdded = await ipfs.add(data) + expect(filesAdded).to.be.length(1) - expect(filesAdded).to.be.length(1) - const { path, size, hash } = filesAdded[0] - expect(path).to.equal(expectedCid) - expect(size).to.equal(11) - expect(hash).to.equal(expectedCid) - done() - }) + const { path, size, hash } = filesAdded[0] + expect(path).to.equal(expectedCid) + expect(size).to.equal(11) + expect(hash).to.equal(expectedCid) }) - it('should add readable stream', (done) => { + it('should add readable stream', async () => { const expectedCid = 'QmVv4Wz46JaZJeH5PMV4LGbRiiMKEmszPYY3g6fjGnVXBS' const rs = new Readable() rs.push(Buffer.from('some data')) rs.push(null) - ipfs.add(rs, (err, filesAdded) => { - expect(err).to.not.exist() + const filesAdded = await ipfs.add(rs) + expect(filesAdded).to.be.length(1) - expect(filesAdded).to.be.length(1) - const file = filesAdded[0] - expect(file.path).to.equal(expectedCid) - expect(file.size).to.equal(17) - expect(file.hash).to.equal(expectedCid) - done() - }) + const file = filesAdded[0] + expect(file.path).to.equal(expectedCid) + expect(file.size).to.equal(17) + 
expect(file.hash).to.equal(expectedCid)
   })

-  it('should add array of objects with readable stream content', (done) => {
+  it('should add array of objects with readable stream content', async () => {
     const expectedCid = 'QmVv4Wz46JaZJeH5PMV4LGbRiiMKEmszPYY3g6fjGnVXBS'

     const rs = new Readable()
@@ -213,50 +168,32 @@ module.exports = (createCommon, options) => {
     const tuple = { path: 'data.txt', content: rs }

-    ipfs.add([tuple], (err, filesAdded) => {
-      expect(err).to.not.exist()
+    const filesAdded = await ipfs.add([tuple])
+    expect(filesAdded).to.be.length(1)

-      expect(filesAdded).to.be.length(1)
-      const file = filesAdded[0]
-      expect(file.path).to.equal('data.txt')
-      expect(file.size).to.equal(17)
-      expect(file.hash).to.equal(expectedCid)
-      done()
-    })
+    const file = filesAdded[0]
+    expect(file.path).to.equal('data.txt')
+    expect(file.size).to.equal(17)
+    expect(file.hash).to.equal(expectedCid)
   })

-  it('should add pull stream', (done) => {
+  it('should add pull stream', async () => {
     const expectedCid = 'QmRf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm'

-    ipfs.add(pull.values([Buffer.from('test')]), (err, res) => {
-      if (err) return done(err)
-      expect(res).to.have.length(1)
-      expect(res[0]).to.deep.equal({ path: expectedCid, hash: expectedCid, size: 12 })
-      done()
-    })
+    const res = await ipfs.add(pull.values([Buffer.from('test')]))
+    expect(res).to.have.length(1)
+    expect(res[0]).to.deep.equal({ path: expectedCid, hash: expectedCid, size: 12 })
   })

-  it('should add pull stream (promised)', () => {
+  it('should add array of objects with pull stream content', async () => {
     const expectedCid = 'QmRf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm'

-    return ipfs.add(pull.values([Buffer.from('test')]))
-      .then((res) => {
-        expect(res).to.have.length(1)
-        expect(res[0]).to.deep.equal({ path: expectedCid, hash: expectedCid, size: 12 })
-      })
-  })
-
-  it('should add array of objects with pull stream content (promised)', () => {
-    const expectedCid = 'QmRf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm'
-
-    return ipfs.add([{ content: pull.values([Buffer.from('test')]) }])
-      .then((res) => {
-        expect(res).to.have.length(1)
-        expect(res[0]).to.deep.equal({ path: expectedCid, hash: expectedCid, size: 12 })
-      })
+    const res = await ipfs.add([{ content: pull.values([Buffer.from('test')]) }])
+    expect(res).to.have.length(1)
+    expect(res[0]).to.deep.equal({ path: expectedCid, hash: expectedCid, size: 12 })
   })

-  it('should add a nested directory as array of tuples', function (done) {
+  it('should add a nested directory as array of tuples', async function () {
     const content = (name) => ({
       path: `test-folder/${name}`,
       content: fixtures.directory.files[name]
@@ -275,17 +212,14 @@ module.exports = (createCommon, options) => {
       emptyDir('files/empty')
     ]

-    ipfs.add(dirs, (err, res) => {
-      expect(err).to.not.exist()
-      const root = res[res.length - 1]
+    const res = await ipfs.add(dirs)

-      expect(root.path).to.equal('test-folder')
-      expect(root.hash).to.equal(fixtures.directory.cid)
-      done()
-    })
+    const root = res[res.length - 1]
+    expect(root.path).to.equal('test-folder')
+    expect(root.hash).to.equal(fixtures.directory.cid)
   })

-  it('should add a nested directory as array of tuples with progress', function (done) {
+  it('should add a nested directory as array of tuples with progress', async function () {
     const content = (name) => ({
       path: `test-folder/${name}`,
       content: fixtures.directory.files[name]
@@ -315,19 +249,16 @@ module.exports = (createCommon, options) => {
       accumProgress += p
     }

-    ipfs.add(dirs, { progress: handler },
(err, filesAdded) => { - expect(err).to.not.exist() - const root = filesAdded[filesAdded.length - 1] + const filesAdded = await ipfs.add(dirs, { progress: handler }) - expect(progCalled).to.be.true() - expect(accumProgress).to.be.at.least(total) - expect(root.path).to.equal('test-folder') - expect(root.hash).to.equal(fixtures.directory.cid) - done() - }) + const root = filesAdded[filesAdded.length - 1] + expect(progCalled).to.be.true() + expect(accumProgress).to.be.at.least(total) + expect(root.path).to.equal('test-folder') + expect(root.hash).to.equal(fixtures.directory.cid) }) - it('should add files to a directory non sequentially', function (done) { + it('should add files to a directory non sequentially', async function () { const content = path => ({ path: `test-dir/${path}`, content: fixtures.directory.files[path.split('/').pop()] @@ -340,55 +271,42 @@ module.exports = (createCommon, options) => { content('a/alice.txt') ] - ipfs.add(input, (err, filesAdded) => { - expect(err).to.not.exist() - - const toPath = ({ path }) => path - const nonSeqDirFilePaths = input.map(toPath).filter(p => p.includes('/a/')) - const filesAddedPaths = filesAdded.map(toPath) + const filesAdded = await ipfs.add(input) - expect(nonSeqDirFilePaths.every(p => filesAddedPaths.includes(p))) - .to.be.true() + const toPath = ({ path }) => path + const nonSeqDirFilePaths = input.map(toPath).filter(p => p.includes('/a/')) + const filesAddedPaths = filesAdded.map(toPath) - done() - }) + expect(nonSeqDirFilePaths.every(p => filesAddedPaths.includes(p))).to.be.true() }) - it('should fail when passed invalid input', (done) => { + it('should fail when passed invalid input', () => { const nonValid = 138 - ipfs.add(nonValid, (err, result) => { - expect(err).to.exist() - done() - }) + return expect(ipfs.add(nonValid)).to.eventually.be.rejected() }) - it('should wrap content in a directory', (done) => { + it('should wrap content in a directory', async () => { const data = { path: 'testfile.txt', content: fixtures.smallFile.data } - ipfs.add(data, { wrapWithDirectory: true }, (err, filesAdded) => { - expect(err).to.not.exist() - expect(filesAdded).to.have.length(2) - const file = filesAdded[0] - const wrapped = filesAdded[1] - expect(file.hash).to.equal(fixtures.smallFile.cid) - expect(file.path).to.equal('testfile.txt') - expect(wrapped.path).to.equal('') - done() - }) + const filesAdded = await ipfs.add(data, { wrapWithDirectory: true }) + expect(filesAdded).to.have.length(2) + + const file = filesAdded[0] + const wrapped = filesAdded[1] + expect(file.hash).to.equal(fixtures.smallFile.cid) + expect(file.path).to.equal('testfile.txt') + expect(wrapped.path).to.equal('') }) - it('should add with only-hash=true (promised)', function () { + it('should add with only-hash=true', async function () { this.slow(10 * 1000) const content = String(Math.random() + Date.now()) - return ipfs.add(Buffer.from(content), { onlyHash: true }) - .then(files => { - expect(files).to.have.length(1) + const files = await ipfs.add(Buffer.from(content), { onlyHash: true }) + expect(files).to.have.length(1) - // 'ipfs.object.get()' should timeout because content wasn't actually added - return expectTimeout(ipfs.object.get(files[0].hash), 4000) - }) + await expectTimeout(ipfs.object.get(files[0].hash), 4000) }) }) } diff --git a/src/files-regular/cat-pull-stream.js b/src/files-regular/cat-pull-stream.js index 37fd3da08..3be3a5410 100644 --- a/src/files-regular/cat-pull-stream.js +++ b/src/files-regular/cat-pull-stream.js @@ -2,8 +2,8 @@ 'use strict' 
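// Aside: these stream patches all drain pull-stream sources through
// pull-to-promise instead of hand-rolled collect() sinks. A minimal sketch
// of the conversion, based on how pullToPromise.any is used in these diffs
// (drainToBuffer is a hypothetical helper, not part of this suite):
const pullToPromise = require('pull-to-promise')

async function drainToBuffer (source) {
  // resolves with an array of everything the source emitted
  const chunks = await pullToPromise.any(source)
  return Buffer.concat(chunks) // join the Buffer chunks, as the cat tests do
}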
const { fixtures } = require('./utils') -const pull = require('pull-stream') const { getDescribe, getIt, expect } = require('../utils/mocha') +const pullToPromise = require('pull-to-promise') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -30,25 +30,20 @@ module.exports = (createCommon, options) => { }) }) - before((done) => ipfs.add(fixtures.smallFile.data, done)) + before(() => ipfs.add(fixtures.smallFile.data)) - after((done) => common.teardown(done)) + after(() => common.teardown()) - it('should return a Pull Stream for a CID', (done) => { + it('should return a Pull Stream for a CID', async () => { const stream = ipfs.catPullStream(fixtures.smallFile.cid) - pull( - stream, - pull.concat((err, data) => { - expect(err).to.not.exist() - expect(data.length).to.equal(fixtures.smallFile.data.length) - expect(data).to.eql(fixtures.smallFile.data.toString()) - done() - }) - ) + const data = Buffer.concat(await pullToPromise.any(stream)) + + expect(data.length).to.equal(fixtures.smallFile.data.length) + expect(data.toString()).to.deep.equal(fixtures.smallFile.data.toString()) }) - it('should export a chunk of a file in a Pull Stream', (done) => { + it('should export a chunk of a file in a Pull Stream', async () => { const offset = 1 const length = 3 @@ -57,14 +52,8 @@ module.exports = (createCommon, options) => { length }) - pull( - stream, - pull.concat((err, data) => { - expect(err).to.not.exist() - expect(data.toString()).to.equal('lz ') - done() - }) - ) + const data = Buffer.concat(await pullToPromise.any(stream)) + expect(data.toString()).to.equal('lz ') }) }) } diff --git a/src/files-regular/cat-readable-stream.js b/src/files-regular/cat-readable-stream.js index 29885fa9a..517405978 100644 --- a/src/files-regular/cat-readable-stream.js +++ b/src/files-regular/cat-readable-stream.js @@ -2,8 +2,8 @@ 'use strict' const { fixtures } = require('./utils') -const bl = require('bl') const { getDescribe, getIt, expect } = require('../utils/mocha') +const getStream = require('get-stream') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -35,17 +35,14 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should return a Readable Stream for a CID', (done) => { + it('should return a Readable Stream for a CID', async () => { const stream = ipfs.catReadableStream(fixtures.bigFile.cid) + const data = await getStream.buffer(stream) - stream.pipe(bl((err, data) => { - expect(err).to.not.exist() - expect(data).to.eql(fixtures.bigFile.data) - done() - })) + expect(data).to.eql(fixtures.bigFile.data) }) - it('should export a chunk of a file in a Readable Stream', (done) => { + it('should export a chunk of a file in a Readable Stream', async () => { const offset = 1 const length = 3 @@ -54,11 +51,8 @@ module.exports = (createCommon, options) => { length }) - stream.pipe(bl((err, data) => { - expect(err).to.not.exist() - expect(data.toString()).to.equal('lz ') - done() - })) + const data = await getStream.buffer(stream) + expect(data.toString()).to.equal('lz ') }) }) } diff --git a/src/files-regular/cat.js b/src/files-regular/cat.js index 0710e22ad..9723536ef 100644 --- a/src/files-regular/cat.js +++ b/src/files-regular/cat.js @@ -3,7 +3,6 @@ const { fixtures } = require('./utils') const bs58 = require('bs58') -const parallel = require('async/parallel') const CID = require('cids') const { getDescribe, getIt, expect } = require('../utils/mocha') @@ -34,196 +33,133 @@ module.exports = 
(createCommon, options) => { after((done) => common.teardown(done)) - before((done) => { - parallel([ - (cb) => ipfs.add(fixtures.smallFile.data, cb), - (cb) => ipfs.add(fixtures.bigFile.data, cb) - ], done) - }) - - it('should cat with a base58 string encoded multihash', (done) => { - ipfs.cat(fixtures.smallFile.cid, (err, data) => { - expect(err).to.not.exist() - expect(data.toString()).to.contain('Plz add me!') - done() - }) - }) + before(() => Promise.all([ + ipfs.add(fixtures.smallFile.data), + ipfs.add(fixtures.bigFile.data) + ])) - it('should cat with a base58 string encoded multihash (promised)', () => { - return ipfs.cat(fixtures.smallFile.cid) - .then((data) => { - expect(data.toString()).to.contain('Plz add me!') - }) + it('should cat with a base58 string encoded multihash', async () => { + const data = await ipfs.cat(fixtures.smallFile.cid) + expect(data.toString()).to.contain('Plz add me!') }) - it('should cat with a Buffer multihash', (done) => { + it('should cat with a Buffer multihash', async () => { const cid = Buffer.from(bs58.decode(fixtures.smallFile.cid)) - ipfs.cat(cid, (err, data) => { - expect(err).to.not.exist() - expect(data.toString()).to.contain('Plz add me!') - done() - }) + const data = await ipfs.cat(cid) + expect(data.toString()).to.contain('Plz add me!') }) - it('should cat with a CID object', (done) => { + it('should cat with a CID object', async () => { const cid = new CID(fixtures.smallFile.cid) - ipfs.cat(cid, (err, data) => { - expect(err).to.not.exist() - expect(data.toString()).to.contain('Plz add me!') - done() - }) + const data = await ipfs.cat(cid) + expect(data.toString()).to.contain('Plz add me!') }) - it('should cat a file added as CIDv0 with a CIDv1', done => { + it('should cat a file added as CIDv0 with a CIDv1', async () => { const input = Buffer.from(`TEST${Date.now()}`) - ipfs.add(input, { cidVersion: 0 }, (err, res) => { - expect(err).to.not.exist() + const res = await ipfs.add(input, { cidVersion: 0 }) - const cidv0 = new CID(res[0].hash) - expect(cidv0.version).to.equal(0) + const cidv0 = new CID(res[0].hash) + expect(cidv0.version).to.equal(0) - const cidv1 = cidv0.toV1() + const cidv1 = cidv0.toV1() - ipfs.cat(cidv1, (err, output) => { - expect(err).to.not.exist() - expect(output).to.eql(input) - done() - }) - }) + const output = await ipfs.cat(cidv1) + expect(output).to.eql(input) }) - it('should cat a file added as CIDv1 with a CIDv0', done => { + it('should cat a file added as CIDv1 with a CIDv0', async () => { const input = Buffer.from(`TEST${Date.now()}`) - ipfs.add(input, { cidVersion: 1, rawLeaves: false }, (err, res) => { - expect(err).to.not.exist() + const res = await ipfs.add(input, { cidVersion: 1, rawLeaves: false }) - const cidv1 = new CID(res[0].hash) - expect(cidv1.version).to.equal(1) + const cidv1 = new CID(res[0].hash) + expect(cidv1.version).to.equal(1) - const cidv0 = cidv1.toV0() + const cidv0 = cidv1.toV0() - ipfs.cat(cidv0, (err, output) => { - expect(err).to.not.exist() - expect(output).to.eql(input) - done() - }) - }) + const output = await ipfs.cat(cidv0) + expect(output).to.eql(input) }) - it('should cat a BIG file', (done) => { - ipfs.cat(fixtures.bigFile.cid, (err, data) => { - expect(err).to.not.exist() - expect(data.length).to.equal(fixtures.bigFile.data.length) - expect(data).to.eql(fixtures.bigFile.data) - done() - }) + it('should cat a BIG file', async () => { + const data = await ipfs.cat(fixtures.bigFile.cid) + expect(data.length).to.equal(fixtures.bigFile.data.length) + 
expect(data).to.eql(fixtures.bigFile.data)
    })

-    it('should cat with IPFS path', (done) => {
+    it('should cat with IPFS path', async () => {
       const ipfsPath = '/ipfs/' + fixtures.smallFile.cid

-      ipfs.cat(ipfsPath, (err, data) => {
-        expect(err).to.not.exist()
-        expect(data.toString()).to.contain('Plz add me!')
-        done()
-      })
+      const data = await ipfs.cat(ipfsPath)
+      expect(data.toString()).to.contain('Plz add me!')
    })

-    it('should cat with IPFS path, nested value', (done) => {
-      const file = { path: 'a/testfile.txt', content: fixtures.smallFile.data }
+    it('should cat with IPFS path, nested value', async () => {
+      const fileToAdd = { path: 'a/testfile.txt', content: fixtures.smallFile.data }

-      ipfs.add([file], (err, filesAdded) => {
-        expect(err).to.not.exist()
+      const filesAdded = await ipfs.add([fileToAdd])

-        const file = filesAdded.find((f) => f.path === 'a')
-        expect(file).to.exist()
+      const file = filesAdded.find((f) => f.path === 'a')
+      expect(file).to.exist()

-        ipfs.cat(`/ipfs/${file.hash}/testfile.txt`, (err, data) => {
-          expect(err).to.not.exist()
-          expect(data.toString()).to.contain('Plz add me!')
-          done()
-        })
-      })
+      const data = await ipfs.cat(`/ipfs/${file.hash}/testfile.txt`)
+
+      expect(data.toString()).to.contain('Plz add me!')
    })

-    it('should cat with IPFS path, deeply nested value', (done) => {
-      const file = { path: 'a/b/testfile.txt', content: fixtures.smallFile.data }
+    it('should cat with IPFS path, deeply nested value', async () => {
+      const fileToAdd = { path: 'a/b/testfile.txt', content: fixtures.smallFile.data }

-      ipfs.add([file], (err, filesAdded) => {
-        expect(err).to.not.exist()
+      const filesAdded = await ipfs.add([fileToAdd])

-        const file = filesAdded.find((f) => f.path === 'a')
-        expect(file).to.exist()
+      const file = filesAdded.find((f) => f.path === 'a')
+      expect(file).to.exist()

-        ipfs.cat(`/ipfs/${file.hash}/b/testfile.txt`, (err, data) => {
-          expect(err).to.not.exist()
-          expect(data.toString()).to.contain('Plz add me!')
-          done()
-        })
-      })
+      const data = await ipfs.cat(`/ipfs/${file.hash}/b/testfile.txt`)
+      expect(data.toString()).to.contain('Plz add me!')
    })

-    it('should error on invalid key (promised)', () => {
+    it('should error on invalid key', () => {
       const invalidCid = 'somethingNotMultihash'

-      return ipfs.cat(invalidCid)
-        .catch((err) => {
-          expect(err).to.exist()
-
-          const errString = err.toString()
-          if (errString === 'Error: invalid ipfs ref path') {
-            expect(err.toString()).to.contain('Error: invalid ipfs ref path')
-          }
-
-          if (errString === 'Error: Invalid Key') {
-            expect(err.toString()).to.contain('Error: Invalid Key')
-          }
-        })
+      return expect(ipfs.cat(invalidCid)).to.eventually.be.rejected()
    })

-    it('should error on unknown path (promised)', () => {
-      return ipfs.cat(fixtures.smallFile.cid + '/does-not-exist')
-        .catch((err) => {
-          expect(err).to.exist()
-          expect(err.message).to.be.oneOf([
-            'file does not exist',
-            'no link named "does-not-exist" under Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP'])
-        })
+    it('should error on unknown path', () => {
+      return expect(ipfs.cat(fixtures.smallFile.cid + '/does-not-exist')).to.eventually.be.rejected()
+        .and.be.an.instanceOf(Error)
+        .and.to.have.property('message')
+        .to.be.oneOf([
+          'file does not exist',
+          'no link named "does-not-exist" under Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP'
+        ])
    })

-    it('should error on dir path (promised)', () => {
+    it('should error on dir path', async () => {
       const file = { path: 'dir/testfile.txt', content: fixtures.smallFile.data }

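+      // adding 'dir/testfile.txt' also creates the wrapping 'dir' node, and
+      // cat() on a directory CID is expected to reject
-      return 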
ipfs.add([file]) - .then((filesAdded) => { - expect(filesAdded.length).to.equal(2) - const files = filesAdded.filter((file) => file.path === 'dir') - expect(files.length).to.equal(1) - const dir = files[0] - return ipfs.cat(dir.hash) - .catch((err) => { - expect(err).to.exist() - expect(err.message).to.contain('this dag node is a directory') - }) - }) + const filesAdded = await ipfs.add([file]) + expect(filesAdded.length).to.equal(2) + + const files = filesAdded.filter((file) => file.path === 'dir') + expect(files.length).to.equal(1) + + const dir = files[0] + + const err = await expect(ipfs.cat(dir.hash)).to.be.rejected() + expect(err.message).to.contain('this dag node is a directory') }) - it('should export a chunk of a file', (done) => { + it('should export a chunk of a file', async () => { const offset = 1 const length = 3 - ipfs.cat(fixtures.smallFile.cid, { - offset, - length - }, (err, data) => { - expect(err).to.not.exist() - expect(data.toString()).to.equal('lz ') - done() - }) + const data = await ipfs.cat(fixtures.smallFile.cid, { offset, length }) + expect(data.toString()).to.equal('lz ') }) }) } diff --git a/src/files-regular/get-pull-stream.js b/src/files-regular/get-pull-stream.js index 178550436..576e09471 100644 --- a/src/files-regular/get-pull-stream.js +++ b/src/files-regular/get-pull-stream.js @@ -2,8 +2,8 @@ 'use strict' const { fixtures } = require('./utils') -const pull = require('pull-stream') const { getDescribe, getIt, expect } = require('../utils/mocha') +const pullToPromise = require('pull-to-promise') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -30,29 +30,17 @@ module.exports = (createCommon, options) => { }) }) - before((done) => ipfs.add(fixtures.smallFile.data, done)) + before(() => ipfs.add(fixtures.smallFile.data)) after((done) => common.teardown(done)) - it('should return a Pull Stream of Pull Streams', (done) => { + it('should return a Pull Stream of Pull Streams', async () => { const stream = ipfs.getPullStream(fixtures.smallFile.cid) - pull( - stream, - pull.collect((err, files) => { - expect(err).to.not.exist() - expect(files).to.be.length(1) - expect(files[0].path).to.eql(fixtures.smallFile.cid) - pull( - files[0].content, - pull.concat((err, data) => { - expect(err).to.not.exist() - expect(data.toString()).to.contain('Plz add me!') - done() - }) - ) - }) - ) + const files = await pullToPromise.any(stream) + + const data = Buffer.concat(await pullToPromise.any(files[0].content)) + expect(data.toString()).to.contain('Plz add me!') }) }) } diff --git a/src/files-regular/get-readable-stream.js b/src/files-regular/get-readable-stream.js index 98bfb3517..9ba872cdd 100644 --- a/src/files-regular/get-readable-stream.js +++ b/src/files-regular/get-readable-stream.js @@ -2,9 +2,9 @@ 'use strict' const { fixtures } = require('./utils') -const concat = require('concat-stream') const through = require('through2') const { getDescribe, getIt, expect } = require('../utils/mocha') +const getStream = require('get-stream') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -35,21 +35,24 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should return a Readable Stream of Readable Streams', (done) => { + it('should return a Readable Stream of Readable Streams', async () => { const stream = ipfs.getReadableStream(fixtures.smallFile.cid) - const files = [] - stream.pipe(through.obj((file, enc, next) => { - file.content.pipe(concat((content) => { - 
files.push({ path: file.path, content: content })
+      // I was not able to use the 'get-stream' module here, as it exceeds
+      // the timeout. It might be related to the 'pump' module that
+      // get-stream uses
+      const files = await new Promise((resolve, reject) => {
+        const filesArr = []
+        stream.pipe(through.obj(async (file, enc, next) => {
+          const content = await getStream.buffer(file.content)
+          filesArr.push({ path: file.path, content: content })
           next()
-        }))
-      }, () => {
-        expect(files).to.be.length(1)
-        expect(files[0].path).to.eql(fixtures.smallFile.cid)
-        expect(files[0].content.toString()).to.contain('Plz add me!')
-        done()
-      }))
+        }, () => resolve(filesArr)))
+      })
+
+      expect(files).to.be.length(1)
+      expect(files[0].path).to.eql(fixtures.smallFile.cid)
+      expect(files[0].content.toString()).to.contain('Plz add me!')
    })
  })
}
diff --git a/src/files-regular/get.js b/src/files-regular/get.js
index 334a0c34a..46f176502 100644
--- a/src/files-regular/get.js
+++ b/src/files-regular/get.js
@@ -4,7 +4,6 @@
 const { fixtures } = require('./utils')
 const bs58 = require('bs58')
 const parallel = require('async/parallel')
-const series = require('async/series')
 const CID = require('cids')
 const { getDescribe, getIt, expect } = require('../utils/mocha')

@@ -42,222 +41,172 @@ module.exports = (createCommon, options) => {

     after((done) => common.teardown(done))

-    it('should get with a base58 encoded multihash', (done) => {
-      ipfs.get(fixtures.smallFile.cid, (err, files) => {
-        expect(err).to.not.exist()
-
-        expect(files).to.be.length(1)
-        expect(files[0].path).to.eql(fixtures.smallFile.cid)
-        expect(files[0].content.toString('utf8')).to.contain('Plz add me!')
-        done()
-      })
+    it('should get with a base58 encoded multihash', async () => {
+      const files = await ipfs.get(fixtures.smallFile.cid)
+      expect(files).to.be.length(1)
+      expect(files[0].path).to.eql(fixtures.smallFile.cid)
+      expect(files[0].content.toString('utf8')).to.contain('Plz add me!')
    })

-    it('should get with a base58 encoded multihash (promised)', () => {
-      return ipfs.get(fixtures.smallFile.cid)
-        .then((files) => {
-          expect(files).to.be.length(1)
-          expect(files[0].path).to.equal(fixtures.smallFile.cid)
-          expect(files[0].content.toString()).to.contain('Plz add me!')
-        })
-    })
-
-    it('should get with a Buffer multihash', (done) => {
+    it('should get with a Buffer multihash', async () => {
       const cidBuf = Buffer.from(bs58.decode(fixtures.smallFile.cid))

-      ipfs.get(cidBuf, (err, files) => {
-        expect(err).to.not.exist()
-        expect(files).to.be.length(1)
-        expect(files[0].path).to.eql(fixtures.smallFile.cid)
-        expect(files[0].content.toString('utf8')).to.contain('Plz add me!')
-        done()
-      })
+      const files = await ipfs.get(cidBuf)
+      expect(files).to.be.length(1)
+      expect(files[0].path).to.eql(fixtures.smallFile.cid)
+      expect(files[0].content.toString('utf8')).to.contain('Plz add me!')
    })

-    it('should get a file added as CIDv0 with a CIDv1', done => {
+    it('should get a file added as CIDv0 with a CIDv1', async () => {
       const input = Buffer.from(`TEST${Date.now()}`)

-      ipfs.add(input, { cidVersion: 0 }, (err, res) => {
-        expect(err).to.not.exist()
+      const res = await ipfs.add(input, { cidVersion: 0 })

-        const cidv0 = new CID(res[0].hash)
-        expect(cidv0.version).to.equal(0)
+      const cidv0 = new CID(res[0].hash)
+      expect(cidv0.version).to.equal(0)

-        const cidv1 = cidv0.toV1()
+      const cidv1 = cidv0.toV1()

-        ipfs.get(cidv1, (err, output) => {
-          expect(err).to.not.exist()
-          expect(output[0].content).to.eql(input)
-          done()
-        })
-      })
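+      // a CIDv1 derived from a CIDv0 resolves to the same content
+      const output = await 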
ipfs.get(cidv1) + expect(output[0].content).to.eql(input) }) - it('should get a file added as CIDv1 with a CIDv0', done => { + it('should get a file added as CIDv1 with a CIDv0', async () => { const input = Buffer.from(`TEST${Date.now()}`) - ipfs.add(input, { cidVersion: 1, rawLeaves: false }, (err, res) => { - expect(err).to.not.exist() + const res = await ipfs.add(input, { cidVersion: 1, rawLeaves: false }) - const cidv1 = new CID(res[0].hash) - expect(cidv1.version).to.equal(1) + const cidv1 = new CID(res[0].hash) + expect(cidv1.version).to.equal(1) - const cidv0 = cidv1.toV0() + const cidv0 = cidv1.toV0() - ipfs.get(cidv0, (err, output) => { - expect(err).to.not.exist() - expect(output[0].content).to.eql(input) - done() - }) - }) + const output = await ipfs.get(cidv0) + expect(output[0].content).to.eql(input) }) - it('should get a BIG file', (done) => { - ipfs.get(fixtures.bigFile.cid, (err, files) => { - expect(err).to.not.exist() + it('should get a BIG file', async () => { + const files = await ipfs.get(fixtures.bigFile.cid) + expect(files.length).to.equal(1) + expect(files[0].path).to.equal(fixtures.bigFile.cid) + expect(files[0].content.length).to.eql(fixtures.bigFile.data.length) + expect(files[0].content).to.eql(fixtures.bigFile.data) + }) - expect(files.length).to.equal(1) - expect(files[0].path).to.equal(fixtures.bigFile.cid) - expect(files[0].content.length).to.eql(fixtures.bigFile.data.length) - expect(files[0].content).to.eql(fixtures.bigFile.data) - done() + it('should get a directory', async function () { + const content = (name) => ({ + path: `test-folder/${name}`, + content: fixtures.directory.files[name] }) - }) - it('should get a directory', function (done) { - series([ - (cb) => { - const content = (name) => ({ - path: `test-folder/${name}`, - content: fixtures.directory.files[name] - }) - - const emptyDir = (name) => ({ path: `test-folder/${name}` }) - - const dirs = [ - content('pp.txt'), - content('holmes.txt'), - content('jungle.txt'), - content('alice.txt'), - emptyDir('empty-folder'), - content('files/hello.txt'), - content('files/ipfs.txt'), - emptyDir('files/empty') - ] - - ipfs.add(dirs, (err, res) => { - expect(err).to.not.exist() - const root = res[res.length - 1] - - expect(root.path).to.equal('test-folder') - expect(root.hash).to.equal(fixtures.directory.cid) - cb() - }) - }, - (cb) => { - ipfs.get(fixtures.directory.cid, (err, files) => { - expect(err).to.not.exist() - - files = files.sort((a, b) => { - if (a.path > b.path) return 1 - if (a.path < b.path) return -1 - return 0 - }) - - // Check paths - const paths = files.map((file) => { return file.path }) - expect(paths).to.include.members([ - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/empty', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/hello.txt', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/ipfs.txt', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt', - 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt' - ]) - - // Check contents - const contents = files.map((file) => { - return file.content - ? 
file.content.toString() - : null - }) - - expect(contents).to.include.members([ - fixtures.directory.files['alice.txt'].toString(), - fixtures.directory.files['files/hello.txt'].toString(), - fixtures.directory.files['files/ipfs.txt'].toString(), - fixtures.directory.files['holmes.txt'].toString(), - fixtures.directory.files['jungle.txt'].toString(), - fixtures.directory.files['pp.txt'].toString() - ]) - cb() - }) - } - ], done) + const emptyDir = (name) => ({ path: `test-folder/${name}` }) + + const dirs = [ + content('pp.txt'), + content('holmes.txt'), + content('jungle.txt'), + content('alice.txt'), + emptyDir('empty-folder'), + content('files/hello.txt'), + content('files/ipfs.txt'), + emptyDir('files/empty') + ] + + const res = await ipfs.add(dirs) + const root = res[res.length - 1] + + expect(root.path).to.equal('test-folder') + expect(root.hash).to.equal(fixtures.directory.cid) + + let files = await ipfs.get(fixtures.directory.cid) + + files = files.sort((a, b) => { + if (a.path > b.path) return 1 + if (a.path < b.path) return -1 + return 0 + }) + + // Check paths + const paths = files.map((file) => { return file.path }) + expect(paths).to.include.members([ + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/empty', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/hello.txt', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files/ipfs.txt', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt', + 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt' + ]) + + // Check contents + const contents = files.map((file) => { + return file.content + ? 
file.content.toString()
+        : null
+    })
+
+    expect(contents).to.include.members([
+      fixtures.directory.files['alice.txt'].toString(),
+      fixtures.directory.files['files/hello.txt'].toString(),
+      fixtures.directory.files['files/ipfs.txt'].toString(),
+      fixtures.directory.files['holmes.txt'].toString(),
+      fixtures.directory.files['jungle.txt'].toString(),
+      fixtures.directory.files['pp.txt'].toString()
+    ])
    })

-    it('should get with ipfs path, as object and nested value', (done) => {
+    it('should get with ipfs path, as object and nested value', async () => {
       const file = { path: 'a/testfile.txt', content: fixtures.smallFile.data }

-      ipfs.add(file, (err, filesAdded) => {
-        expect(err).to.not.exist()
+      const filesAdded = await ipfs.add(file)

-        filesAdded.forEach((file) => {
-          if (file.path === 'a') {
-            ipfs.get(`/ipfs/${file.hash}/testfile.txt`, (err, files) => {
-              expect(err).to.not.exist()
-              expect(files).to.be.length(1)
-              expect(files[0].content.toString('utf8')).to.contain('Plz add me!')
-              done()
-            })
-          }
-        })
-      })
+      // an async forEach callback is not awaited by mocha, so find the
+      // wrapping dir entry and await the lookup directly
+      const fileAdded = filesAdded.find((f) => f.path === 'a')
+      expect(fileAdded).to.exist()
+
+      const files = await ipfs.get(`/ipfs/${fileAdded.hash}/testfile.txt`)
+      expect(files).to.be.length(1)
+      expect(files[0].content.toString('utf8')).to.contain('Plz add me!')
    })

-    it('should get with ipfs path, as array and nested value', (done) => {
+    it('should get with ipfs path, as array and nested value', async () => {
       const file = { path: 'a/testfile.txt', content: fixtures.smallFile.data }

-      ipfs.add([file], (err, filesAdded) => {
-        expect(err).to.not.exist()
+      const filesAdded = await ipfs.add([file])

-        filesAdded.forEach((file) => {
-          if (file.path === 'a') {
-            ipfs.get(`/ipfs/${file.hash}/testfile.txt`, (err, files) => {
-              expect(err).to.not.exist()
-              expect(files).to.be.length(1)
-              expect(files[0].content.toString('utf8')).to.contain('Plz add me!')
-              done()
-            })
-          }
-        })
-      })
+      const fileAdded = filesAdded.find((f) => f.path === 'a')
+      expect(fileAdded).to.exist()
+
+      const files = await ipfs.get(`/ipfs/${fileAdded.hash}/testfile.txt`)
+      expect(files).to.be.length(1)
+      expect(files[0].content.toString('utf8')).to.contain('Plz add me!')
    })

-    it('should error on invalid key', () => {
+    it('should error on invalid key', () => {
       const invalidCid = 'somethingNotMultihash'

-      return ipfs.get(invalidCid)
-        .catch((err) => {
-          expect(err).to.exist()
-          const errString = err.toString()
-          if (errString === 'Error: invalid ipfs ref path') {
-            expect(err.toString()).to.contain('Error: invalid ipfs ref path')
-          }
-          if (errString === 'Error: Invalid Key') {
-            expect(err.toString()).to.contain('Error: Invalid Key')
-          }
-        })
+      return expect(ipfs.get(invalidCid)).to.eventually.be.rejected()
    })
  })
}
diff --git a/src/files-regular/ls-pull-stream.js b/src/files-regular/ls-pull-stream.js
index 7ac99a242..1e534be7c 100644
--- a/src/files-regular/ls-pull-stream.js
+++ b/src/files-regular/ls-pull-stream.js
@@ -2,8 +2,8 @@
 'use strict'

 const { fixtures } = require('./utils')
-const pull = require('pull-stream')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
+const pullToPromise = require('pull-to-promise')

 module.exports = (createCommon, options) => {
   const describe = getDescribe(options)
@@ -32,7 +32,7 @@ module.exports = (createCommon, options) => {

     after((done) => 
common.teardown(done)) - it('should pull stream ls with a base58 encoded CID', function (done) { + it('should pull stream ls with a base58 encoded CID', async function () { const content = (name) => ({ path: `test-folder/${name}`, content: fixtures.directory.files[name] @@ -51,75 +51,66 @@ module.exports = (createCommon, options) => { emptyDir('files/empty') ] - ipfs.add(dirs, (err, res) => { - expect(err).to.not.exist() - const root = res[res.length - 1] - - expect(root.path).to.equal('test-folder') - expect(root.hash).to.equal(fixtures.directory.cid) + const res = await ipfs.add(dirs) - const cid = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' - const stream = ipfs.lsPullStream(cid) + const root = res[res.length - 1] + expect(root.path).to.equal('test-folder') + expect(root.hash).to.equal(fixtures.directory.cid) - pull( - stream, - pull.collect((err, files) => { - expect(err).to.not.exist() + const cid = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' + const stream = ipfs.lsPullStream(cid) - expect(files).to.eql([ - { - depth: 1, - name: 'alice.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', - size: 11685, - hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi', - type: 'file' - }, - { - depth: 1, - name: 'empty-folder', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder', - size: 0, - hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', - type: 'dir' - }, - { - depth: 1, - name: 'files', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files', - size: 0, - hash: 'QmZ25UfTqXGz9RsEJFg7HUAuBcmfx5dQZDXQd2QEZ8Kj74', - type: 'dir' - }, - { - depth: 1, - name: 'holmes.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt', - size: 581878, - hash: 'QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr', - type: 'file' - }, - { - depth: 1, - name: 'jungle.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt', - size: 2294, - hash: 'QmT6orWioMiSqXXPGsUi71CKRRUmJ8YkuueV2DPV34E9y9', - type: 'file' - }, - { - depth: 1, - name: 'pp.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt', - size: 4540, - hash: 'QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn', - type: 'file' - } - ]) - done() - }) - ) - }) + const files = await pullToPromise.any(stream) + expect(files).to.eql([ + { + depth: 1, + name: 'alice.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', + size: 11685, + hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi', + type: 'file' + }, + { + depth: 1, + name: 'empty-folder', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder', + size: 0, + hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', + type: 'dir' + }, + { + depth: 1, + name: 'files', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files', + size: 0, + hash: 'QmZ25UfTqXGz9RsEJFg7HUAuBcmfx5dQZDXQd2QEZ8Kj74', + type: 'dir' + }, + { + depth: 1, + name: 'holmes.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt', + size: 581878, + hash: 'QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr', + type: 'file' + }, + { + depth: 1, + name: 'jungle.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt', + size: 2294, + hash: 'QmT6orWioMiSqXXPGsUi71CKRRUmJ8YkuueV2DPV34E9y9', + type: 'file' + }, + { + depth: 1, + name: 'pp.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt', + size: 4540, + hash: 'QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn', + type: 'file' + } + ]) }) }) } diff --git a/src/files-regular/ls-readable-stream.js 
b/src/files-regular/ls-readable-stream.js index bf70728ef..55185d537 100644 --- a/src/files-regular/ls-readable-stream.js +++ b/src/files-regular/ls-readable-stream.js @@ -2,8 +2,8 @@ 'use strict' const { fixtures } = require('./utils') -const concat = require('concat-stream') const { getDescribe, getIt, expect } = require('../utils/mocha') +const getStream = require('get-stream') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -32,7 +32,7 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should readable stream ls with a base58 encoded CID', function (done) { + it('should readable stream ls with a base58 encoded CID', async function () { const content = (name) => ({ path: `test-folder/${name}`, content: fixtures.directory.files[name] @@ -51,70 +51,66 @@ module.exports = (createCommon, options) => { emptyDir('files/empty') ] - ipfs.add(dirs, (err, res) => { - expect(err).to.not.exist() - const root = res[res.length - 1] + const res = await ipfs.add(dirs) - expect(root.path).to.equal('test-folder') - expect(root.hash).to.equal(fixtures.directory.cid) + const root = res[res.length - 1] + expect(root.path).to.equal('test-folder') + expect(root.hash).to.equal(fixtures.directory.cid) - const cid = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' - const stream = ipfs.lsReadableStream(cid) + const cid = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' + const stream = ipfs.lsReadableStream(cid) - stream.pipe(concat((files) => { - expect(files).to.eql([ - { - depth: 1, - name: 'alice.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', - size: 11685, - hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi', - type: 'file' - }, - { - depth: 1, - name: 'empty-folder', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder', - size: 0, - hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', - type: 'dir' - }, - { - depth: 1, - name: 'files', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files', - size: 0, - hash: 'QmZ25UfTqXGz9RsEJFg7HUAuBcmfx5dQZDXQd2QEZ8Kj74', - type: 'dir' - }, - { - depth: 1, - name: 'holmes.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt', - size: 581878, - hash: 'QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr', - type: 'file' - }, - { - depth: 1, - name: 'jungle.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt', - size: 2294, - hash: 'QmT6orWioMiSqXXPGsUi71CKRRUmJ8YkuueV2DPV34E9y9', - type: 'file' - }, - { - depth: 1, - name: 'pp.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt', - size: 4540, - hash: 'QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn', - type: 'file' - } - ]) - done() - })) - }) + const files = await getStream.array(stream) + expect(files).to.eql([ + { + depth: 1, + name: 'alice.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', + size: 11685, + hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi', + type: 'file' + }, + { + depth: 1, + name: 'empty-folder', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder', + size: 0, + hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', + type: 'dir' + }, + { + depth: 1, + name: 'files', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files', + size: 0, + hash: 'QmZ25UfTqXGz9RsEJFg7HUAuBcmfx5dQZDXQd2QEZ8Kj74', + type: 'dir' + }, + { + depth: 1, + name: 'holmes.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt', + size: 581878, + hash: 
'QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr', + type: 'file' + }, + { + depth: 1, + name: 'jungle.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt', + size: 2294, + hash: 'QmT6orWioMiSqXXPGsUi71CKRRUmJ8YkuueV2DPV34E9y9', + type: 'file' + }, + { + depth: 1, + name: 'pp.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt', + size: 4540, + hash: 'QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn', + type: 'file' + } + ]) }) }) } diff --git a/src/files-regular/ls.js b/src/files-regular/ls.js index 6d528d686..c3217e46f 100644 --- a/src/files-regular/ls.js +++ b/src/files-regular/ls.js @@ -35,7 +35,7 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should ls with a base58 encoded CID', function (done) { + it('should ls with a base58 encoded CID', async function () { const content = (name) => ({ path: `test-folder/${name}`, content: fixtures.directory.files[name] @@ -54,73 +54,68 @@ module.exports = (createCommon, options) => { emptyDir('files/empty') ] - ipfs.add(dirs, (err, res) => { - expect(err).to.not.exist() - const root = res[res.length - 1] - - expect(root.path).to.equal('test-folder') - expect(root.hash).to.equal(fixtures.directory.cid) - - const cid = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' - ipfs.ls(cid, (err, files) => { - expect(err).to.not.exist() - - expect(files).to.eql([ - { - depth: 1, - name: 'alice.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', - size: 11685, - hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi', - type: 'file' - }, - { - depth: 1, - name: 'empty-folder', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder', - size: 0, - hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', - type: 'dir' - }, - { - depth: 1, - name: 'files', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files', - size: 0, - hash: 'QmZ25UfTqXGz9RsEJFg7HUAuBcmfx5dQZDXQd2QEZ8Kj74', - type: 'dir' - }, - { - depth: 1, - name: 'holmes.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt', - size: 581878, - hash: 'QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr', - type: 'file' - }, - { - depth: 1, - name: 'jungle.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt', - size: 2294, - hash: 'QmT6orWioMiSqXXPGsUi71CKRRUmJ8YkuueV2DPV34E9y9', - type: 'file' - }, - { - depth: 1, - name: 'pp.txt', - path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt', - size: 4540, - hash: 'QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn', - type: 'file' - } - ]) - done() - }) - }) + const res = await ipfs.add(dirs) + + const root = res[res.length - 1] + expect(root.path).to.equal('test-folder') + expect(root.hash).to.equal(fixtures.directory.cid) + + const cid = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP' + const files = await ipfs.ls(cid) + + expect(files).to.eql([ + { + depth: 1, + name: 'alice.txt', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt', + size: 11685, + hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi', + type: 'file' + }, + { + depth: 1, + name: 'empty-folder', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder', + size: 0, + hash: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', + type: 'dir' + }, + { + depth: 1, + name: 'files', + path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files', + size: 0, + hash: 'QmZ25UfTqXGz9RsEJFg7HUAuBcmfx5dQZDXQd2QEZ8Kj74', + type: 'dir' + }, + { + depth: 1, + name: 'holmes.txt', + path: 
'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt',
+        size: 581878,
+        hash: 'QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr',
+        type: 'file'
+      },
+      {
+        depth: 1,
+        name: 'jungle.txt',
+        path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt',
+        size: 2294,
+        hash: 'QmT6orWioMiSqXXPGsUi71CKRRUmJ8YkuueV2DPV34E9y9',
+        type: 'file'
+      },
+      {
+        depth: 1,
+        name: 'pp.txt',
+        path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt',
+        size: 4540,
+        hash: 'QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn',
+        type: 'file'
+      }
+    ])
    })

-    it('should ls files added as CIDv0 with a CIDv1', done => {
+    it('should ls files added as CIDv0 with a CIDv1', async () => {
       const dir = randomName('DIR')

@@ -128,26 +123,22 @@ module.exports = (createCommon, options) => {
         { path: `${dir}/${randomName('F1')}`, content: Buffer.from(randomName('D1')) }
       ]

-      ipfs.add(input, { cidVersion: 0 }, (err, res) => {
-        expect(err).to.not.exist()
+      const res = await ipfs.add(input, { cidVersion: 0 })

-        const cidv0 = new CID(res[res.length - 1].hash)
-        expect(cidv0.version).to.equal(0)
+      const cidv0 = new CID(res[res.length - 1].hash)
+      expect(cidv0.version).to.equal(0)

-        const cidv1 = cidv0.toV1()
+      const cidv1 = cidv0.toV1()

-        ipfs.ls(cidv1, (err, output) => {
-          expect(err).to.not.exist()
-          expect(output.length).to.equal(input.length)
-          output.forEach(({ hash }) => {
-            expect(res.find(file => file.hash === hash)).to.exist()
-          })
-          done()
-        })
+      const output = await ipfs.ls(cidv1)
+      expect(output.length).to.equal(input.length)
+
+      output.forEach(({ hash }) => {
+        expect(res.find(file => file.hash === hash)).to.exist()
+      })
    })

-    it('should ls files added as CIDv1 with a CIDv0', done => {
+    it('should ls files added as CIDv1 with a CIDv0', async () => {
       const dir = randomName('DIR')

@@ -155,42 +146,30 @@ module.exports = (createCommon, options) => {
         { path: `${dir}/${randomName('F1')}`, content: Buffer.from(randomName('D1')) }
       ]

-      ipfs.add(input, { cidVersion: 1, rawLeaves: false }, (err, res) => {
-        expect(err).to.not.exist()
+      const res = await ipfs.add(input, { cidVersion: 1, rawLeaves: false })

-        const cidv1 = new CID(res[res.length - 1].hash)
-        expect(cidv1.version).to.equal(1)
+      const cidv1 = new CID(res[res.length - 1].hash)
+      expect(cidv1.version).to.equal(1)

-        const cidv0 = cidv1.toV1()
+      const cidv0 = cidv1.toV0()

-        ipfs.ls(cidv0, (err, output) => {
-          expect(err).to.not.exist()
-          expect(output.length).to.equal(input.length)
-          output.forEach(({ hash }) => {
-            expect(res.find(file => file.hash === hash)).to.exist()
-          })
-          done()
-        })
+      const output = await ipfs.ls(cidv0)
+      expect(output.length).to.equal(input.length)
+
+      output.forEach(({ hash }) => {
+        expect(res.find(file => file.hash === hash)).to.exist()
+      })
    })

-    it('should correctly handle a non existing hash', (done) => {
-      ipfs.ls('surelynotavalidhashheh?', (err, res) => {
-        expect(err).to.exist()
-        expect(res).to.not.exist()
-        done()
-      })
+    it('should correctly handle a non existing hash', () => {
+      return expect(ipfs.ls('surelynotavalidhashheh?')).to.eventually.be.rejected()
    })

-    it('should correctly handle a non exiting path', (done) => {
-      ipfs.ls('QmRNjDeKStKGTQXnJ2NFqeQ9oW/folder_that_isnt_there', (err, res) => {
-        expect(err).to.exist()
-        expect(res).to.not.exist()
-        done()
-      })
+    it('should correctly handle a non existing path', () => {
+      return expect(ipfs.ls('QmRNjDeKStKGTQXnJ2NFqeQ9oW/folder_that_isnt_there')).to.eventually.be.rejected()
    })

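+    // ls also accepts a full /ipfs/ path, not just a bare CID
-    it('should ls files by path', done => {
+    it('should ls files by 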
path', async () => { const dir = randomName('DIR') const input = [ @@ -198,17 +177,12 @@ module.exports = (createCommon, options) => { { path: `${dir}/${randomName('F1')}`, content: Buffer.from(randomName('D1')) } ] - ipfs.add(input, (err, res) => { - expect(err).to.not.exist() + const res = await ipfs.add(input) + const output = await ipfs.ls(`/ipfs/${res[res.length - 1].hash}`) + expect(output.length).to.equal(input.length) - ipfs.ls(`/ipfs/${res[res.length - 1].hash}`, (err, output) => { - expect(err).to.not.exist() - expect(output.length).to.equal(input.length) - output.forEach(({ hash }) => { - expect(res.find(file => file.hash === hash)).to.exist() - }) - done() - }) + output.forEach(({ hash }) => { + expect(res.find(file => file.hash === hash)).to.exist() }) }) }) diff --git a/src/files-regular/refs-local-pull-stream.js b/src/files-regular/refs-local-pull-stream.js index 0f2b69774..88774247b 100644 --- a/src/files-regular/refs-local-pull-stream.js +++ b/src/files-regular/refs-local-pull-stream.js @@ -1,14 +1,13 @@ /* eslint-env mocha */ 'use strict' -const pull = require('pull-stream') +const pullToPromise = require('pull-to-promise') module.exports = (createCommon, options) => { const ipfsRefsLocal = (ipfs) => { - return (cb) => { - const stream = ipfs.refs.localPullStream() - pull(stream, pull.collect(cb)) - } + const stream = ipfs.refs.localPullStream() + + return pullToPromise.any(stream) } require('./refs-local-tests')(createCommon, '.refs.localPullStream', ipfsRefsLocal, options) } diff --git a/src/files-regular/refs-local-readable-stream.js b/src/files-regular/refs-local-readable-stream.js index 9b1fbec7b..236961d19 100644 --- a/src/files-regular/refs-local-readable-stream.js +++ b/src/files-regular/refs-local-readable-stream.js @@ -1,15 +1,12 @@ /* eslint-env mocha */ 'use strict' -const concat = require('concat-stream') +const getStream = require('get-stream') module.exports = (createCommon, options) => { const ipfsRefsLocal = (ipfs) => { - return (cb) => { - const stream = ipfs.refs.localReadableStream() - stream.on('error', cb) - stream.pipe(concat((refs) => cb(null, refs))) - } + const stream = ipfs.refs.localReadableStream() + return getStream.array(stream) } require('./refs-local-tests')(createCommon, '.refs.localReadableStream', ipfsRefsLocal, options) } diff --git a/src/files-regular/refs-local-tests.js b/src/files-regular/refs-local-tests.js index af6f7fcb8..11f60f7de 100644 --- a/src/files-regular/refs-local-tests.js +++ b/src/files-regular/refs-local-tests.js @@ -31,7 +31,7 @@ module.exports = (createCommon, suiteName, ipfsRefsLocal, options) => { after((done) => common.teardown(done)) - it('should get local refs', function (done) { + it('should get local refs', async function () { const content = (name) => ({ path: `test-folder/${name}`, content: fixtures.directory.files[name] @@ -42,19 +42,13 @@ module.exports = (createCommon, suiteName, ipfsRefsLocal, options) => { content('holmes.txt') ] - ipfs.add(dirs, (err, res) => { - expect(err).to.not.exist() - - ipfsRefsLocal(ipfs)((err, refs) => { - expect(err).to.not.exist() + await ipfs.add(dirs) - const cids = refs.map(r => r.ref) - expect(cids).to.include('QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn') - expect(cids).to.include('QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr') + const refs = await ipfsRefsLocal(ipfs) - done() - }) - }) + const cids = refs.map(r => r.ref) + expect(cids).to.include('QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn') + 
expect(cids).to.include('QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr') }) }) } diff --git a/src/files-regular/refs-local.js b/src/files-regular/refs-local.js index d3f0b8150..53737e5d5 100644 --- a/src/files-regular/refs-local.js +++ b/src/files-regular/refs-local.js @@ -2,6 +2,6 @@ 'use strict' module.exports = (createCommon, options) => { - const ipfsRefsLocal = (ipfs) => (cb) => ipfs.refs.local(cb) + const ipfsRefsLocal = (ipfs) => ipfs.refs.local() require('./refs-local-tests')(createCommon, '.refs.local', ipfsRefsLocal, options) } diff --git a/src/files-regular/refs-pull-stream.js b/src/files-regular/refs-pull-stream.js index d26027371..518857542 100644 --- a/src/files-regular/refs-pull-stream.js +++ b/src/files-regular/refs-pull-stream.js @@ -1,14 +1,13 @@ /* eslint-env mocha */ 'use strict' -const pull = require('pull-stream') +const pullToPromise = require('pull-to-promise') module.exports = (createCommon, options) => { - const ipfsRefs = (ipfs) => { - return (path, params, cb) => { - const stream = ipfs.refsPullStream(path, params) - pull(stream, pull.collect(cb)) - } + const ipfsRefs = (ipfs) => (path, params) => { + const stream = ipfs.refsPullStream(path, params) + + return pullToPromise.any(stream) } require('./refs-tests')(createCommon, '.refsPullStream', ipfsRefs, options) } diff --git a/src/files-regular/refs-readable-stream.js b/src/files-regular/refs-readable-stream.js index 23bc40065..b49072ea8 100644 --- a/src/files-regular/refs-readable-stream.js +++ b/src/files-regular/refs-readable-stream.js @@ -1,15 +1,12 @@ /* eslint-env mocha */ 'use strict' -const concat = require('concat-stream') +const getStream = require('get-stream') module.exports = (createCommon, options) => { - const ipfsRefs = (ipfs) => { - return (path, params, cb) => { - const stream = ipfs.refsReadableStream(path, params) - stream.on('error', cb) - stream.pipe(concat((refs) => cb(null, refs))) - } + const ipfsRefs = (ipfs) => (path, params) => { + const stream = ipfs.refsReadableStream(path, params) + return getStream.array(stream) } require('./refs-tests')(createCommon, '.refsReadableStream', ipfsRefs, options) } diff --git a/src/files-regular/refs-tests.js b/src/files-regular/refs-tests.js index efb324304..b91761ef9 100644 --- a/src/files-regular/refs-tests.js +++ b/src/files-regular/refs-tests.js @@ -1,7 +1,8 @@ /* eslint-env mocha */ 'use strict' -const mapSeries = require('async/mapSeries') +const pMapSeries = require('p-map-series') +const pTimeout = require('p-timeout') const { getDescribe, getIt, expect } = require('../utils/mocha') const loadFixture = require('aegir/fixtures') const CID = require('cids') @@ -31,20 +32,14 @@ module.exports = (createCommon, suiteName, ipfsRefs, options) => { }) }) - before(function (done) { - loadPbContent(ipfs, getMockObjects(), (err, cid) => { - expect(err).to.not.exist() - pbRootCb = cid - done() - }) + before(async function () { + const cid = await loadPbContent(ipfs, getMockObjects()) + pbRootCb = cid }) - before(function (done) { - loadDagContent(ipfs, getMockObjects(), (err, cid) => { - expect(err).to.not.exist() - dagRootCid = cid - done() - }) + before(async function () { + const cid = await loadDagContent(ipfs, getMockObjects()) + dagRootCid = cid }) after((done) => common.teardown(done)) @@ -52,73 +47,50 @@ module.exports = (createCommon, suiteName, ipfsRefs, options) => { for (const [name, options] of Object.entries(getRefsTests())) { const { path, params, expected, expectError, expectTimeout } = options // eslint-disable-next-line no-loop-func - 
it(name, function (done) { + it(name, async function () { this.timeout(20 * 1000) - // If we're expecting a timeout, call done when it expires - let timeout + // Call out to IPFS + const p = (path ? path(pbRootCb) : pbRootCb) + if (expectTimeout) { - timeout = setTimeout(() => { - done() - done = null - }, expectTimeout) + return expect(pTimeout(ipfsRefs(ipfs)(p, params), expectTimeout)).to.eventually.be.rejected + .and.be.an.instanceOf(Error) + .and.to.have.property('name') + .to.eql('TimeoutError') } - // Call out to IPFS - const p = (path ? path(pbRootCb) : pbRootCb) - ipfsRefs(ipfs)(p, params, (err, refs) => { - if (!done) { - // Already timed out - return - } - - if (expectError) { - // Expected an error - expect(err).to.exist() - return done() - } - - if (expectTimeout && !err) { - // Expected a timeout but there wasn't one - return expect.fail('Expected timeout error') - } - - // Check there was no error and the refs match what was expected - expect(err).to.not.exist() - expect(refs.map(r => r.ref)).to.eql(expected) + if (expectError) { + return expect(ipfsRefs(ipfs)(p, params)).to.be.eventually.rejected.and.be.an.instanceOf(Error) + } - // Clear any pending timeout - clearTimeout(timeout) + const refs = await ipfsRefs(ipfs)(p, params) - done() - }) + // Check there was no error and the refs match what was expected + expect(refs.map(r => r.ref)).to.eql(expected) }) } - it('dag refs test', function (done) { + it('dag refs test', async function () { this.timeout(20 * 1000) // Call out to IPFS - ipfsRefs(ipfs)(`/ipfs/${dagRootCid}`, { recursive: true }, (err, refs) => { - // Check there was no error and the refs match what was expected - expect(err).to.not.exist() - expect(refs.map(r => r.ref).sort()).to.eql([ - 'QmPDqvcuA4AkhBLBuh2y49yhUB98rCnxPxa3eVNC1kAbSC', - 'QmVwtsLUHurA6wUirPSdGeEW5tfBEqenXpeRaqr8XN7bNY', - 'QmXGL3ZdYV5rNLCfHe1QsFSQGekRFzgbBu1B3XGZ7DV9fd', - 'QmcSVZRN5E814KkPy4EHnftNAR7htbFvVhRKKqFs4FBwDG', - 'QmcSVZRN5E814KkPy4EHnftNAR7htbFvVhRKKqFs4FBwDG', - 'QmdBcHbK7uDQav8YrHsfKju3EKn48knxjd96KRMFs3gtS9', - 'QmeX96opBHZHLySMFoNiWS5msxjyX6rqtr3Rr1u7uxn7zJ', - 'Qmf8MwTnY7VdcnF8WcoJ3GB24NmNd1HsGzuEWCtUYDP38x', - 'bafyreiagelcmhfn33zuslkdo7fkes3dzcr2nju6meh75zm6vqklfqiojam', - 'bafyreic2f6adq5tqnbrvwiqc3jkz2cf4tz3cz2rp6plpij2qaoufgsxwmi', - 'bafyreidoqtyvflv5v4c3gd3izxvpq4flke55ayurbrnhsxh7z5wwjc6v6e', - 'bafyreifs2ub2lnq6n2quqbi3zb5homs5iqlmm77b3am252cqzxiu7phwpy' - ]) - - done() - }) + const refs = await ipfsRefs(ipfs)(`/ipfs/${dagRootCid}`, { recursive: true }) + // Check the refs match what was expected + expect(refs.map(r => r.ref).sort()).to.eql([ + 'QmPDqvcuA4AkhBLBuh2y49yhUB98rCnxPxa3eVNC1kAbSC', + 'QmVwtsLUHurA6wUirPSdGeEW5tfBEqenXpeRaqr8XN7bNY', + 'QmXGL3ZdYV5rNLCfHe1QsFSQGekRFzgbBu1B3XGZ7DV9fd', + 'QmcSVZRN5E814KkPy4EHnftNAR7htbFvVhRKKqFs4FBwDG', + 'QmcSVZRN5E814KkPy4EHnftNAR7htbFvVhRKKqFs4FBwDG', + 'QmdBcHbK7uDQav8YrHsfKju3EKn48knxjd96KRMFs3gtS9', + 'QmeX96opBHZHLySMFoNiWS5msxjyX6rqtr3Rr1u7uxn7zJ', + 'Qmf8MwTnY7VdcnF8WcoJ3GB24NmNd1HsGzuEWCtUYDP38x', + 'bafyreiagelcmhfn33zuslkdo7fkes3dzcr2nju6meh75zm6vqklfqiojam', + 'bafyreic2f6adq5tqnbrvwiqc3jkz2cf4tz3cz2rp6plpij2qaoufgsxwmi', + 'bafyreidoqtyvflv5v4c3gd3izxvpq4flke55ayurbrnhsxh7z5wwjc6v6e', + 'bafyreifs2ub2lnq6n2quqbi3zb5homs5iqlmm77b3am252cqzxiu7phwpy' + ]) }) }) } @@ -340,43 +312,38 @@ function getRefsTests () { } } -function loadPbContent (ipfs, node, callback) { +function loadPbContent (ipfs, node) { const store = { - putData: (data, cb) => ipfs.object.put({ Data: data, Links: [] }, cb), - putLinks: (links, cb) 
=> { + putData: (data) => ipfs.object.put({ Data: data, Links: [] }), + putLinks: (links) => ipfs.object.put({ Data: '', Links: links.map(({ name, cid }) => ({ Name: name, Hash: cid, Size: 8 })) - }, cb) - } + }) } - loadContent(ipfs, store, node, callback) + return loadContent(ipfs, store, node) } -function loadDagContent (ipfs, node, callback) { +function loadDagContent (ipfs, node) { const store = { - putData: (data, cb) => { - ipfs.add(data, (err, res) => { - if (err) { - return cb(err) - } - return cb(null, res[0].hash) - }) + putData: async (data) => { + const res = await ipfs.add(data) + return res[0].hash }, - putLinks: (links, cb) => { + putLinks: (links) => { const obj = {} for (const { name, cid } of links) { obj[name] = new CID(cid) } - ipfs.dag.put(obj, cb) + return ipfs.dag.put(obj) } } - loadContent(ipfs, store, node, callback) + return loadContent(ipfs, store, node) } -function loadContent (ipfs, store, node, callback) { +async function loadContent (ipfs, store, node) { if (Buffer.isBuffer(node)) { - return store.putData(node, callback) + return store.putData(node) } if (typeof node === 'object') { @@ -389,16 +356,12 @@ function loadContent (ipfs, store, node, callback) { } return 0 }) - mapSeries(sorted, ([name, child], cb) => { - loadContent(ipfs, store, child, (err, cid) => { - cb(err, { name, cid: cid && cid.toString() }) - }) - }, (err, res) => { - if (err) { - return callback(err) - } - store.putLinks(res, callback) + const res = await pMapSeries(sorted, async ([name, child]) => { + const cid = await loadContent(ipfs, store, child) + return { name, cid: cid && cid.toString() } }) + + return store.putLinks(res) } } From 7e4eabc8dbb750dabcc04c48791516877cbc346f Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Wed, 20 Nov 2019 23:30:10 +0000 Subject: [PATCH 09/45] chore: key async/await refactor --- src/key/export.js | 9 +++------ src/key/gen.js | 13 +++++-------- src/key/import.js | 19 +++++++------------ src/key/list.js | 28 ++++++++++------------------ src/key/rename.js | 29 +++++++++++------------------ src/key/rm.js | 25 +++++++++---------------- 6 files changed, 45 insertions(+), 78 deletions(-) diff --git a/src/key/export.js b/src/key/export.js index 557d323dc..359dbe2db 100644 --- a/src/key/export.js +++ b/src/key/export.js @@ -29,12 +29,9 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should export "self" key', function (done) { - ipfs.key.export('self', hat(), (err, pem) => { - expect(err).to.not.exist() - expect(pem).to.exist() - done() - }) + it('should export "self" key', async function () { + const pem = await ipfs.key.export('self', hat()) + expect(pem).to.exist() }) }) } diff --git a/src/key/gen.js b/src/key/gen.js index 355123b41..111c14216 100644 --- a/src/key/gen.js +++ b/src/key/gen.js @@ -34,16 +34,13 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) keyTypes.forEach((kt) => { - it(`should generate a new ${kt.type} key`, function (done) { + it(`should generate a new ${kt.type} key`, async function () { this.timeout(20 * 1000) const name = hat() - ipfs.key.gen(name, kt, (err, key) => { - expect(err).to.not.exist() - expect(key).to.exist() - expect(key).to.have.property('name', name) - expect(key).to.have.property('id') - done() - }) + const key = await ipfs.key.gen(name, kt) + expect(key).to.exist() + expect(key).to.have.property('name', name) + expect(key).to.have.property('id') }) }) }) diff --git a/src/key/import.js b/src/key/import.js index 60065d3e5..62f46403d 
100644 --- a/src/key/import.js +++ b/src/key/import.js @@ -29,21 +29,16 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should import an exported key', (done) => { + it('should import an exported key', async () => { const password = hat() - ipfs.key.export('self', password, (err, pem) => { - expect(err).to.not.exist() - expect(pem).to.exist() + const pem = await ipfs.key.export('self', password) + expect(pem).to.exist() - ipfs.key.import('clone', pem, password, (err, key) => { - expect(err).to.not.exist() - expect(key).to.exist() - expect(key).to.have.property('name', 'clone') - expect(key).to.have.property('id') - done() - }) - }) + const key = await ipfs.key.import('clone', pem, password) + expect(key).to.exist() + expect(key).to.have.property('name', 'clone') + expect(key).to.have.property('id') }) }) } diff --git a/src/key/list.js b/src/key/list.js index ad5b935cd..3f471c958 100644 --- a/src/key/list.js +++ b/src/key/list.js @@ -2,7 +2,7 @@ /* eslint max-nested-callbacks: ["error", 6] */ 'use strict' -const timesSeries = require('async/timesSeries') +const pTimes = require('p-times') const hat = require('hat') const { getDescribe, getIt, expect } = require('../utils/mocha') @@ -31,27 +31,19 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should list all the keys', function (done) { + it('should list all the keys', async function () { this.timeout(60 * 1000) - timesSeries(3, (n, cb) => { - ipfs.key.gen(hat(), { type: 'rsa', size: 2048 }, cb) - }, (err, keys) => { - expect(err).to.not.exist() - - ipfs.key.list((err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - expect(res).to.be.an('array') - expect(res.length).to.be.above(keys.length - 1) + const keys = await pTimes(3, () => ipfs.key.gen(hat(), { type: 'rsa', size: 2048 }), { concurrency: 1 }) - keys.forEach(key => { - const found = res.find(({ id, name }) => name === key.name && id === key.id) - expect(found).to.exist() - }) + const res = await ipfs.key.list() + expect(res).to.exist() + expect(res).to.be.an('array') + expect(res.length).to.be.above(keys.length - 1) - done() - }) + keys.forEach(key => { + const found = res.find(({ id, name }) => name === key.name && id === key.id) + expect(found).to.exist() }) }) }) diff --git a/src/key/rename.js b/src/key/rename.js index b2c89561a..3bb02df1a 100644 --- a/src/key/rename.js +++ b/src/key/rename.js @@ -30,30 +30,23 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should rename a key', function (done) { + it('should rename a key', async function () { this.timeout(30 * 1000) const oldName = hat() const newName = hat() - ipfs.key.gen(oldName, { type: 'rsa', size: 2048 }, (err, key) => { - expect(err).to.not.exist() + const key = await ipfs.key.gen(oldName, { type: 'rsa', size: 2048 }) - ipfs.key.rename(oldName, newName, (err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - expect(res).to.have.property('was', oldName) - expect(res).to.have.property('now', newName) - expect(res).to.have.property('id', key.id) - - ipfs.key.list((err, res) => { - expect(err).to.not.exist() - expect(res.find(k => k.name === newName)).to.exist() - expect(res.find(k => k.name === oldName)).to.not.exist() - done() - }) - }) - }) + const renameRes = await ipfs.key.rename(oldName, newName) + expect(renameRes).to.exist() + expect(renameRes).to.have.property('was', oldName) + expect(renameRes).to.have.property('now', newName) + 
expect(renameRes).to.have.property('id', key.id) + + const res = await ipfs.key.list() + expect(res.find(k => k.name === newName)).to.exist() + expect(res.find(k => k.name === oldName)).to.not.exist() }) }) } diff --git a/src/key/rm.js b/src/key/rm.js index 87ce3e93b..f303c7b69 100644 --- a/src/key/rm.js +++ b/src/key/rm.js @@ -30,25 +30,18 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should rm a key', function (done) { + it('should rm a key', async function () { this.timeout(30 * 1000) - ipfs.key.gen(hat(), { type: 'rsa', size: 2048 }, (err, key) => { - expect(err).to.not.exist() + const key = await ipfs.key.gen(hat(), { type: 'rsa', size: 2048 }) - ipfs.key.rm(key.name, (err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - expect(res).to.have.property('name', key.name) - expect(res).to.have.property('id', key.id) - - ipfs.key.list((err, res) => { - expect(err).to.not.exist() - expect(res.find(k => k.name === key.name)).to.not.exist() - done() - }) - }) - }) + const removeRes = await ipfs.key.rm(key.name) + expect(removeRes).to.exist() + expect(removeRes).to.have.property('name', key.name) + expect(removeRes).to.have.property('id', key.id) + + const res = await ipfs.key.list() + expect(res.find(k => k.name === key.name)).to.not.exist() }) }) } From 1f810f9bf8f4f5804ae02dd87e5a2e71da5e22b5 Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Wed, 20 Nov 2019 23:30:36 +0000 Subject: [PATCH 10/45] chore: miscellaneous async/await refactor --- src/miscellaneous/dns.js | 24 ++++++++++++------------ src/miscellaneous/id.js | 19 ++++--------------- src/miscellaneous/stop.js | 15 +++++---------- src/miscellaneous/version.js | 22 +++++----------------- 4 files changed, 26 insertions(+), 54 deletions(-) diff --git a/src/miscellaneous/dns.js b/src/miscellaneous/dns.js index a8e2fe8a5..665d1c321 100644 --- a/src/miscellaneous/dns.js +++ b/src/miscellaneous/dns.js @@ -32,25 +32,25 @@ module.exports = (createCommon, options) => { common.teardown(done) }) - it('should non-recursively resolve ipfs.io', () => { - return ipfs.dns('ipfs.io', { recursive: false }).then(res => { + it('should non-recursively resolve ipfs.io', async () => { + const res = await ipfs.dns('ipfs.io', { recursive: false }) + // matches pattern /ipns/ - expect(res).to.match(/\/ipns\/.+$/) - }) + expect(res).to.match(/\/ipns\/.+$/) }) - it('should recursively resolve ipfs.io', () => { - return ipfs.dns('ipfs.io', { recursive: true }).then(res => { + it('should recursively resolve ipfs.io', async () => { + const res = await ipfs.dns('ipfs.io', { recursive: true }) + // matches pattern /ipfs/ - expect(res).to.match(/\/ipfs\/.+$/) - }) + expect(res).to.match(/\/ipfs\/.+$/) }) - it('should resolve subdomain docs.ipfs.io', () => { - return ipfs.dns('docs.ipfs.io').then(res => { + it('should resolve subdomain docs.ipfs.io', async () => { + const res = await ipfs.dns('docs.ipfs.io') + // matches pattern /ipfs/ - expect(res).to.match(/\/ipfs\/.+$/) - }) + expect(res).to.match(/\/ipfs\/.+$/) }) }) } diff --git a/src/miscellaneous/id.js b/src/miscellaneous/id.js index 4718ce768..36ffbfed4 100644 --- a/src/miscellaneous/id.js +++ b/src/miscellaneous/id.js @@ -27,21 +27,10 @@ module.exports = (createCommon, options) => { common.teardown(done) }) - it('should get the node ID', (done) => { - ipfs.id((err, res) => { - expect(err).to.not.exist() - expect(res).to.have.a.property('id') - expect(res).to.have.a.property('publicKey') - done() - }) - }) - - it('should get the node ID 
(promised)', () => { - return ipfs.id() - .then((res) => { - expect(res).to.have.a.property('id') - expect(res).to.have.a.property('publicKey') - }) + it('should get the node ID', async () => { + const res = await ipfs.id() + expect(res).to.have.a.property('id') + expect(res).to.have.a.property('publicKey') }) }) } diff --git a/src/miscellaneous/stop.js b/src/miscellaneous/stop.js index fcc7eb515..779eae2ba 100644 --- a/src/miscellaneous/stop.js +++ b/src/miscellaneous/stop.js @@ -31,19 +31,14 @@ module.exports = (createCommon, options) => { }) // must be last test to run - it('should stop the node', function (done) { + it('should stop the node', async function () { this.timeout(10 * 1000) - ipfs.stop((err) => { - expect(err).to.not.exist() + await ipfs.stop() - // Trying to stop an already stopped node should return an error - // as the node can't respond to requests anymore - ipfs.stop((err) => { - expect(err).to.exist() - done() - }) - }) + // Trying to stop an already stopped node should return an error + // as the node can't respond to requests anymore + return expect(ipfs.stop()).to.eventually.be.rejected() }) }) } diff --git a/src/miscellaneous/version.js b/src/miscellaneous/version.js index 3c8e95e92..f92f519e9 100644 --- a/src/miscellaneous/version.js +++ b/src/miscellaneous/version.js @@ -30,23 +30,11 @@ module.exports = (createCommon, options) => { common.teardown(done) }) - it('should get the node version', (done) => { - ipfs.version((err, result) => { - expect(err).to.not.exist() - expect(result).to.have.a.property('version') - expect(result).to.have.a.property('commit') - expect(result).to.have.a.property('repo') - done() - }) - }) - - it('should get the node version (promised)', () => { - return ipfs.version() - .then((result) => { - expect(result).to.have.a.property('version') - expect(result).to.have.a.property('commit') - expect(result).to.have.a.property('repo') - }) + it('should get the node version', async () => { + const result = await ipfs.version() + expect(result).to.have.a.property('version') + expect(result).to.have.a.property('commit') + expect(result).to.have.a.property('repo') }) }) } From 7ba7a2de60a52c8fd58ab5340f483e26e2099ebe Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Wed, 20 Nov 2019 23:30:59 +0000 Subject: [PATCH 11/45] chore: name-pubsub async/await refactor --- src/name-pubsub/cancel.js | 60 ++++++++++++++------------------------- src/name-pubsub/state.js | 14 ++++----- src/name-pubsub/subs.js | 32 +++++++-------------- 3 files changed, 37 insertions(+), 69 deletions(-) diff --git a/src/name-pubsub/cancel.js b/src/name-pubsub/cancel.js index b46dcb827..9f4fe529a 100644 --- a/src/name-pubsub/cancel.js +++ b/src/name-pubsub/cancel.js @@ -2,7 +2,6 @@ /* eslint-env mocha */ 'use strict' -const auto = require('async/auto') const PeerId = require('peer-id') const { spawnNodeWithId } = require('../utils/spawn') @@ -38,51 +37,36 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should return false when the name that is intended to cancel is not subscribed', function (done) { + it('should return false when the name that is intended to cancel is not subscribed', async function () { this.timeout(60 * 1000) - ipfs.name.pubsub.cancel(nodeId, (err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - expect(res).to.have.property('canceled') - expect(res.canceled).to.eql(false) - - done() - }) + const res = await ipfs.name.pubsub.cancel(nodeId) + expect(res).to.exist() + 
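// a descriptive note, assuming go-ipfs semantics: cancelling a name that was never subscribed should not throw; the API reports canceled: false instead + 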
expect(res).to.have.property('canceled') + expect(res.canceled).to.eql(false) }) - it('should cancel a subscription correctly returning true', function (done) { + it('should cancel a subscription correctly returning true', async function () { this.timeout(300 * 1000) - PeerId.create({ bits: 512 }, (err, peerId) => { - expect(err).to.not.exist() + const peerId = await PeerId.create({ bits: 512 }) - const id = peerId.toB58String() - const ipnsPath = `/ipns/${id}` + const id = peerId.toB58String() + const ipnsPath = `/ipns/${id}` - ipfs.name.pubsub.subs((err, res) => { - expect(err).to.not.exist() - expect(res).to.be.an('array').that.does.not.include(ipnsPath) - - ipfs.name.resolve(id, (err) => { - expect(err).to.exist() - auto({ - subs1: (cb) => ipfs.name.pubsub.subs(cb), - cancel: ['subs1', (_, cb) => ipfs.name.pubsub.cancel(ipnsPath, cb)], - subs2: ['cancel', (_, cb) => ipfs.name.pubsub.subs(cb)] - }, (err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - expect(res.subs1).to.be.an('array').that.does.include(ipnsPath) - expect(res.cancel).to.have.property('canceled') - expect(res.cancel.canceled).to.eql(true) - expect(res.subs2).to.be.an('array').that.does.not.include(ipnsPath) - - done() - }) - }) - }) - }) + const subs = await ipfs.name.pubsub.subs() + expect(subs).to.be.an('array').that.does.not.include(ipnsPath) + + await expect(ipfs.name.resolve(id)).to.be.rejected() + + const subs1 = await ipfs.name.pubsub.subs() + const cancel = await ipfs.name.pubsub.cancel(ipnsPath) + const subs2 = await ipfs.name.pubsub.subs() + + expect(subs1).to.be.an('array').that.does.include(ipnsPath) + expect(cancel).to.have.property('canceled') + expect(cancel.canceled).to.eql(true) + expect(subs2).to.be.an('array').that.does.not.include(ipnsPath) }) }) } diff --git a/src/name-pubsub/state.js b/src/name-pubsub/state.js index c153c0d08..bc6bfb368 100644 --- a/src/name-pubsub/state.js +++ b/src/name-pubsub/state.js @@ -31,17 +31,13 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should get the current state of pubsub', function (done) { + it('should get the current state of pubsub', async function () { this.timeout(50 * 1000) - ipfs.name.pubsub.state((err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - expect(res).to.have.property('enabled') - expect(res.enabled).to.be.eql(true) - - done() - }) + const res = await ipfs.name.pubsub.state() + expect(res).to.exist() + expect(res).to.have.property('enabled') + expect(res.enabled).to.be.eql(true) }) }) } diff --git a/src/name-pubsub/subs.js b/src/name-pubsub/subs.js index d7a529370..5624cd44f 100644 --- a/src/name-pubsub/subs.js +++ b/src/name-pubsub/subs.js @@ -32,37 +32,25 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should get an empty array as a result of subscriptions before any resolve', function (done) { + it('should get an empty array as a result of subscriptions before any resolve', async function () { this.timeout(60 * 1000) - ipfs.name.pubsub.subs((err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - expect(res).to.eql([]) - - done() - }) + const res = await ipfs.name.pubsub.subs() + expect(res).to.exist() + expect(res).to.eql([]) }) - it('should get the list of subscriptions updated after a resolve', function (done) { + it('should get the list of subscriptions updated after a resolve', async function () { this.timeout(300 * 1000) const id = 'QmNP1ASen5ZREtiJTtVD3jhMKhoPb1zppET1tgpjHx2NGA' - 
ipfs.name.pubsub.subs((err, res) => { - expect(err).to.not.exist() - expect(res).to.eql([]) // initally empty + const subs = await ipfs.name.pubsub.subs() + expect(subs).to.eql([]) // initially empty - ipfs.name.resolve(id, (err) => { - expect(err).to.exist() + await expect(ipfs.name.resolve(id)).to.be.rejected() - ipfs.name.pubsub.subs((err, res) => { - expect(err).to.not.exist() - expect(res).to.be.an('array').that.does.include(`/ipns/${id}`) - - done() - }) - }) - }) + const res = await ipfs.name.pubsub.subs() + expect(res).to.be.an('array').that.does.include(`/ipns/${id}`) }) }) } From 6a4c1bbf6ddca0fe40738a51acc4a88173934d0a Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Wed, 20 Nov 2019 23:31:19 +0000 Subject: [PATCH 12/45] chore: name async/await refactor --- src/name/publish.js | 52 +++++++++++++++++---------------------------- src/name/resolve.js | 2 +- 2 files changed, 20 insertions(+), 34 deletions(-) diff --git a/src/name/publish.js b/src/name/publish.js index 40e7f21ed..b998cb1e6 100644 --- a/src/name/publish.js +++ b/src/name/publish.js @@ -38,29 +38,25 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should publish an IPNS record with the default params', function (done) { + it('should publish an IPNS record with the default params', async function () { this.timeout(50 * 1000) const value = fixture.cid - ipfs.name.publish(value, { allowOffline: true }, (err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - expect(res.name).to.equal(nodeId) - expect(res.value).to.equal(`/ipfs/${value}`) - - done() - }) + const res = await ipfs.name.publish(value, { 'allow-offline': true }) + expect(res).to.exist() + expect(res.name).to.equal(nodeId) + expect(res.value).to.equal(`/ipfs/${value}`) }) it('should publish correctly with the lifetime option and resolve', async () => { const [{ path }] = await ipfs.add(Buffer.from('should publish correctly with the lifetime option and resolve')) - await ipfs.name.publish(path, { allowOffline: true, resolve: false, lifetime: '2h' }) + await ipfs.name.publish(path, { 'allow-offline': true, resolve: false, lifetime: '2h' }) return expect(await ipfs.name.resolve(`/ipns/${nodeId}`)).to.eq(`/ipfs/${path}`) }) - it('should publish correctly when the file was not added but resolve is disabled', function (done) { + it('should publish correctly when the file was not added but resolve is disabled', async function () { this.timeout(50 * 1000) const value = 'QmPFVLPmp9zv5Z5KUqLhe2EivAGccQW2r7M7jhVJGLZoZU' @@ -70,20 +66,16 @@ module.exports = (createCommon, options) => { lifetime: '1m', ttl: '10s', key: 'self', - allowOffline: true + 'allow-offline': true } - ipfs.name.publish(value, options, (err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - expect(res.name).to.equal(nodeId) - expect(res.value).to.equal(`/ipfs/${value}`) - - done() - }) + const res = await ipfs.name.publish(value, options) + expect(res).to.exist() + expect(res.name).to.equal(nodeId) + expect(res.value).to.equal(`/ipfs/${value}`) }) - it('should publish with a key received as param, instead of using the key of the node', function (done) { + it('should publish with a key received as param, instead of using the key of the node', async function () { this.timeout(90 * 1000) const value = fixture.cid @@ -92,21 +84,15 @@ module.exports = (createCommon, options) => { lifetime: '24h', ttl: '10s', key: keyName, - allowOffline: true + 'allow-offline': true } - ipfs.key.gen(keyName, { type: 'rsa', size: 2048 }, function 
(err, key) { - expect(err).to.not.exist() - - ipfs.name.publish(value, options, (err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - expect(res.name).to.equal(key.id) - expect(res.value).to.equal(`/ipfs/${value}`) + const key = await ipfs.key.gen(keyName, { type: 'rsa', size: 2048 }) - done() - }) - }) + const res = await ipfs.name.publish(value, options) + expect(res).to.exist() + expect(res.name).to.equal(key.id) + expect(res.value).to.equal(`/ipfs/${value}`) }) }) } diff --git a/src/name/resolve.js b/src/name/resolve.js index 5537e83ea..99060b9fd 100644 --- a/src/name/resolve.js +++ b/src/name/resolve.js @@ -4,7 +4,7 @@ const { spawnNodeWithId } = require('../utils/spawn') const { getDescribe, getIt, expect } = require('../utils/mocha') -const delay = require('../utils/delay') +const delay = require('delay') const CID = require('cids') module.exports = (createCommon, options) => { From 3910b68c7b0e35dad64802b5f697b94e5faacc1c Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Wed, 20 Nov 2019 23:31:48 +0000 Subject: [PATCH 13/45] chore: object async/await refactor --- src/object/data.js | 85 +++------ src/object/get.js | 307 ++++++-------------------------- src/object/links.js | 202 ++++++--------------- src/object/new.js | 20 +-- src/object/patch/add-link.js | 176 +++++------------- src/object/patch/append-data.js | 34 +--- src/object/patch/rm-link.js | 97 ++-------- src/object/patch/set-data.js | 37 +--- src/object/put.js | 178 ++++-------------- src/object/stat.js | 211 +++++++--------------- src/object/utils.js | 24 +-- 11 files changed, 299 insertions(+), 1072 deletions(-) diff --git a/src/object/data.js b/src/object/data.js index bfed3f398..5438137dd 100644 --- a/src/object/data.js +++ b/src/object/data.js @@ -33,38 +33,32 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should get data by multihash', (done) => { + it('should get data by multihash', async () => { const testObj = { Data: Buffer.from(hat()), Links: [] } - ipfs.object.put(testObj, (err, nodeCid) => { - expect(err).to.not.exist() - - ipfs.object.data(nodeCid, (err, data) => { - expect(err).to.not.exist() + const nodeCid = await ipfs.object.put(testObj) - // because js-ipfs-api can't infer - // if the returned Data is Buffer or String - if (typeof data === 'string') { - data = Buffer.from(data) - } - expect(testObj.Data).to.eql(data) - done() - }) - }) + let data = await ipfs.object.data(nodeCid) + // because js-ipfs-api can't infer + // if the returned Data is Buffer or String + if (typeof data === 'string') { + data = Buffer.from(data) + } + expect(testObj.Data).to.deep.equal(data) }) - it('should get data by multihash (promised)', async () => { + it('should get data by base58 encoded multihash', async () => { const testObj = { Data: Buffer.from(hat()), Links: [] } const nodeCid = await ipfs.object.put(testObj) - let data = await ipfs.object.data(nodeCid) + let data = await ipfs.object.data(bs58.encode(nodeCid.buffer), { enc: 'base58' }) // because js-ipfs-api can't infer // if the returned Data is Buffer or String if (typeof data === 'string') { @@ -73,66 +67,29 @@ module.exports = (createCommon, options) => { expect(testObj.Data).to.deep.equal(data) }) - it('should get data by base58 encoded multihash', (done) => { + it('should get data by base58 encoded multihash string', async () => { const testObj = { Data: Buffer.from(hat()), Links: [] } - ipfs.object.put(testObj, (err, nodeCid) => { - expect(err).to.not.exist() - - 
ipfs.object.data(bs58.encode(nodeCid.buffer), { enc: 'base58' }, (err, data) => { - expect(err).to.not.exist() - - // because js-ipfs-api can't infer - // if the returned Data is Buffer or String - if (typeof data === 'string') { - data = Buffer.from(data) - } - expect(testObj.Data).to.eql(data) - done() - }) - }) - }) + const nodeCid = await ipfs.object.put(testObj) - it('should get data by base58 encoded multihash string', (done) => { - const testObj = { - Data: Buffer.from(hat()), - Links: [] + let data = await ipfs.object.data(bs58.encode(nodeCid.buffer).toString(), { enc: 'base58' }) + // because js-ipfs-api can't infer if the + // returned Data is Buffer or String + if (typeof data === 'string') { + data = Buffer.from(data) } - - ipfs.object.put(testObj, (err, nodeCid) => { - expect(err).to.not.exist() - - ipfs.object.data(bs58.encode(nodeCid.buffer).toString(), { enc: 'base58' }, (err, data) => { - expect(err).to.not.exist() - - // because js-ipfs-api can't infer if the - // returned Data is Buffer or String - if (typeof data === 'string') { - data = Buffer.from(data) - } - expect(testObj.Data).to.eql(data) - done() - }) - }) + expect(testObj.Data).to.eql(data) }) it('returns error for request without argument', () => { - return ipfs.object.data(null) - .then( - () => expect.fail('should have returned an error for invalid argument'), - (err) => expect(err).to.be.an.instanceof(Error) - ) + return expect(ipfs.object.data(null)).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) it('returns error for request with invalid argument', () => { - ipfs.object.data('invalid', { enc: 'base58' }) - .then( - () => expect.fail('should have returned an error for invalid argument'), - (err) => expect(err).to.be.an.instanceof(Error) - ) + return expect(ipfs.object.data('invalid', { enc: 'base58' })).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) }) } diff --git a/src/object/get.js b/src/object/get.js index 4bac9c934..938a5246d 100644 --- a/src/object/get.js +++ b/src/object/get.js @@ -3,7 +3,6 @@ const dagPB = require('ipld-dag-pb') const DAGNode = dagPB.DAGNode -const series = require('async/series') const hat = require('hat') const { getDescribe, getIt, expect } = require('../utils/mocha') const UnixFs = require('ipfs-unixfs') @@ -37,59 +36,13 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should get object by multihash', (done) => { + it('should get object by multihash', async () => { const obj = { Data: Buffer.from(hat()), Links: [] } - let node1 - let node1Cid - let node2 - - series([ - (cb) => { - ipfs.object.put(obj, (err, cid) => { - expect(err).to.not.exist() - node1Cid = cid - - ipfs.object.get(cid, (err, node) => { - expect(err).to.not.exist() - node1 = node - cb() - }) - }) - }, - (cb) => { - ipfs.object.get(node1Cid, (err, node) => { - expect(err).to.not.exist() - - // because js-ipfs-api can't infer if the - // returned Data is Buffer or String - if (typeof node.Data === 'string') { - node = new DAGNode(Buffer.from(node.Data), node.Links, node.size) - } - - node2 = node - - cb() - }) - }, - (cb) => { - expect(node1.Data).to.eql(node2.Data) - expect(node1.Links).to.eql(node2.Links) - cb() - } - ], done) - }) - - it('should get object by multihash (promised)', async () => { - const testObj = { - Data: Buffer.from(hat()), - Links: [] - } - - const node1Cid = await ipfs.object.put(testObj) + const node1Cid = await ipfs.object.put(obj) const node1 = await ipfs.object.get(node1Cid) let node2 = await ipfs.object.get(node1Cid) @@ 
-99,56 +52,11 @@ module.exports = (createCommon, options) => { node2 = new DAGNode(Buffer.from(node2.Data), node2.Links, node2.size) } - expect(node1.Data).to.deep.equal(node2.Data) - expect(node1.Links).to.deep.equal(node2.Links) - }) - - it('should get object by multihash string', (done) => { - const obj = { - Data: Buffer.from(hat()), - Links: [] - } - - let node1 - let node1Cid - let node2 - - series([ - (cb) => { - ipfs.object.put(obj, (err, cid) => { - expect(err).to.not.exist() - node1Cid = cid - - ipfs.object.get(node1Cid, (err, node) => { - expect(err).to.not.exist() - node1 = node - cb() - }) - }) - }, - (cb) => { - // get object from ipfs multihash string - ipfs.object.get(node1Cid.toBaseEncodedString(), (err, node) => { - expect(err).to.not.exist() - // because js-ipfs-api can't infer if the - // returned Data is Buffer or String - if (typeof node.Data === 'string') { - node = new DAGNode(Buffer.from(node.Data), node.Links, node.size) - } - - node2 = node - cb() - }) - }, - (cb) => { - expect(node1.Data).to.eql(node2.Data) - expect(node1.Links).to.eql(node2.Links) - cb() - } - ], done) + expect(node1.Data).to.eql(node2.Data) + expect(node1.Links).to.eql(node2.Links) }) - it('should get object by multihash string (promised)', async () => { + it('should get object by multihash string', async () => { const obj = { Data: Buffer.from(hat()), Links: [] @@ -168,189 +76,86 @@ module.exports = (createCommon, options) => { expect(node1.Links).to.deep.equal(node2.Links) }) - it('should get object with links by multihash string', (done) => { - let node1a - let node1b - let node1bCid - let node1c - let node2 - - series([ - (cb) => { - try { - node1a = new DAGNode(Buffer.from('Some data 1')) - } catch (err) { - return cb(err) - } - - cb() - }, - (cb) => { - try { - node2 = new DAGNode(Buffer.from('Some data 2')) - } catch (err) { - return cb(err) - } - - cb() - }, - (cb) => { - asDAGLink(node2, 'some-link', (err, link) => { - if (err) { - return cb(err) - } - - node1b = new DAGNode(node1a.Data, node1a.Links.concat(link)) - - cb() - }) - }, - (cb) => { - ipfs.object.put(node1b, (err, cid) => { - expect(err).to.not.exist() - node1bCid = cid - cb() - }) - }, - (cb) => { - ipfs.object.get(node1bCid, (err, node) => { - expect(err).to.not.exist() - - // because js-ipfs-api can't infer if the - // returned Data is Buffer or String - if (typeof node.Data === 'string') { - node = new DAGNode(Buffer.from(node.Data), node.Links, node.size) - } - - node1c = node - cb() - }) - }, - (cb) => { - expect(node1a.Data).to.eql(node1c.Data) - cb() - } - ], done) + it('should get object with links by multihash string', async () => { + const node1a = new DAGNode(Buffer.from('Some data 1')) + const node2 = new DAGNode(Buffer.from('Some data 2')) + + const link = await asDAGLink(node2, 'some-link') + const node1b = new DAGNode(node1a.Data, node1a.Links.concat(link)) + + const node1bCid = await ipfs.object.put(node1b) + let node1c = await ipfs.object.get(node1bCid) + + // because js-ipfs-api can't infer if the + // returned Data is Buffer or String + if (typeof node1c.Data === 'string') { + node1c = new DAGNode(Buffer.from(node1c.Data), node1c.Links, node1c.size) + } + + expect(node1a.Data).to.eql(node1c.Data) }) - it('should get object by base58 encoded multihash', (done) => { + it('should get object by base58 encoded multihash', async () => { const obj = { Data: Buffer.from(hat()), Links: [] } - let node1a - let node1aCid - let node1b - - series([ - (cb) => { - ipfs.object.put(obj, (err, cid) => { - 
expect(err).to.not.exist() - node1aCid = cid - - ipfs.object.get(cid, (err, node) => { - expect(err).to.not.exist() - node1a = node - cb() - }) - }) - }, - (cb) => { - ipfs.object.get(node1aCid, { enc: 'base58' }, (err, node) => { - expect(err).to.not.exist() - // because js-ipfs-api can't infer if the - // returned Data is Buffer or String - if (typeof node.Data === 'string') { - node = new DAGNode(Buffer.from(node.Data), node.Links, node.size) - } - node1b = node - cb() - }) - }, - (cb) => { - expect(node1a.Data).to.eql(node1b.Data) - expect(node1a.Links).to.eql(node1b.Links) - cb() - } - ], done) + const node1aCid = await ipfs.object.put(obj) + const node1a = await ipfs.object.get(node1aCid) + let node1b = await ipfs.object.get(node1aCid, { enc: 'base58' }) + + // because js-ipfs-api can't infer if the + // returned Data is Buffer or String + if (typeof node1b.Data === 'string') { + node1b = new DAGNode(Buffer.from(node1b.Data), node1b.Links, node1b.size) + } + + expect(node1a.Data).to.eql(node1b.Data) + expect(node1a.Links).to.eql(node1b.Links) }) - it('should get object by base58 encoded multihash string', (done) => { + it('should get object by base58 encoded multihash string', async () => { const obj = { Data: Buffer.from(hat()), Links: [] } - let node1a - let node1aCid - let node1b - - series([ - (cb) => { - ipfs.object.put(obj, (err, cid) => { - expect(err).to.not.exist() - node1aCid = cid - - ipfs.object.get(cid, (err, node) => { - expect(err).to.not.exist() - node1a = node - cb() - }) - }) - }, - (cb) => { - ipfs.object.get(node1aCid.toBaseEncodedString(), { enc: 'base58' }, (err, node) => { - expect(err).to.not.exist() - // because js-ipfs-api can't infer if the - // returned Data is Buffer or String - if (typeof node.Data === 'string') { - node = new DAGNode(Buffer.from(node.Data), node.Links, node.size) - } - node1b = node - cb() - }) - }, - (cb) => { - expect(node1a.Data).to.eql(node1b.Data) - expect(node1a.Links).to.eql(node1b.Links) - cb() - } - ], done) + const node1aCid = await ipfs.object.put(obj) + const node1a = await ipfs.object.get(node1aCid) + let node1b = await ipfs.object.get(node1aCid.toBaseEncodedString(), { enc: 'base58' }) + + // because js-ipfs-api can't infer if the + // returned Data is Buffer or String + if (typeof node1b.Data === 'string') { + node1b = new DAGNode(Buffer.from(node1b.Data), node1b.Links, node1b.size) + } + + expect(node1a.Data).to.eql(node1b.Data) + expect(node1a.Links).to.eql(node1b.Links) }) - it('should supply unaltered data', () => { + it('should supply unaltered data', async () => { // has to be big enough to span several DAGNodes const data = crypto.randomBytes(1024 * 3000) - return ipfs.add({ + const result = await ipfs.add({ path: '', content: data }) - .then((result) => { - return ipfs.object.get(result[0].hash) - }) - .then((node) => { - const meta = UnixFs.unmarshal(node.Data) - expect(meta.fileSize()).to.equal(data.length) - }) + const node = await ipfs.object.get(result[0].hash) + const meta = UnixFs.unmarshal(node.Data) + + expect(meta.fileSize()).to.equal(data.length) }) it('should error for request without argument', () => { - return ipfs.object.get(null) - .then( - () => expect.fail('should have returned an error for invalid argument'), - (err) => expect(err).to.be.an.instanceof(Error) - ) + return expect(ipfs.object.get(null)).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) it('returns error for request with invalid argument', () => { - return ipfs.object.get('invalid', { enc: 'base58' }) - .then( - () => 
expect.fail('should have returned an error for invalid argument'), - (err) => expect(err).to.be.an.instanceof(Error) - ) + return expect(ipfs.object.get('invalid', { enc: 'base58' })).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) }) } diff --git a/src/object/links.js b/src/object/links.js index 2dd51d4e0..e5c9ae6fc 100644 --- a/src/object/links.js +++ b/src/object/links.js @@ -4,7 +4,6 @@ const dagPB = require('ipld-dag-pb') const DAGNode = dagPB.DAGNode -const series = require('async/series') const hat = require('hat') const { getDescribe, getIt, expect } = require('../utils/mocha') const { asDAGLink } = require('./utils') @@ -37,28 +36,37 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should get empty links by multihash', (done) => { + it('should get empty links by multihash', async () => { const testObj = { Data: Buffer.from(hat()), Links: [] } - ipfs.object.put(testObj, (err, cid) => { - expect(err).to.not.exist() + const cid = await ipfs.object.put(testObj) + const node = await ipfs.object.get(cid) + const links = await ipfs.object.links(cid) - ipfs.object.get(cid, (err, node) => { - expect(err).to.not.exist() + expect(node.Links).to.eql(links) + }) - ipfs.object.links(cid, (err, links) => { - expect(err).to.not.exist() - expect(node.Links).to.deep.equal(links) - done() - }) - }) + it('should get links by multihash', async () => { + const node1a = new DAGNode(Buffer.from('Some data 1')) + const node2 = new DAGNode(Buffer.from('Some data 2')) + + const link = await asDAGLink(node2, 'some-link') + + const node1b = new DAGNode(node1a.Data, node1a.Links.concat(link)) + const node1bCid = await ipfs.object.put(node1b) + + const links = await ipfs.object.links(node1bCid) + expect(node1b.Links[0]).to.eql({ + Hash: links[0].Hash, + Tsize: links[0].Tsize, + Name: links[0].Name }) }) - it('should get empty links by multihash (promised)', async () => { + it('should get links by base58 encoded multihash', async () => { const testObj = { Data: Buffer.from(hat()), Links: [] @@ -66,161 +74,61 @@ module.exports = (createCommon, options) => { const cid = await ipfs.object.put(testObj) const node = await ipfs.object.get(cid) - const links = await ipfs.object.links(cid) - - expect(node.Links).to.eql(links) - }) - it('should get links by multihash', (done) => { - let node1a - let node1b - let node1bCid - let node2 - - series([ - (cb) => { - try { - node1a = new DAGNode(Buffer.from('Some data 1')) - } catch (err) { - return cb(err) - } - - cb() - }, - (cb) => { - try { - node2 = new DAGNode(Buffer.from('Some data 2')) - } catch (err) { - return cb(err) - } - - cb() - }, - (cb) => { - asDAGLink(node2, 'some-link', (err, link) => { - expect(err).to.not.exist() - - node1b = new DAGNode(node1a.Data, node1a.Links.concat(link)) - - cb() - }) - }, - (cb) => { - ipfs.object.put(node1b, (err, cid) => { - expect(err).to.not.exist() - - node1bCid = cid - - cb() - }) - }, - (cb) => { - ipfs.object.links(node1bCid, (err, links) => { - expect(err).to.not.exist() - expect(node1b.Links[0]).to.eql({ - Hash: links[0].Hash, - Tsize: links[0].Tsize, - Name: links[0].Name - }) - cb() - }) - } - ], done) + const links = await ipfs.object.links(cid.buffer, { enc: 'base58' }) + expect(node.Links).to.deep.equal(links) }) - it('should get links by base58 encoded multihash', (done) => { + it('should get links by base58 encoded multihash string', async () => { const testObj = { Data: Buffer.from(hat()), Links: [] } - ipfs.object.put(testObj, (err, cid) => { - 
expect(err).to.not.exist() - - ipfs.object.get(cid, (err, node) => { - expect(err).to.not.exist() + const cid = await ipfs.object.put(testObj) + const node = await ipfs.object.get(cid) - ipfs.object.links(cid.buffer, { enc: 'base58' }, (err, links) => { - expect(err).to.not.exist() - expect(node.Links).to.deep.equal(links) - done() - }) - }) - }) + const links = await ipfs.object.links(cid.toBaseEncodedString(), { enc: 'base58' }) + expect(node.Links).to.deep.equal(links) }) - it('should get links by base58 encoded multihash string', (done) => { - const testObj = { - Data: Buffer.from(hat()), - Links: [] - } + it('should get links from CBOR object', async () => { + const hashes = [] - ipfs.object.put(testObj, (err, cid) => { - expect(err).to.not.exist() + const res1 = await ipfs.add(Buffer.from('test data')) + hashes.push(res1[0].hash) - ipfs.object.get(cid, (err, node) => { - expect(err).to.not.exist() + const res2 = await ipfs.add(Buffer.from('more test data')) + hashes.push(res2[0].hash) - ipfs.object.links(cid.toBaseEncodedString(), { enc: 'base58' }, (err, links) => { - expect(err).to.not.exist() - expect(node.Links).to.deep.equal(links) - done() - }) - }) - }) - }) + const obj = { + some: 'data', + mylink: new CID(hashes[0]), + myobj: { + anotherLink: new CID(hashes[1]) + } + } + const cid = await ipfs.dag.put(obj) - it('should get links from CBOR object', (done) => { - const hashes = [] - ipfs.add(Buffer.from('test data'), (err, res1) => { - expect(err).to.not.exist() - hashes.push(res1[0].hash) - ipfs.add(Buffer.from('more test data'), (err, res2) => { - hashes.push(res2[0].hash) - expect(err).to.not.exist() - const obj = { - some: 'data', - mylink: new CID(hashes[0]), - myobj: { - anotherLink: new CID(hashes[1]) - } - } - ipfs.dag.put(obj, (err, cid) => { - expect(err).to.not.exist() - ipfs.object.links(cid, (err, links) => { - expect(err).to.not.exist() - expect(links.length).to.eql(2) - - // TODO: js-ipfs succeeds but go returns empty strings for link name - // const names = [links[0].name, links[1].name] - // expect(names).includes('mylink') - // expect(names).includes('myobj/anotherLink') - - const cids = [links[0].Hash.toString(), links[1].Hash.toString()] - expect(cids).includes(hashes[0]) - expect(cids).includes(hashes[1]) - - done() - }) - }) - }) - }) + const links = await ipfs.object.links(cid) + expect(links.length).to.eql(2) + + // TODO: js-ipfs succeeds but go returns empty strings for link name + // const names = [links[0].name, links[1].name] + // expect(names).includes('mylink') + // expect(names).includes('myobj/anotherLink') + + const cids = [links[0].Hash.toString(), links[1].Hash.toString()] + expect(cids).includes(hashes[0]) + expect(cids).includes(hashes[1]) }) it('returns error for request without argument', () => { - return ipfs.object.links(null) - .then( - () => expect.fail('should have returned an error for invalid argument'), - (err) => expect(err).to.be.an.instanceof(Error) - ) + return expect(ipfs.object.links(null)).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) it('returns error for request with invalid argument', () => { - ipfs.object.links('invalid', { enc: 'base58' }) - .then( - () => expect.fail('should have returned an error for invalid argument'), - (err) => expect(err).to.be.an.instanceof(Error) - ) + return expect(ipfs.object.links('invalid', { enc: 'base58' })).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) }) } diff --git a/src/object/new.js b/src/object/new.js index bcd78c14d..e2756caa3 100644 --- a/src/object/new.js 
+++ b/src/object/new.js @@ -30,28 +30,12 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should create a new object with no template', (done) => { - ipfs.object.new((err, cid) => { - expect(err).to.not.exist() - expect(cid.toBaseEncodedString()).to.equal('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n') - done() - }) - }) - - it('should create a new object with no template (promised)', async () => { + it('should create a new object with no template', async () => { const cid = await ipfs.object.new() expect(cid.toBaseEncodedString()).to.equal('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n') }) - it('should create a new object with unixfs-dir template', (done) => { - ipfs.object.new('unixfs-dir', (err, cid) => { - expect(err).to.not.exist() - expect(cid.toBaseEncodedString()).to.equal('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn') - done() - }) - }) - - it('should create a new object with unixfs-dir template (promised)', async () => { + it('should create a new object with unixfs-dir template', async () => { const cid = await ipfs.object.new('unixfs-dir') expect(cid.toBaseEncodedString()).to.equal('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn') }) diff --git a/src/object/patch/add-link.js b/src/object/patch/add-link.js index bf571c218..7c4d55fbf 100644 --- a/src/object/patch/add-link.js +++ b/src/object/patch/add-link.js @@ -3,14 +3,8 @@ const dagPB = require('ipld-dag-pb') const DAGNode = dagPB.DAGNode -const series = require('async/series') const { getDescribe, getIt, expect } = require('../../utils/mocha') -const { - calculateCid, - createDAGNode, - addLinkToDAGNode, - asDAGLink -} = require('../utils') +const { asDAGLink } = require('../utils') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -39,145 +33,57 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should add a link to an existing node', (done) => { - let testNodeCid - let node1bCid - let node1a - let node1b - let node2 - + it('should add a link to an existing node', async () => { const obj = { Data: Buffer.from('patch test object'), Links: [] } - - series([ - (cb) => { - ipfs.object.put(obj, (err, cid) => { - expect(err).to.not.exist() - testNodeCid = cid - cb() - }) - }, - (cb) => { - try { - node1a = new DAGNode(obj.Data, obj.Links) - } catch (err) { - return cb(err) - } - - cb() - }, - (cb) => { - try { - node2 = new DAGNode(Buffer.from('some other node')) - } catch (err) { - return cb(err) - } - - cb() - }, - (cb) => { - // note: we need to put the linked obj, otherwise IPFS won't - // timeout. Reason: it needs the node to get its size - ipfs.object.put(node2, (err, cid) => { - expect(err).to.not.exist() - - cb() - }) - }, - (cb) => { - asDAGLink(node2, 'link-to-node', (err, link) => { - expect(err).to.not.exist() - - node1b = new DAGNode(node1a.Data, node1a.Links.concat(link)) - - cb() - }) - }, - (cb) => { - ipfs.object.put(node1b, (err, cid) => { - expect(err).to.not.exist() - - node1bCid = cid - - cb() - }) - }, - (cb) => { - ipfs.object.patch.addLink(testNodeCid, node1b.Links[0], (err, cid) => { - expect(err).to.not.exist() - expect(node1bCid).to.eql(cid) - cb() - }) - } - /* TODO: revisit this assertions. 
- (cb) => { - // note: make sure we can link js plain objects - const content = Buffer.from(JSON.stringify({ - title: 'serialized object' - }, null, 0)) - ipfs.add(content, (err, result) => { - expect(err).to.not.exist() - expect(result).to.exist() - expect(result).to.have.lengthOf(1) - const object = result.pop() - node3 = { - name: object.hash, - multihash: object.hash, - size: object.size - } - cb() - }) - }, - (cb) => { - ipfs.object.patch.addLink(testNodeWithLinkMultihash, node3, (err, node) => { - expect(err).to.not.exist() - expect(node).to.exist() - testNodeWithLinkMultihash = node.multihash - testLinkPlainObject = node3 - cb() - }) - } - */ - ], done) - }) - - it('should add a link to an existing node (promised)', async () => { - const obj = { - Data: Buffer.from('patch test object (promised)'), - Links: [] + // link to add + const node2 = new DAGNode(Buffer.from('some other node')) + // note: we need to put the linked obj, otherwise IPFS won't + // timeout. Reason: it needs the node to get its size + await ipfs.object.put(node2) + const link = await asDAGLink(node2, 'link-to-node') + + // manually create the DAG, step by step + const node1a = new DAGNode(obj.Data, obj.Links) + const node1b = new DAGNode(node1a.Data, node1a.Links.concat(link)) + const node1bCid = await ipfs.object.put(node1b) + + // add link with patch.addLink + const testNodeCid = await ipfs.object.put(obj) + const cid = await ipfs.object.patch.addLink(testNodeCid, link) + + // assert both are equal + expect(node1bCid).to.eql(cid) + + /* TODO: revisit these assertions. + // note: make sure we can link js plain objects + const content = Buffer.from(JSON.stringify({ + title: 'serialized object' + }, null, 0)) + const result = await ipfs.add(content) + expect(result).to.exist() + expect(result).to.have.lengthOf(1) + const object = result.pop() + const node3 = { + name: object.hash, + multihash: object.hash, + size: object.size } + const node = await ipfs.object.patch.addLink(testNodeWithLinkMultihash, node3) + expect(node).to.exist() + testNodeWithLinkMultihash = node.multihash + testLinkPlainObject = node3 + */ }) it('returns error for request without arguments', () => { - return ipfs.object.patch.addLink(null, null, null) - .then( - () => expect.fail('should have returned an error for invalid argument'), - (err) => expect(err).to.be.an.instanceof(Error) - ) + return expect(ipfs.object.patch.addLink(null, null, null)).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) it('returns error for request with only one invalid argument', () => { - return ipfs.object.patch.addLink('invalid', null, null) - .then( - () => expect.fail('should have returned an error for invalid argument'), - (err) => expect(err).to.be.an.instanceof(Error) - ) + return expect(ipfs.object.patch.addLink('invalid', null, null)).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) }) } diff --git a/src/object/patch/append-data.js b/src/object/patch/append-data.js index 9d7b56a81..5a8aeb3b8 
100644 --- a/src/object/patch/append-data.js +++ b/src/object/patch/append-data.js @@ -31,51 +31,25 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should append data to an existing node', (done) => { + it('should append data to an existing node', async () => { const obj = { Data: Buffer.from('patch test object'), Links: [] } - ipfs.object.put(obj, (err, nodeCid) => { - expect(err).to.not.exist() - - ipfs.object.patch.appendData(nodeCid, Buffer.from('append'), (err, patchedNodeCid) => { - expect(err).to.not.exist() - expect(patchedNodeCid).to.not.deep.equal(nodeCid) - done() - }) - }) - }) - - it('should append data to an existing node (promised)', async () => { - const obj = { - Data: Buffer.from('patch test object (promised)'), - Links: [] - } - const nodeCid = await ipfs.object.put(obj) const patchedNodeCid = await ipfs.object.patch.appendData(nodeCid, Buffer.from('append')) - - expect(nodeCid).to.not.deep.equal(patchedNodeCid) + expect(patchedNodeCid).to.not.deep.equal(nodeCid) }) it('returns error for request without key & data', () => { - return ipfs.object.patch.appendData(null, null) - .then( - () => expect.fail('should have returned an error for invalid argument'), - (err) => expect(err).to.be.an.instanceof(Error) - ) + return expect(ipfs.object.patch.appendData(null, null)).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) it('returns error for request without data', () => { const filePath = 'test/fixtures/test-data/badnode.json' - return ipfs.object.patch.appendData(null, filePath) - .then( - () => expect.fail('should have returned an error for invalid argument'), - (err) => expect(err).to.be.an.instanceof(Error) - ) + return expect(ipfs.object.patch.appendData(null, filePath)).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) }) } diff --git a/src/object/patch/rm-link.js b/src/object/patch/rm-link.js index 03195bf78..1be87b4b1 100644 --- a/src/object/patch/rm-link.js +++ b/src/object/patch/rm-link.js @@ -1,9 +1,6 @@ /* eslint-env mocha */ 'use strict' -const dagPB = require('ipld-dag-pb') -const DAGLink = dagPB.DAGLink -const series = require('async/series') const { getDescribe, getIt, expect } = require('../../utils/mocha') const { asDAGLink } = require('../utils') @@ -34,73 +31,7 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should remove a link from an existing node', (done) => { - let node1aCid - let node1bCid - let node2 - let node2Cid - let testLink - - const obj1 = { - Data: Buffer.from('patch test object 1'), - Links: [] - } - - const obj2 = { - Data: Buffer.from('patch test object 2'), - Links: [] - } - - series([ - (cb) => { - ipfs.object.put(obj1, (err, cid) => { - expect(err).to.not.exist() - node1aCid = cid - cb() - }) - }, - (cb) => { - ipfs.object.put(obj2, (err, cid) => { - expect(err).to.not.exist() - node2Cid = cid - - ipfs.object.get(cid, (err, node) => { - expect(err).to.not.exist() - node2 = node - cb() - }) - }) - }, - (cb) => { - testLink = new DAGLink('link-to-node', node2.size, node2Cid) - - ipfs.object.patch.addLink(node1aCid, testLink, (err, cid) => { - expect(err).to.not.exist() - node1bCid = cid - cb() - }) - }, - (cb) => { - ipfs.object.patch.rmLink(node1bCid, testLink, (err, cid) => { - expect(err).to.not.exist() - expect(cid).to.not.deep.equal(node1bCid) - expect(cid).to.deep.equal(node1aCid) - cb() - }) - } - /* TODO: revisit this assertions. 
- (cb) => { - ipfs.object.patch.rmLink(testNodeWithLinkMultihash, testLinkPlainObject, (err, node) => { - expect(err).to.not.exist() - expect(node.multihash).to.not.deep.equal(testNodeWithLinkMultihash) - cb() - }) - } - */ - ], done) - }) - - it('should remove a link from an existing node (promised)', async () => { + it('should remove a link from an existing node', async () => { const obj1 = { Data: Buffer.from('patch test object 1'), Links: [] @@ -120,33 +51,29 @@ module.exports = (createCommon, options) => { expect(withoutChildCid).to.not.deep.equal(parentCid) expect(withoutChildCid).to.deep.equal(nodeCid) + + /* TODO: revisit these assertions. + const node = await ipfs.object.patch.rmLink(testNodeWithLinkMultihash, testLinkPlainObject) + expect(node.multihash).to.not.deep.equal(testNodeWithLinkMultihash) + */ }) it('returns error for request without arguments', () => { - return ipfs.object.patch.rmLink(null, null) - .then( - () => expect.fail('should have returned an error for invalid argument'), - (err) => expect(err).to.be.an.instanceof(Error) - ) + return expect(ipfs.object.patch.rmLink(null, null)).to.eventually.be.rejected + .and.be.an.instanceOf(Error) }) it('returns error for request only one invalid argument', () => { - return ipfs.object.patch.rmLink('invalid', null) - .then( - () => expect.fail('should have returned an error for invalid argument'), - (err) => expect(err).to.be.an.instanceof(Error) - ) + return expect(ipfs.object.patch.rmLink('invalid', null)).to.eventually.be.rejected + .and.be.an.instanceOf(Error) }) it('returns error for request with invalid first argument', () => { const root = '' const link = 'foo' - return ipfs.object.patch.rmLink(root, link) - .then( - () => expect.fail('should have returned an error for invalid argument'), - (err) => expect(err).to.be.an.instanceof(Error) - ) + return expect(ipfs.object.patch.rmLink(root, link)).to.eventually.be.rejected + .and.be.an.instanceOf(Error) }) }) } diff --git a/src/object/patch/set-data.js b/src/object/patch/set-data.js index 7a7c619ba..28e138525 100644 --- a/src/object/patch/set-data.js +++ b/src/object/patch/set-data.js @@ -31,36 +31,13 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should set data for an existing node', (done) => { + it('should set data for an existing node', async () => { const obj = { Data: Buffer.from('patch test object'), Links: [] } const patchData = Buffer.from('set') - ipfs.object.put(obj, (err, nodeCid) => { - expect(err).to.not.exist() - - ipfs.object.patch.setData(nodeCid, patchData, (err, patchedNodeCid) => { - expect(err).to.not.exist() - expect(nodeCid).to.not.deep.equal(patchedNodeCid) - - ipfs.object.get(patchedNodeCid, (err, patchedNode) => { - expect(err).to.not.exist() - expect(patchedNode.Data).to.eql(patchData) - done() - }) - }) - }) - }) - - it('should set data for an existing node (promised)', async () => { - const obj = { - Data: Buffer.from('patch test object (promised)'), - Links: [] - } - const patchData = Buffer.from('set') - const nodeCid = await ipfs.object.put(obj) const patchedNodeCid = await ipfs.object.patch.setData(nodeCid, patchData) const patchedNode = await ipfs.object.get(patchedNodeCid) @@ -70,21 +47,13 @@ module.exports = (createCommon, options) => { }) it('returns error for request without key & data', () => { - return ipfs.object.patch.setData(null, null) - .then( - () => expect.fail('should have returned an error for invalid argument'), - (err) => expect(err).to.be.an.instanceof(Error) - ) + return 
expect(ipfs.object.patch.setData(null, null)).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) it('returns error for request without data', () => { const filePath = 'test/fixtures/test-data/badnode.json' - return ipfs.object.patch.setData(null, filePath) - .then( - () => expect.fail('should have returned an error for invalid argument'), - (err) => expect(err).to.be.an.instanceof(Error) - ) + return expect(ipfs.object.patch.setData(null, filePath)).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) }) } diff --git a/src/object/put.js b/src/object/put.js index f838a8781..f3a5362cb 100644 --- a/src/object/put.js +++ b/src/object/put.js @@ -3,7 +3,6 @@ const dagPB = require('ipld-dag-pb') const DAGNode = dagPB.DAGNode -const series = require('async/series') const hat = require('hat') const { getDescribe, getIt, expect } = require('../utils/mocha') const { asDAGLink } = require('./utils') @@ -35,26 +34,7 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should put an object', (done) => { - const obj = { - Data: Buffer.from(hat()), - Links: [] - } - - ipfs.object.put(obj, (err, cid) => { - expect(err).to.not.exist() - - ipfs.object.get(cid, (err, node) => { - expect(err).to.not.exist() - const nodeJSON = node.toJSON() - expect(nodeJSON.data).to.eql(obj.Data) - expect(nodeJSON.links).to.eql(obj.Links) - done() - }) - }) - }) - - it('should put an object (promised)', async () => { + it('should put an object', async () => { const obj = { Data: Buffer.from(hat()), Links: [] @@ -68,7 +48,7 @@ module.exports = (createCommon, options) => { expect(obj.Links).to.deep.equal(nodeJSON.links) }) - it('should put a JSON encoded Buffer', (done) => { + it('should put a JSON encoded Buffer', async () => { const obj = { Data: Buffer.from(hat()), Links: [] @@ -81,138 +61,58 @@ module.exports = (createCommon, options) => { const buf = Buffer.from(JSON.stringify(obj2)) - ipfs.object.put(buf, { enc: 'json' }, (err, cid) => { - expect(err).to.not.exist() + const cid = await ipfs.object.put(buf, { enc: 'json' }) - ipfs.object.get(cid, (err, node) => { - expect(err).to.not.exist() - const nodeJSON = node.toJSON() - expect(nodeJSON.data).to.eql(node.Data) - done() - }) - }) + const node = await ipfs.object.get(cid) + const nodeJSON = node.toJSON() + expect(nodeJSON.data).to.eql(node.Data) }) - it('should put a Protobuf encoded Buffer', (done) => { - let node - let serialized - - series([ - (cb) => { - try { - node = new DAGNode(Buffer.from(hat())) - } catch (err) { - return cb(err) - } - - cb() - }, - (cb) => { - try { - serialized = node.serialize() - } catch (err) { - return cb(err) - } - - cb() - }, - (cb) => { - ipfs.object.put(serialized, { enc: 'protobuf' }, (err, cid) => { - expect(err).to.not.exist() - ipfs.object.get(cid, (err, node2) => { - expect(err).to.not.exist() - expect(node2.Data).to.deep.equal(node.Data) - expect(node2.Links).to.deep.equal(node.Links) - cb() - }) - }) - } - ], done) + it('should put a Protobuf encoded Buffer', async () => { + const node = new DAGNode(Buffer.from(hat())) + const serialized = node.serialize() + + const cid = await ipfs.object.put(serialized, { enc: 'protobuf' }) + const node2 = await ipfs.object.get(cid) + expect(node2.Data).to.deep.equal(node.Data) + expect(node2.Links).to.deep.equal(node.Links) }) - it('should put a Buffer as data', (done) => { + it('should put a Buffer as data', async () => { const data = Buffer.from(hat()) - ipfs.object.put(data, (err, cid) => { - expect(err).to.not.exist() - 
ipfs.object.get(cid, (err, node) => { - expect(err).to.not.exist() - const nodeJSON = node.toJSON() - expect(data).to.deep.equal(nodeJSON.data) - expect([]).to.deep.equal(nodeJSON.links) - done() - }) - }) + const cid = await ipfs.object.put(data) + const node = await ipfs.object.get(cid) + const nodeJSON = node.toJSON() + expect(data).to.deep.equal(nodeJSON.data) + expect([]).to.deep.equal(nodeJSON.links) }) - it('should put a Protobuf DAGNode', (done) => { + it('should put a Protobuf DAGNode', async () => { const dNode = new DAGNode(Buffer.from(hat())) - ipfs.object.put(dNode, (err, cid) => { - expect(err).to.not.exist() - - ipfs.object.get(cid, (err, node) => { - expect(err).to.not.exist() - expect(dNode.Data).to.deep.equal(node.Data) - expect(dNode.Links).to.deep.equal(node.Links) - done() - }) - }) + const cid = await ipfs.object.put(dNode) + const node = await ipfs.object.get(cid) + expect(dNode.Data).to.deep.equal(node.Data) + expect(dNode.Links).to.deep.equal(node.Links) }) - it('should fail if a string is passed', (done) => { - ipfs.object.put(hat(), (err) => { - expect(err).to.exist() - done() - }) + it('should fail if a string is passed', () => { + return expect(ipfs.object.put(hat())).to.eventually.be.rejected() }) - it('should put a Protobuf DAGNode with a link', (done) => { - let node1a - let node1b - let node2 - - series([ - (cb) => { - try { - node1a = new DAGNode(Buffer.from(hat())) - } catch (err) { - return cb(err) - } - - cb() - }, - (cb) => { - try { - node2 = new DAGNode(Buffer.from(hat())) - } catch (err) { - return cb(err) - } - - cb() - }, - (cb) => { - asDAGLink(node2, 'some-link', (err, link) => { - expect(err).to.not.exist() - - node1b = new DAGNode(node1a.Data, node1a.Links.concat(link)) - - cb() - }) - }, - (cb) => { - ipfs.object.put(node1b, (err, cid) => { - expect(err).to.not.exist() - - ipfs.object.get(cid, (err, node) => { - expect(err).to.not.exist() - expect(node1b.Data).to.deep.equal(node.Data) - expect(node1b.Links).to.deep.equal(node.Links) - cb() - }) - }) - } - ], done) + it('should put a Protobuf DAGNode with a link', async () => { + const node1a = new DAGNode(Buffer.from(hat())) + const node2 = new DAGNode(Buffer.from(hat())) + + const link = await asDAGLink(node2, 'some-link') + + const node1b = new DAGNode(node1a.Data, node1a.Links.concat(link)) + + const cid = await ipfs.object.put(node1b) + const node = await ipfs.object.get(cid) + expect(node1b.Data).to.deep.equal(node.Data) + expect(node1b.Links).to.deep.equal(node.Links) }) }) } diff --git a/src/object/stat.js b/src/object/stat.js index 3c1da06cc..cd139b48e 100644 --- a/src/object/stat.js +++ b/src/object/stat.js @@ -4,7 +4,6 @@ const dagPB = require('ipld-dag-pb') const DAGNode = dagPB.DAGNode -const series = require('async/series') const { getDescribe, getIt, expect } = require('../utils/mocha') const { asDAGLink } = require('./utils') @@ -35,40 +34,14 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should get stats by multihash', (done) => { + it('should get stats by multihash', async () => { const testObj = { Data: Buffer.from('get test object'), Links: [] } - ipfs.object.put(testObj, (err, cid) => { - expect(err).to.not.exist() - - ipfs.object.stat(cid, (err, stats) => { - expect(err).to.not.exist() - const expected = { - Hash: 'QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3Ms2sdJZ', - NumLinks: 0, - BlockSize: 17, - LinksSize: 2, - DataSize: 15, - CumulativeSize: 17 - } - expect(expected).to.deep.equal(stats) - done() - }) - }) - }) - - 
it('should get stats for object by multihash (promised)', async () => { - const testObj = { - Data: Buffer.from('get test object'), - Links: [] - } - - await ipfs.object.put(testObj) - const stats = await ipfs.object.stat('QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3Ms2sdJZ') - + const cid = await ipfs.object.put(testObj) + const stats = await ipfs.object.stat(cid) const expected = { Hash: 'QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3Ms2sdJZ', NumLinks: 0, @@ -77,158 +50,96 @@ module.exports = (createCommon, options) => { DataSize: 15, CumulativeSize: 17 } - expect(expected).to.deep.equal(stats) }) - it('should respect timeout option', (done) => { + it('should respect timeout option', async () => { const testObj = { Data: Buffer.from('get test object'), Links: [] } - ipfs.object.put(testObj, (err) => { - expect(err).to.not.exist() - const timeout = 2 - const startTime = new Date() - const badCid = 'QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3MzzzzzZ' - - // we can test that we are passing in opts by testing the timeout option for a CID that doesn't exist - ipfs.object.stat(badCid, { timeout: `${timeout}s` }, (err, stats) => { - const timeForRequest = (new Date() - startTime) / 1000 - expect(err).to.exist() - expect(err.message).to.equal('failed to get block for QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3MzzzzzZ: context deadline exceeded') - expect(stats).to.not.exist() - expect(timeForRequest).to.not.lessThan(timeout) - expect(timeForRequest).to.not.greaterThan(timeout + 1) - done() - }) - }) + await ipfs.object.put(testObj) + + const timeout = 2 + const startTime = new Date() + const badCid = 'QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3MzzzzzZ' + + const err = await expect(ipfs.object.stat(badCid, { timeout: `${timeout}s` })).to.be.rejected() + const timeForRequest = (new Date() - startTime) / 1000 + + expect(err).to.have.property('message', 'failed to get block for QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3MzzzzzZ: context deadline exceeded') + expect(timeForRequest).to.not.lessThan(timeout) + expect(timeForRequest).to.not.greaterThan(timeout + 1) }) - it('should get stats for object with links by multihash', (done) => { - let node1a - let node1b - let node1bCid - let node2 - - series([ - (cb) => { - try { - node1a = new DAGNode(Buffer.from('Some data 1')) - } catch (err) { - return cb(err) - } - - cb() - }, - (cb) => { - try { - node2 = new DAGNode(Buffer.from('Some data 2')) - } catch (err) { - return cb(err) - } - - cb() - }, - (cb) => { - asDAGLink(node2, 'some-link', (err, link) => { - expect(err).to.not.exist() - - node1b = new DAGNode(node1a.Data, node1a.Links.concat(link)) - - cb() - }) - }, - (cb) => { - ipfs.object.put(node1b, (err, cid) => { - expect(err).to.not.exist() - node1bCid = cid - cb() - }) - }, - (cb) => { - ipfs.object.stat(node1bCid, (err, stats) => { - expect(err).to.not.exist() - const expected = { - Hash: 'QmPR7W4kaADkAo4GKEVVPQN81EDUFCHJtqejQZ5dEG7pBC', - NumLinks: 1, - BlockSize: 64, - LinksSize: 53, - DataSize: 11, - CumulativeSize: 77 - } - expect(expected).to.eql(stats) - cb() - }) - } - ], done) + it('should get stats for object with links by multihash', async () => { + const node1a = new DAGNode(Buffer.from('Some data 1')) + const node2 = new DAGNode(Buffer.from('Some data 2')) + + const link = await asDAGLink(node2, 'some-link') + + const node1b = new DAGNode(node1a.Data, node1a.Links.concat(link)) + const node1bCid = await ipfs.object.put(node1b) + + const stats = await ipfs.object.stat(node1bCid) + const expected = { + Hash: 'QmPR7W4kaADkAo4GKEVVPQN81EDUFCHJtqejQZ5dEG7pBC', + 
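// a descriptive note, assuming dag-pb encoding: the single DAGLink added above is what bumps NumLinks and the block/link sizes below + 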
NumLinks: 1, + BlockSize: 64, + LinksSize: 53, + DataSize: 11, + CumulativeSize: 77 + } + expect(expected).to.eql(stats) }) - it('should get stats by base58 encoded multihash', (done) => { + it('should get stats by base58 encoded multihash', async () => { const testObj = { Data: Buffer.from('get test object'), Links: [] } - ipfs.object.put(testObj, (err, cid) => { - expect(err).to.not.exist() + const cid = await ipfs.object.put(testObj) - ipfs.object.stat(cid.buffer, (err, stats) => { - expect(err).to.not.exist() - const expected = { - Hash: 'QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3Ms2sdJZ', - NumLinks: 0, - BlockSize: 17, - LinksSize: 2, - DataSize: 15, - CumulativeSize: 17 - } - expect(expected).to.deep.equal(stats) - done() - }) - }) + const stats = await ipfs.object.stat(cid.buffer) + const expected = { + Hash: 'QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3Ms2sdJZ', + NumLinks: 0, + BlockSize: 17, + LinksSize: 2, + DataSize: 15, + CumulativeSize: 17 + } + expect(expected).to.deep.equal(stats) }) - it('should get stats by base58 encoded multihash string', (done) => { + it('should get stats by base58 encoded multihash string', async () => { const testObj = { Data: Buffer.from('get test object'), Links: [] } - ipfs.object.put(testObj, (err, cid) => { - expect(err).to.not.exist() + const cid = await ipfs.object.put(testObj) - ipfs.object.stat(cid.toBaseEncodedString(), (err, stats) => { - expect(err).to.not.exist() - const expected = { - Hash: 'QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3Ms2sdJZ', - NumLinks: 0, - BlockSize: 17, - LinksSize: 2, - DataSize: 15, - CumulativeSize: 17 - } - expect(expected).to.deep.equal(stats) - done() - }) - }) + const stats = await ipfs.object.stat(cid.toBaseEncodedString()) + const expected = { + Hash: 'QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3Ms2sdJZ', + NumLinks: 0, + BlockSize: 17, + LinksSize: 2, + DataSize: 15, + CumulativeSize: 17 + } + expect(expected).to.deep.equal(stats) }) it('returns error for request without argument', () => { - return ipfs.object.stat(null) - .then( - () => expect.fail('should have returned an error for invalid argument'), - (err) => expect(err).to.be.an.instanceof(Error) - ) + return expect(ipfs.object.stat(null)).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) it('returns error for request with invalid argument', () => { - return ipfs.object.stat('invalid', { enc: 'base58' }) - .then( - () => expect.fail('should have returned an error for invalid argument'), - (err) => expect(err).to.be.an.instanceof(Error) - ) + return expect(ipfs.object.stat('invalid', { enc: 'base58' })).to.eventually.be.rejected.and.be.an.instanceOf(Error) }) }) } diff --git a/src/object/utils.js b/src/object/utils.js index db206d985..f426dfd99 100644 --- a/src/object/utils.js +++ b/src/object/utils.js @@ -1,15 +1,10 @@ 'use strict' const { promisify } = require('es6-promisify') -const callbackify = require('callbackify') const dagPB = require('ipld-dag-pb') const { DAGNode, DAGLink } = dagPB -const calculateCid = callbackify((node) => { - return dagPB.util.cid(node.serialize(), { - cidVersion: 0 - }) -}) +const calculateCid = (node) => dagPB.util.cid(node.serialize(), { cidVersion: 0 }) const createDAGNode = promisify((data, links, cb) => { cb(null, new DAGNode(data, links)) @@ -19,20 +14,11 @@ const addLinkToDAGNode = promisify((parent, link, cb) => { cb(null, new DAGNode(parent.Data, parent.Links.concat(link))) }) -const asDAGLink = promisify((node, name, cb) => { - if (typeof name === 'function') { - cb = name - name = '' - } - - calculateCid(node, (err, 
cid) => { - if (err) { - return cb(err) - } +const asDAGLink = async (node, name = '') => { + const cid = await calculateCid(node) - cb(null, new DAGLink(name, node.size, cid)) - }) -}) + return new DAGLink(name, node.size, cid) +} module.exports = { calculateCid, From f8caf5cc24cbff5cb9df7b18e6a0b6a40b7a5e19 Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Wed, 20 Nov 2019 23:32:07 +0000 Subject: [PATCH 14/45] chore: pin async/await refactor --- src/pin/add.js | 20 +---- src/pin/ls.js | 232 ++++++++++++++++++------------------------------- src/pin/rm.js | 47 +++++----- 3 files changed, 109 insertions(+), 190 deletions(-) diff --git a/src/pin/add.js b/src/pin/add.js index 45ff97979..7f71d3f5a 100644 --- a/src/pin/add.js +++ b/src/pin/add.js @@ -38,23 +38,11 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should add a pin', (done) => { - ipfs.pin.add(fixtures.files[0].cid, { recursive: false }, (err, pinset) => { - expect(err).to.not.exist() - expect(pinset).to.deep.include({ - hash: fixtures.files[0].cid - }) - done() + it('should add a pin', async () => { + const pinset = await ipfs.pin.add(fixtures.files[0].cid, { recursive: false }) + expect(pinset).to.deep.include({ + hash: fixtures.files[0].cid }) }) - - it('should add a pin (promised)', () => { - return ipfs.pin.add(fixtures.files[1].cid, { recursive: false }) - .then((pinset) => { - expect(pinset).to.deep.include({ - hash: fixtures.files[1].cid - }) - }) - }) }) } diff --git a/src/pin/ls.js b/src/pin/ls.js index 691350e13..1e3f59acf 100644 --- a/src/pin/ls.js +++ b/src/pin/ls.js @@ -50,176 +50,114 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) // 1st, because ipfs.add pins automatically - it('should list all recursive pins', (done) => { - ipfs.pin.ls({ type: 'recursive' }, (err, pinset) => { - expect(err).to.not.exist() - expect(pinset).to.deep.include({ - type: 'recursive', - hash: fixtures.files[0].cid - }) - expect(pinset).to.deep.include({ - type: 'recursive', - hash: fixtures.directory.cid - }) - done() + it('should list all recursive pins', async () => { + const pinset = await ipfs.pin.ls({ type: 'recursive' }) + expect(pinset).to.deep.include({ + type: 'recursive', + hash: fixtures.files[0].cid }) - }) - - it('should list all indirect pins', (done) => { - ipfs.pin.ls({ type: 'indirect' }, (err, pinset) => { - expect(err).to.not.exist() - expect(pinset).to.not.deep.include({ - type: 'recursive', - hash: fixtures.files[0].cid - }) - expect(pinset).to.not.deep.include({ - type: 'direct', - hash: fixtures.files[1].cid - }) - expect(pinset).to.not.deep.include({ - type: 'recursive', - hash: fixtures.directory.cid - }) - expect(pinset).to.deep.include({ - type: 'indirect', - hash: fixtures.directory.files[0].cid - }) - expect(pinset).to.deep.include({ - type: 'indirect', - hash: fixtures.directory.files[1].cid - }) - done() + expect(pinset).to.deep.include({ + type: 'recursive', + hash: fixtures.directory.cid }) }) - it('should list all types of pins', (done) => { - ipfs.pin.ls((err, pinset) => { - expect(err).to.not.exist() - expect(pinset).to.not.be.empty() - // check the three "roots" - expect(pinset).to.deep.include({ - type: 'recursive', - hash: fixtures.directory.cid - }) - expect(pinset).to.deep.include({ - type: 'recursive', - hash: fixtures.files[0].cid - }) - expect(pinset).to.deep.include({ - type: 'direct', - hash: fixtures.files[1].cid - }) - expect(pinset).to.deep.include({ - type: 'indirect', - hash: 
fixtures.directory.files[0].cid - }) - expect(pinset).to.deep.include({ - type: 'indirect', - hash: fixtures.directory.files[1].cid - }) - done() + it('should list all indirect pins', async () => { + const pinset = await ipfs.pin.ls({ type: 'indirect' }) + expect(pinset).to.not.deep.include({ + type: 'recursive', + hash: fixtures.files[0].cid + }) + expect(pinset).to.not.deep.include({ + type: 'direct', + hash: fixtures.files[1].cid + }) + expect(pinset).to.not.deep.include({ + type: 'recursive', + hash: fixtures.directory.cid + }) + expect(pinset).to.deep.include({ + type: 'indirect', + hash: fixtures.directory.files[0].cid + }) + expect(pinset).to.deep.include({ + type: 'indirect', + hash: fixtures.directory.files[1].cid }) }) - it('should list all types of pins (promised)', () => { - return ipfs.pin.ls() - .then((pinset) => { - expect(pinset).to.not.be.empty() - // check our three "roots" - expect(pinset).to.deep.include({ - type: 'recursive', - hash: fixtures.directory.cid - }) - expect(pinset).to.deep.include({ - type: 'recursive', - hash: fixtures.files[0].cid - }) - expect(pinset).to.deep.include({ - type: 'direct', - hash: fixtures.files[1].cid - }) - expect(pinset).to.deep.include({ - type: 'indirect', - hash: fixtures.directory.files[0].cid - }) - expect(pinset).to.deep.include({ - type: 'indirect', - hash: fixtures.directory.files[1].cid - }) - }) - }) - - it('should list all direct pins', (done) => { - ipfs.pin.ls({ type: 'direct' }, (err, pinset) => { - expect(err).to.not.exist() - expect(pinset).to.have.lengthOf(1) - expect(pinset).to.deep.include({ - type: 'direct', - hash: fixtures.files[1].cid - }) - done() + it('should list all types of pins', async () => { + const pinset = await ipfs.pin.ls() + expect(pinset).to.not.be.empty() + // check the three "roots" + expect(pinset).to.deep.include({ + type: 'recursive', + hash: fixtures.directory.cid + }) + expect(pinset).to.deep.include({ + type: 'recursive', + hash: fixtures.files[0].cid + }) + expect(pinset).to.deep.include({ + type: 'direct', + hash: fixtures.files[1].cid + }) + expect(pinset).to.deep.include({ + type: 'indirect', + hash: fixtures.directory.files[0].cid + }) + expect(pinset).to.deep.include({ + type: 'indirect', + hash: fixtures.directory.files[1].cid }) }) - it('should list pins for a specific hash', (done) => { - ipfs.pin.ls(fixtures.files[0].cid, (err, pinset) => { - expect(err).to.not.exist() - expect(pinset).to.deep.equal([{ - type: 'recursive', - hash: fixtures.files[0].cid - }]) - done() + it('should list all direct pins', async () => { + const pinset = await ipfs.pin.ls({ type: 'direct' }) + expect(pinset).to.have.lengthOf(1) + expect(pinset).to.deep.include({ + type: 'direct', + hash: fixtures.files[1].cid }) }) - it('should list pins for a specific hash (promised)', () => { - return ipfs.pin.ls(fixtures.files[0].cid) - .then((pinset) => { - expect(pinset).to.deep.equal([{ - type: 'recursive', - hash: fixtures.files[0].cid - }]) - }) + it('should list pins for a specific hash', async () => { + const pinset = await ipfs.pin.ls(fixtures.files[0].cid) + expect(pinset).to.deep.equal([{ + type: 'recursive', + hash: fixtures.files[0].cid + }]) }) - it('should throw an error on missing direct pins for existing path', (done) => { + it('should throw an error on missing direct pins for existing path', () => { // ipfs.txt is an indirect pin, so lookup for direct one should throw an error - ipfs.pin.ls(`/ipfs/${fixtures.directory.cid}/files/ipfs.txt`, { type: 'direct' }, (err, pinset) => { - 
expect(err).to.exist() - expect(pinset).to.not.exist() - expect(err.message).to.be.equal(`path '/ipfs/${fixtures.directory.cid}/files/ipfs.txt' is not pinned`) - done() - }) + return expect(ipfs.pin.ls(`/ipfs/${fixtures.directory.cid}/files/ipfs.txt`, { type: 'direct' })) + .to.eventually.be.rejected + .and.be.an.instanceOf(Error) + .and.to.have.property('message', `path '/ipfs/${fixtures.directory.cid}/files/ipfs.txt' is not pinned`) }) - it('should throw an error on missing link for a specific path', (done) => { - ipfs.pin.ls(`/ipfs/${fixtures.directory.cid}/I-DONT-EXIST.txt`, { type: 'direct' }, (err, pinset) => { - expect(err).to.exist() - expect(pinset).to.not.exist() - expect(err.message).to.be.equal(`no link named "I-DONT-EXIST.txt" under ${fixtures.directory.cid}`) - done() - }) + it('should throw an error on missing link for a specific path', () => { + return expect(ipfs.pin.ls(`/ipfs/${fixtures.directory.cid}/I-DONT-EXIST.txt`, { type: 'direct' })) + .to.eventually.be.rejected + .and.be.an.instanceOf(Error) + .and.to.have.property('message', `no link named "I-DONT-EXIST.txt" under ${fixtures.directory.cid}`) }) - it('should list indirect pins for a specific path', (done) => { - ipfs.pin.ls(`/ipfs/${fixtures.directory.cid}/files/ipfs.txt`, { type: 'indirect' }, (err, pinset) => { - expect(err).to.not.exist() - expect(pinset).to.deep.include({ - type: `indirect through ${fixtures.directory.cid}`, - hash: fixtures.directory.files[1].cid - }) - done() + it('should list indirect pins for a specific path', async () => { + const pinset = await ipfs.pin.ls(`/ipfs/${fixtures.directory.cid}/files/ipfs.txt`, { type: 'indirect' }) + expect(pinset).to.deep.include({ + type: `indirect through ${fixtures.directory.cid}`, + hash: fixtures.directory.files[1].cid }) }) - it('should list recursive pins for a specific hash (promised)', () => { - return ipfs.pin.ls(fixtures.files[0].cid, { type: 'recursive' }) - .then((pinset) => { - expect(pinset).to.deep.equal([{ - type: 'recursive', - hash: fixtures.files[0].cid - }]) - }) + it('should list recursive pins for a specific hash', async () => { + const pinset = await ipfs.pin.ls(fixtures.files[0].cid, { type: 'recursive' }) + expect(pinset).to.deep.equal([{ + type: 'recursive', + hash: fixtures.files[0].cid + }]) }) }) } diff --git a/src/pin/rm.js b/src/pin/rm.js index ba88b66f9..94ac6d809 100644 --- a/src/pin/rm.js +++ b/src/pin/rm.js @@ -41,36 +41,29 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should remove a recursive pin', (done) => { - ipfs.pin.rm(fixtures.files[0].cid, { recursive: true }, (err, pinset) => { - expect(err).to.not.exist() - expect(pinset).to.deep.equal([{ - hash: fixtures.files[0].cid - }]) - ipfs.pin.ls({ type: 'recursive' }, (err, pinset) => { - expect(err).to.not.exist() - expect(pinset).to.not.deep.include({ - hash: fixtures.files[0].cid, - type: 'recursive' - }) - done() - }) + it('should remove a recursive pin', async () => { + const removedPinset = await ipfs.pin.rm(fixtures.files[0].cid, { recursive: true }) + expect(removedPinset).to.deep.equal([{ + hash: fixtures.files[0].cid + }]) + + const pinset = await ipfs.pin.ls({ type: 'recursive' }) + expect(pinset).to.not.deep.include({ + hash: fixtures.files[0].cid, + type: 'recursive' }) }) - it('should remove a direct pin (promised)', () => { - return ipfs.pin.rm(fixtures.files[1].cid, { recursive: false }) - .then((pinset) => { - expect(pinset).to.deep.equal([{ - hash: fixtures.files[1].cid - }]) - return ipfs.pin.ls({ type: 
'direct' }) - }) - .then((pinset) => { - expect(pinset).to.not.deep.include({ - hash: fixtures.files[1].cid - }) - }) + it('should remove a direct pin', async () => { + const removedPinset = await ipfs.pin.rm(fixtures.files[1].cid, { recursive: false }) + expect(removedPinset).to.deep.equal([{ + hash: fixtures.files[1].cid + }]) + + const pinset = await ipfs.pin.ls({ type: 'direct' }) + expect(pinset).to.not.deep.include({ + hash: fixtures.files[1].cid + }) }) }) } From b49666bf552fe242306f2f273560054a744c46bb Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Wed, 20 Nov 2019 23:32:46 +0000 Subject: [PATCH 15/45] chore: ping async/await refactor --- src/ping/ping-pull-stream.js | 57 +++++++++----------- src/ping/ping-readable-stream.js | 92 +++++++++++++++++--------------- src/ping/ping.js | 29 ++++------ 3 files changed, 84 insertions(+), 94 deletions(-) diff --git a/src/ping/ping-pull-stream.js b/src/ping/ping-pull-stream.js index 28b8d056f..41b0e8496 100644 --- a/src/ping/ping-pull-stream.js +++ b/src/ping/ping-pull-stream.js @@ -1,7 +1,7 @@ /* eslint-env mocha */ 'use strict' -const pull = require('pull-stream') +const pullToPromise = require('pull-to-promise') const series = require('async/series') const { spawnNodesWithId } = require('../utils/spawn') const { getDescribe, getIt, expect } = require('../utils/mocha') @@ -40,47 +40,38 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should send the specified number of packets over pull stream', (done) => { - let packetNum = 0 + it('should send the specified number of packets over pull stream', async () => { const count = 3 - pull( - ipfsA.pingPullStream(ipfsB.peerId.id, { count }), - pull.drain((res) => { - expect(res.success).to.be.true() - // It's a pong - if (isPong(res)) { - packetNum++ - } - }, (err) => { - expect(err).to.not.exist() - expect(packetNum).to.equal(count) - done() - }) - ) + + const results = await pullToPromise.any(ipfsA.pingPullStream(ipfsB.peerId.id, { count })) + + const packetNum = results.reduce((acc, result) => { + expect(result.success).to.be.true() + + if (isPong(result)) { + acc++ + } + + return acc + }, 0) + + expect(packetNum).to.equal(count) }) - it('should fail when pinging an unknown peer over pull stream', (done) => { + it('should fail when pinging an unknown peer over pull stream', () => { const unknownPeerId = 'QmUmaEnH1uMmvckMZbh3yShaasvELPW4ZLPWnB4entMTEn' const count = 2 - pull( - ipfsA.pingPullStream(unknownPeerId, { count }), - pull.collect((err, results) => { - expect(err).to.exist() - done() - }) - ) + + return expect(pullToPromise.any(ipfsA.pingPullStream(unknownPeerId, { count }))) + .to.eventually.be.rejected() }) - it('should fail when pinging an invalid peer id over pull stream', (done) => { + it('should fail when pinging an invalid peer id over pull stream', () => { const invalidPeerId = 'not a peer ID' const count = 2 - pull( - ipfsA.pingPullStream(invalidPeerId, { count }), - pull.collect((err, results) => { - expect(err).to.exist() - done() - }) - ) + + return expect(pullToPromise.any(ipfsA.pingPullStream(invalidPeerId, { count }))) + .to.eventually.be.rejected() }) }) } diff --git a/src/ping/ping-readable-stream.js b/src/ping/ping-readable-stream.js index 4d057fb61..4deb6ae6d 100644 --- a/src/ping/ping-readable-stream.js +++ b/src/ping/ping-readable-stream.js @@ -41,62 +41,68 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should send the specified number of packets over readable stream', 
(done) => { + it('should send the specified number of packets over readable stream', () => { let packetNum = 0 const count = 3 - pump( - ipfsA.pingReadableStream(ipfsB.peerId.id, { count }), - new Writable({ - objectMode: true, - write (res, enc, cb) { - expect(res.success).to.be.true() - // It's a pong - if (isPong(res)) { - packetNum++ - } + return new Promise((resolve, reject) => { + pump( + ipfsA.pingReadableStream(ipfsB.peerId.id, { count }), + new Writable({ + objectMode: true, + write (res, enc, cb) { + expect(res.success).to.be.true() + // It's a pong + if (isPong(res)) { + packetNum++ + } - cb() + cb() + } + }), + (err) => { + expect(err).to.not.exist() + expect(packetNum).to.equal(count) + resolve() } - }), - (err) => { - expect(err).to.not.exist() - expect(packetNum).to.equal(count) - done() - } - ) + ) + }) }) - it('should fail when pinging peer that is not available over readable stream', (done) => { + it('should fail when pinging peer that is not available over readable stream', () => { const unknownPeerId = 'QmUmaEnH1uMmvckMZbh3yShaasvELPW4ZLPWnB4entMTEn' - pump( - ipfsA.pingReadableStream(unknownPeerId, {}), - new Writable({ - objectMode: true, - write: (res, enc, cb) => cb() - }), - (err) => { - expect(err).to.exist() - done() - } - ) + return new Promise((resolve, reject) => { + pump( + ipfsA.pingReadableStream(unknownPeerId, {}), + new Writable({ + objectMode: true, + write: (res, enc, cb) => cb() + }), + (err) => { + expect(err).to.exist() + resolve() + } + ) + }) }) - it('should fail when pinging an invalid peer id over readable stream', (done) => { + it('should fail when pinging an invalid peer id over readable stream', () => { const invalidPeerId = 'not a peer ID' - pump( - ipfsA.pingReadableStream(invalidPeerId, {}), - new Writable({ - objectMode: true, - write: (chunk, enc, cb) => cb() - }), - (err) => { - expect(err).to.exist() - done() - } - ) + return new Promise((resolve, reject) => { + pump( + ipfsA.pingReadableStream(invalidPeerId, {}), + new Writable({ + objectMode: true, + write: (chunk, enc, cb) => cb() + }), + (err) => { + expect(err).to.exist() + resolve() + } + ) + }) }) }) } diff --git a/src/ping/ping.js b/src/ping/ping.js index 2a4f1b913..f4632f718 100644 --- a/src/ping/ping.js +++ b/src/ping/ping.js @@ -41,34 +41,27 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should send the specified number of packets', (done) => { + it('should send the specified number of packets', async () => { const count = 3 - ipfsA.ping(ipfsB.peerId.id, { count }, (err, responses) => { - expect(err).to.not.exist() - responses.forEach(expectIsPingResponse) - const pongs = responses.filter(isPong) - expect(pongs.length).to.equal(count) - done() - }) + const responses = await ipfsA.ping(ipfsB.peerId.id, { count }) + responses.forEach(expectIsPingResponse) + + const pongs = responses.filter(isPong) + expect(pongs.length).to.equal(count) }) - it('should fail when pinging a peer that is not available', (done) => { + it('should fail when pinging a peer that is not available', () => { const notAvailablePeerId = 'QmUmaEnH1uMmvckMZbh3yShaasvELPW4ZLPWnB4entMTEn' const count = 2 - ipfsA.ping(notAvailablePeerId, { count }, (err, responses) => { - expect(err).to.exist() - done() - }) + return expect(ipfsA.ping(notAvailablePeerId, { count })).to.eventually.be.rejected() }) - it('should fail when pinging an invalid peer Id', (done) => { + it('should fail when pinging an invalid peer Id', () => { const invalidPeerId = 'not a peer ID' const count 
= 2 - ipfsA.ping(invalidPeerId, { count }, (err, responses) => { - expect(err).to.exist() - done() - }) + + return expect(ipfsA.ping(invalidPeerId, { count })).to.eventually.be.rejected() }) }) } From 3c9610c51bb49565983ea7465db1f3ab1b34a395 Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Wed, 20 Nov 2019 23:33:09 +0000 Subject: [PATCH 16/45] chore: pubsub async/await refactor --- src/pubsub/ls.js | 2 +- src/pubsub/peers.js | 2 +- src/pubsub/subscribe.js | 7 +++---- src/pubsub/unsubscribe.js | 2 +- src/pubsub/utils.js | 2 +- 5 files changed, 7 insertions(+), 8 deletions(-) diff --git a/src/pubsub/ls.js b/src/pubsub/ls.js index 639aed5e5..d052b395b 100644 --- a/src/pubsub/ls.js +++ b/src/pubsub/ls.js @@ -3,7 +3,7 @@ const { getTopic } = require('./utils') const { getDescribe, getIt, expect } = require('../utils/mocha') -const delay = require('../utils/delay') +const delay = require('delay') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/src/pubsub/peers.js b/src/pubsub/peers.js index 448130237..2e40e162f 100644 --- a/src/pubsub/peers.js +++ b/src/pubsub/peers.js @@ -6,7 +6,7 @@ const { spawnNodesWithId } = require('../utils/spawn') const { waitForPeers, getTopic } = require('./utils') const { getDescribe, getIt, expect } = require('../utils/mocha') const { connect } = require('../utils/swarm') -const delay = require('../utils/delay') +const delay = require('delay') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/src/pubsub/subscribe.js b/src/pubsub/subscribe.js index e04bb0a50..ab6761e1f 100644 --- a/src/pubsub/subscribe.js +++ b/src/pubsub/subscribe.js @@ -7,8 +7,7 @@ const { collect } = require('streaming-iterables') const { spawnNodesWithId } = require('../utils/spawn') const { waitForPeers, getTopic } = require('./utils') const { getDescribe, getIt, expect } = require('../utils/mocha') -const { connect } = require('../utils/swarm') -const delay = require('../utils/delay') +const delay = require('delay') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -155,7 +154,7 @@ module.exports = (createCommon, options) => { }) describe('multiple connected nodes', () => { - before((done) => { + before(() => { if (ipfs1.pubsub.setMaxListeners) { ipfs1.pubsub.setMaxListeners(100) } @@ -165,7 +164,7 @@ module.exports = (createCommon, options) => { } const ipfs2Addr = ipfs2.peerId.addresses.find((a) => a.includes('127.0.0.1')) - connect(ipfs1, ipfs2Addr, done) + return ipfs1.swarm.connect(ipfs2Addr) }) it('should receive messages from a different node', async () => { diff --git a/src/pubsub/unsubscribe.js b/src/pubsub/unsubscribe.js index f6b8ce81c..9a7f0efd4 100644 --- a/src/pubsub/unsubscribe.js +++ b/src/pubsub/unsubscribe.js @@ -4,7 +4,7 @@ const { isBrowser, isWebWorker, isElectronRenderer } = require('ipfs-utils/src/env') const { getTopic } = require('./utils') const { getDescribe, getIt, expect } = require('../utils/mocha') -const delay = require('../utils/delay') +const delay = require('delay') module.exports = (createCommon, options) => { const describe = getDescribe(options) diff --git a/src/pubsub/utils.js b/src/pubsub/utils.js index f6721c8c8..80b53c659 100644 --- a/src/pubsub/utils.js +++ b/src/pubsub/utils.js @@ -1,7 +1,7 @@ 'use strict' const hat = require('hat') -const delay = require('../utils/delay') +const delay = require('delay') async function waitForPeers (ipfs, topic, peersToWait, waitForMs) { const start = Date.now() From 
08c455d6c049264224601fcafdf1814a7c5be089 Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Wed, 20 Nov 2019 23:33:36 +0000 Subject: [PATCH 17/45] chore: repo async/await refactor --- src/repo/gc.js | 21 ++++++++++----------- src/repo/stat.js | 14 +++----------- src/repo/version.js | 15 +++------------ 3 files changed, 16 insertions(+), 34 deletions(-) diff --git a/src/repo/gc.js b/src/repo/gc.js index e2d0158b0..52c66f9d5 100644 --- a/src/repo/gc.js +++ b/src/repo/gc.js @@ -29,18 +29,17 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should run garbage collection', (done) => { - ipfs.repo.gc((err, res) => { - expect(err).to.not.exist() - expect(res).to.exist() - done() - }) - }) + it('should run garbage collection', async () => { + const res = await ipfs.add(Buffer.from('apples')) - it('should run garbage collection (promised)', () => { - return ipfs.repo.gc().then((res) => { - expect(res).to.exist() - }) + const pinset = await ipfs.pin.ls() + expect(pinset.map((obj) => obj.hash)).includes(res[0].hash) + + await ipfs.pin.rm(res[0].hash) + await ipfs.repo.gc() + + const finalPinset = await ipfs.pin.ls() + expect(finalPinset.map((obj) => obj.hash)).not.includes(res[0].hash) }) it('should clean up unpinned data', async () => { diff --git a/src/repo/stat.js b/src/repo/stat.js index d313a9998..f20d006ac 100644 --- a/src/repo/stat.js +++ b/src/repo/stat.js @@ -29,17 +29,9 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should get repo stats', (done) => { - ipfs.repo.stat((err, res) => { - expectIsRepo(err, res) - done() - }) - }) - - it('should get repo stats (promised)', () => { - return ipfs.repo.stat().then((res) => { - expectIsRepo(null, res) - }) + it('should get repo stats', async () => { + const res = await ipfs.repo.stat() + expectIsRepo(null, res) }) }) } diff --git a/src/repo/version.js b/src/repo/version.js index 20c012a5b..c9db6c95c 100644 --- a/src/repo/version.js +++ b/src/repo/version.js @@ -28,18 +28,9 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should get the repo version', (done) => { - ipfs.repo.version((err, version) => { - expect(err).to.not.exist() - expect(version).to.exist() - done() - }) - }) - - it('should get the repo version (promised)', () => { - return ipfs.repo.version().then((version) => { - expect(version).to.exist() - }) + it('should get the repo version', async () => { + const version = await ipfs.repo.version() + expect(version).to.exist() }) }) } From 143a2c4849c3744c2557e0984163301daf61030a Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Wed, 20 Nov 2019 23:33:57 +0000 Subject: [PATCH 18/45] chore: stats async/await refactor --- src/stats/bitswap.js | 14 +++----------- src/stats/bw-pull-stream.js | 13 ++++--------- src/stats/bw-readable-stream.js | 11 +++++------ src/stats/bw.js | 14 +++----------- src/stats/repo.js | 14 +++----------- 5 files changed, 18 insertions(+), 48 deletions(-) diff --git a/src/stats/bitswap.js b/src/stats/bitswap.js index b0e57f38b..545db48ae 100644 --- a/src/stats/bitswap.js +++ b/src/stats/bitswap.js @@ -29,17 +29,9 @@ module.exports = (createCommon, options) => { after((done) => common.teardown(done)) - it('should get bitswap stats', (done) => { - ipfs.stats.bitswap((err, res) => { - expectIsBitswap(err, res) - done() - }) - }) - - it('should get bitswap stats (promised)', () => { - return ipfs.stats.bitswap().then((res) => { - expectIsBitswap(null, res) - }) + it('should get bitswap 
stats', async () => {
+      const res = await ipfs.stats.bitswap()
+      expectIsBitswap(null, res)
     })
   })
 }
diff --git a/src/stats/bw-pull-stream.js b/src/stats/bw-pull-stream.js
index 000525c5f..3fd26033d 100644
--- a/src/stats/bw-pull-stream.js
+++ b/src/stats/bw-pull-stream.js
@@ -2,7 +2,7 @@
 'use strict'
 
 const { expectIsBandwidth } = require('./utils')
-const pull = require('pull-stream')
+const pullToPromise = require('pull-to-promise')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
 
 module.exports = (createCommon, options) => {
@@ -30,16 +30,11 @@ module.exports = (createCommon, options) => {
 
     after((done) => common.teardown(done))
 
-    it('should get bandwidth stats over pull stream', (done) => {
+    it('should get bandwidth stats over pull stream', async () => {
       const stream = ipfs.stats.bwPullStream()
 
-      pull(
-        stream,
-        pull.collect((err, data) => {
-          expectIsBandwidth(err, data[0])
-          done()
-        })
-      )
+      const data = await pullToPromise.any(stream)
+      expectIsBandwidth(null, data[0])
     })
   })
 }
diff --git a/src/stats/bw-readable-stream.js b/src/stats/bw-readable-stream.js
index e8bc490b9..b44869506 100644
--- a/src/stats/bw-readable-stream.js
+++ b/src/stats/bw-readable-stream.js
@@ -3,6 +3,7 @@
 
 const { expectIsBandwidth } = require('./utils')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
+const getStream = require('get-stream')
 
 module.exports = (createCommon, options) => {
   const describe = getDescribe(options)
@@ -29,14 +30,12 @@ module.exports = (createCommon, options) => {
 
     after((done) => common.teardown(done))
 
-    it('should get bandwidth stats over readable stream', (done) => {
+    it('should get bandwidth stats over readable stream', async () => {
       const stream = ipfs.stats.bwReadableStream()
 
-      stream.once('data', (data) => {
-        expectIsBandwidth(null, data)
-        stream.destroy()
-        done()
-      })
+      const [data] = await getStream.array(stream)
+
+      expectIsBandwidth(null, data)
     })
   })
 }
diff --git a/src/stats/bw.js b/src/stats/bw.js
index c851f4f27..34c552673 100644
--- a/src/stats/bw.js
+++ b/src/stats/bw.js
@@ -29,17 +29,9 @@ module.exports = (createCommon, options) => {
 
     after((done) => common.teardown(done))
 
-    it('should get bandwidth stats', function (done) {
-      ipfs.stats.bw((err, res) => {
-        expectIsBandwidth(err, res)
-        done()
-      })
-    })
-
-    it('should get bandwidth stats (promised)', () => {
-      return ipfs.stats.bw().then((res) => {
-        expectIsBandwidth(null, res)
-      })
+    it('should get bandwidth stats', async () => {
+      const res = await ipfs.stats.bw()
+      expectIsBandwidth(null, res)
     })
   })
 }
diff --git a/src/stats/repo.js b/src/stats/repo.js
index 77b85e3e7..4064b33d4 100644
--- a/src/stats/repo.js
+++ b/src/stats/repo.js
@@ -29,17 +29,9 @@ module.exports = (createCommon, options) => {
 
     after((done) => common.teardown(done))
 
-    it('should get repo stats', (done) => {
-      ipfs.stats.repo((err, res) => {
-        expectIsRepo(err, res)
-        done()
-      })
-    })
-
-    it('should get repo stats (promised)', () => {
-      return ipfs.stats.repo().then((res) => {
-        expectIsRepo(null, res)
-      })
+    it('should get repo stats', async () => {
+      const res = await ipfs.stats.repo()
+      expectIsRepo(null, res)
     })
   })
 }
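NOTE (illustrative, not part of any patch): patches 15 and 18 above replace pull()/pull.collect() plumbing with pull-to-promise. A minimal sketch of that contract, using only the pull-stream and pull-to-promise packages declared in package.json, and assuming pullToPromise.any behaves as the tests above rely on — it drains a pull-stream source into an array and rejects if the stream errors:

    const pull = require('pull-stream')
    const pullToPromise = require('pull-to-promise')

    async function main () {
      // collect every value the source emits, then resolve
      const values = await pullToPromise.any(pull.values([1, 2, 3]))
      console.log(values) // [1, 2, 3]
    }

    main().catch(console.error)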
From 6d4d5bb118ac07bb4eb81bbfd148b0c115dd265f Mon Sep 17 00:00:00 2001
From: Pedro Santos
Date: Wed, 20 Nov 2019 23:34:15 +0000
Subject: [PATCH 19/45] chore: swarm async/await refactor

---
 src/swarm/addrs.js | 19 +++------
 src/swarm/connect.js | 16 +++---
 src/swarm/disconnect.js | 16 +++---
 src/swarm/local-addrs.js | 15 ++-----
 src/swarm/peers.js | 89 +++++++++++++++-------------------------
 5 files changed, 61 insertions(+), 94 deletions(-)

diff --git a/src/swarm/addrs.js b/src/swarm/addrs.js
index 59ec826db..03196577a 100644
--- a/src/swarm/addrs.js
+++ b/src/swarm/addrs.js
@@ -23,20 +23,11 @@ module.exports = (createCommon, options) => {
 
     after(() => common.teardown())
 
-    it('should get a list of node addresses', (done) => {
-      ipfsA.swarm.addrs((err, peerInfos) => {
-        expect(err).to.not.exist()
-        expect(peerInfos).to.not.be.empty()
-        expect(peerInfos).to.be.an('array')
-        peerInfos.forEach(m => expect(PeerInfo.isPeerInfo(m)).to.be.true())
-        done()
-      })
-    })
-
-    it('should get a list of node addresses (promised)', () => {
-      return ipfsA.swarm.addrs().then((peerInfos) => {
-        expect(peerInfos).to.have.length.above(0)
-      })
+    it('should get a list of node addresses', async () => {
+      const peerInfos = await ipfsA.swarm.addrs()
+      expect(peerInfos).to.not.be.empty()
+      expect(peerInfos).to.be.an('array')
+      peerInfos.forEach(m => expect(PeerInfo.isPeerInfo(m)).to.be.true())
     })
   })
 }
diff --git a/src/swarm/connect.js b/src/swarm/connect.js
index 2e0a498df..9de402652 100644
--- a/src/swarm/connect.js
+++ b/src/swarm/connect.js
@@ -1,7 +1,7 @@
 /* eslint-env mocha */
 'use strict'
 
-const { getDescribe, getIt } = require('../utils/mocha')
+const { getDescribe, getIt, expect } = require('../utils/mocha')
 
 module.exports = (createCommon, options) => {
   const describe = getDescribe(options)
@@ -20,12 +20,16 @@ module.exports = (createCommon, options) => {
 
     after(() => common.teardown())
 
-    it('should connect to a peer', (done) => {
-      ipfsA.swarm.connect(ipfsB.peerId.addresses[0], done)
-    })
+    it('should connect to a peer', async () => {
+      let peers
+
+      peers = await ipfsA.swarm.peers()
+      expect(peers).to.have.length(0)
+
+      await ipfsA.swarm.connect(ipfsB.peerId.addresses[0])
 
-    it('should connect to a peer (promised)', () => {
-      return ipfsA.swarm.connect(ipfsB.peerId.addresses[0])
+      peers = await ipfsA.swarm.peers()
+      expect(peers).to.have.length.above(0)
     })
   })
 }
diff --git a/src/swarm/disconnect.js b/src/swarm/disconnect.js
index fa4edab94..51551001e 100644
--- a/src/swarm/disconnect.js
+++ b/src/swarm/disconnect.js
@@ -1,7 +1,7 @@
 /* eslint-env mocha */
 'use strict'
 
-const { getDescribe, getIt } = require('../utils/mocha')
+const { getDescribe, getIt, expect } = require('../utils/mocha')
 
 module.exports = (createCommon, options) => {
   const describe = getDescribe(options)
@@ -22,12 +22,16 @@ module.exports = (createCommon, options) => {
 
     after(() => common.teardown())
 
-    it('should disconnect from a peer', (done) => {
-      ipfsA.swarm.disconnect(ipfsB.peerId.addresses[0], done)
-    })
+    it('should disconnect from a peer', async () => {
+      let peers
+
+      peers = await ipfsA.swarm.peers()
+      expect(peers).to.have.length.above(0)
+
+      await ipfsA.swarm.disconnect(ipfsB.peerId.addresses[0])
 
-    it('should disconnect from a peer (promised)', () => {
-      return ipfsA.swarm.disconnect(ipfsB.peerId.addresses[0])
+      peers = await ipfsA.swarm.peers()
+      expect(peers).to.have.length(0)
     })
   })
 }
diff --git a/src/swarm/local-addrs.js b/src/swarm/local-addrs.js
index e9506c002..0c534e6e7 100644
--- a/src/swarm/local-addrs.js
+++ b/src/swarm/local-addrs.js
@@ -19,18 +19,9 @@ module.exports = (createCommon, options) => {
 
     after(() => common.teardown())
 
-    it('should list local addresses the node is listening on', (done) => {
-      ipfs.swarm.localAddrs((err, multiaddrs) => {
-        expect(err).to.not.exist()
-        expect(multiaddrs).to.have.length.above(0)
-        done()
-      })
-    })
-
-    it('should list local addresses the node is listening on (promised)', 
() => {
-      return ipfs.swarm.localAddrs().then((multiaddrs) => {
-        expect(multiaddrs).to.have.length.above(0)
-      })
+    it('should list local addresses the node is listening on', async () => {
+      const multiaddrs = await ipfs.swarm.localAddrs()
+      expect(multiaddrs).to.have.length.above(0)
     })
   })
 }
diff --git a/src/swarm/peers.js b/src/swarm/peers.js
index 576dd87ff..f37471c9d 100644
--- a/src/swarm/peers.js
+++ b/src/swarm/peers.js
@@ -21,67 +21,44 @@ module.exports = (createCommon, options) => {
       ipfsA = await common.setup()
       ipfsB = await common.setup()
       await ipfsA.swarm.connect(ipfsB.peerId.addresses[0])
+      await delay(60 * 1000) // wait for open streams in the connection to become available
     })
 
     after(() => common.teardown())
 
-    it('should list peers this node is connected to', (done) => {
-      ipfsA.swarm.peers((err, peers) => {
-        expect(err).to.not.exist()
-        expect(peers).to.have.length.above(0)
+    it('should list peers this node is connected to', async () => {
+      const peers = await ipfsA.swarm.peers()
+      expect(peers).to.have.length.above(0)
 
-        const peer = peers[0]
+      const peer = peers[0]
 
-        expect(peer).to.have.a.property('addr')
-        expect(multiaddr.isMultiaddr(peer.addr)).to.equal(true)
-        expect(peer).to.have.a.property('peer')
-        expect(PeerId.isPeerId(peer.peer)).to.equal(true)
-        expect(peer).to.not.have.a.property('latency')
+      expect(peer).to.have.a.property('addr')
+      expect(multiaddr.isMultiaddr(peer.addr)).to.equal(true)
+      expect(peer).to.have.a.property('peer')
+      expect(PeerId.isPeerId(peer.peer)).to.equal(true)
+      expect(peer).to.not.have.a.property('latency')
 
-        // only available in 0.4.5
-        // expect(peer).to.have.a.property('muxer')
-        // expect(peer).to.not.have.a.property('streams')
-
-        done()
-      })
+      /* TODO: These assertions must be uncommented as soon as
+         https://github.com/ipfs/js-ipfs/issues/2601 gets resolved */
+      // expect(peer).to.have.a.property('muxer')
+      // expect(peer).to.not.have.a.property('streams')
     })
 
-    it('should list peers this node is connected to (promised)', () => {
-      return ipfsA.swarm.peers().then((peers) => {
-        expect(peers).to.have.length.above(0)
-
-        const peer = peers[0]
-
-        expect(peer).to.have.a.property('addr')
-        expect(multiaddr.isMultiaddr(peer.addr)).to.equal(true)
-        expect(peer).to.have.a.property('peer')
-        expect(PeerId.isPeerId(peer.peer)).to.equal(true)
-        expect(peer).to.not.have.a.property('latency')
-
-        // only available in 0.4.5
-        // expect(peer).to.have.a.property('muxer')
-        // expect(peer).to.not.have.a.property('streams')
-      })
-    })
-
-    it('should list peers this node is connected to with verbose option', (done) => {
-      ipfsA.swarm.peers({ verbose: true }, (err, peers) => {
-        expect(err).to.not.exist()
-        expect(peers).to.have.length.above(0)
-
-        const peer = peers[0]
-        expect(peer).to.have.a.property('addr')
-        expect(multiaddr.isMultiaddr(peer.addr)).to.equal(true)
-        expect(peer).to.have.a.property('peer')
-        expect(peer).to.have.a.property('latency')
-        expect(peer.latency).to.match(/n\/a|[0-9]+m?s/) // n/a or 3ms or 3s
-
-        // Only available in 0.4.5
-        // expect(peer).to.have.a.property('muxer')
-        // expect(peer).to.have.a.property('streams')
-
-        done()
-      })
+    it('should list peers this node is connected to with verbose option', async () => {
+      const peers = await ipfsA.swarm.peers({ verbose: true })
+      expect(peers).to.have.length.above(0)
+
+      const peer = peers[0]
+      expect(peer).to.have.a.property('addr')
+      expect(multiaddr.isMultiaddr(peer.addr)).to.equal(true)
+      expect(peer).to.have.a.property('peer')
+      expect(peer).to.have.a.property('latency')
+
expect(peer.latency).to.match(/n\/a|[0-9]+[mµ]?s/) // n/a or 3ms or 3µs or 3s + + /* TODO: These assertions must be uncommented as soon as + https://github.com/ipfs/js-ipfs/issues/2601 gets resolved */ + // expect(peer).to.have.a.property('muxer') + // expect(peer).to.have.a.property('streams') }) function getConfig (addrs) { @@ -105,8 +82,8 @@ module.exports = (createCommon, options) => { it('should list peers only once', async () => { const config = getConfig(['/ip4/127.0.0.1/tcp/0']) - const nodeA = await common.setup({}, { config }) - const nodeB = await common.setup({}, { config }) + const nodeA = await common.setup({ spawnOptions: { config } }) + const nodeB = await common.setup({ spawnOptions: { config } }) await nodeA.swarm.connect(nodeB.peerId.addresses[0]) await delay(1000) const peersA = await nodeA.swarm.peers() @@ -125,8 +102,8 @@ module.exports = (createCommon, options) => { '/ip4/127.0.0.1/tcp/26545', '/ip4/127.0.0.1/tcp/26546' ]) - const nodeA = await common.setup({}, { configA }) - const nodeB = await common.setup({}, { configB }) + const nodeA = await common.setup({ spawnOptions: { config: configA } }) + const nodeB = await common.setup({ spawnOptions: { config: configB } }) await nodeA.swarm.connect(nodeB.peerId.addresses[0]) await delay(1000) const peersA = await nodeA.swarm.peers() From abe108d18280810c34f11495e9e833af482de524 Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Wed, 20 Nov 2019 23:34:54 +0000 Subject: [PATCH 20/45] chore: remove unnecessary util file --- src/utils/delay.js | 20 -------------------- 1 file changed, 20 deletions(-) delete mode 100644 src/utils/delay.js diff --git a/src/utils/delay.js b/src/utils/delay.js deleted file mode 100644 index 0295cb6ce..000000000 --- a/src/utils/delay.js +++ /dev/null @@ -1,20 +0,0 @@ -'use strict' - -/** - * Promise version of setTimeout - * @example - * ```js - * async function something() { - * console.log("this might take some time...."); - * await delay(5000); - * console.log("done!") - * } - * - * something(); - * ``` - * @param {number} ms - * @return {Promise} - */ -const delay = ms => new Promise(resolve => setTimeout(resolve, ms)) - -module.exports = delay From e242e0912879ee1e4d0a8a383cc1af898a1ac58c Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Wed, 20 Nov 2019 23:35:33 +0000 Subject: [PATCH 21/45] chore: update dependencies --- package.json | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/package.json b/package.json index e76586007..c473c5816 100644 --- a/package.json +++ b/package.json @@ -47,6 +47,7 @@ "delay": "^4.3.0", "dirty-chai": "^2.0.1", "es6-promisify": "^6.0.1", + "get-stream": "^5.1.0", "hat": "0.0.3", "ipfs-block": "~0.8.0", "ipfs-unixfs": "~0.1.16", @@ -61,9 +62,15 @@ "multibase": "~0.6.0", "multihashes": "~0.4.14", "multihashing-async": "~0.6.0", - "peer-id": "~0.12.0", + "p-each-series": "^2.1.0", + "p-map-series": "^2.1.0", + "p-timeout": "^3.2.0", + "p-times": "^2.1.0", + "p-whilst": "^2.1.0", + "peer-id": "~0.13.5", "peer-info": "~0.15.0", "pull-stream": "^3.6.11", + "pull-to-promise": "^1.0.1", "pump": "^3.0.0", "readable-stream": "^3.1.1", "streaming-iterables": "^4.1.0", From 3fd2a8bdd0527f4fb841e9526fa35432400b5663 Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Fri, 22 Nov 2019 15:55:32 +0000 Subject: [PATCH 22/45] refactor: remove async dep and unnecessary utils --- package.json | 1 - src/utils/spawn.js | 37 ------------------------------------- src/utils/swarm.js | 20 -------------------- 3 files changed, 58 deletions(-) delete mode 100644 
src/utils/spawn.js delete mode 100644 src/utils/swarm.js diff --git a/package.json b/package.json index c473c5816..d1c93f42e 100644 --- a/package.json +++ b/package.json @@ -36,7 +36,6 @@ }, "homepage": "https://github.com/ipfs/interface-ipfs-core#readme", "dependencies": { - "async": "^2.6.2", "bl": "^3.0.0", "bs58": "^4.0.1", "callbackify": "^1.1.0", diff --git a/src/utils/spawn.js b/src/utils/spawn.js deleted file mode 100644 index 792d482bc..000000000 --- a/src/utils/spawn.js +++ /dev/null @@ -1,37 +0,0 @@ -'use strict' - -const waterfall = require('async/waterfall') -const timesSeries = require('async/timesSeries') -const map = require('async/map') - -function identify (node, cb) { - node.id((err, id) => { - if (err) return cb(err) - node.peerId = id - cb(null, node) - }) -} - -// Spawn a node, get it's id and set it as `peerId` on the node -function spawnNodeWithId (factory, callback) { - waterfall([(cb) => factory.spawnNode(cb), identify], callback) -} - -exports.spawnNodeWithId = spawnNodeWithId - -// Spawn n nodes -function spawnNodes (n, factory, callback) { - timesSeries(n, (_, cb) => factory.spawnNode(cb), callback) -} - -exports.spawnNodes = spawnNodes - -// Spawn n nodes, getting their id's and setting them as `peerId` on the nodes -function spawnNodesWithId (n, factory, callback) { - spawnNodes(n, factory, (err, nodes) => { - if (err) return callback(err) - map(nodes, identify, callback) - }) -} - -exports.spawnNodesWithId = spawnNodesWithId diff --git a/src/utils/swarm.js b/src/utils/swarm.js deleted file mode 100644 index b36dc7df8..000000000 --- a/src/utils/swarm.js +++ /dev/null @@ -1,20 +0,0 @@ -'use strict' - -const eachSeries = require('async/eachSeries') - -function connect (fromNode, toAddrs, cb) { - if (!Array.isArray(toAddrs)) { - toAddrs = [toAddrs] - } - - // FIXME ??? 
quick connections to different nodes sometimes cause no
-  // connection and no error, hence serialize connections and pause between
-  eachSeries(toAddrs, (toAddr, cb) => {
-    fromNode.swarm.connect(toAddr, (err) => {
-      if (err) return cb(err)
-      setTimeout(cb, 300)
-    })
-  }, cb)
-}
-
-module.exports.connect = connect
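NOTE (illustrative, not part of any patch): patch 23 below keeps the p-whilst polling used by the bitswap wantlist helper, renaming its predicate to findKey. A minimal sketch of p-whilst's contract, using only the p-whilst package declared in package.json by patch 21 — the async action runs, and is awaited, for as long as the condition returns true:

    const pWhilst = require('p-whilst')

    async function main () {
      let attempts = 0

      // keep polling while the condition holds; a throw inside the
      // action (like the timeout check above) rejects the returned promise
      await pWhilst(() => attempts < 3, async () => {
        attempts++
      })

      console.log(attempts) // 3
    }

    main().catch(console.error)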
From 79e4e6bafda0fc275129eb2aa032e485b0b0697d Mon Sep 17 00:00:00 2001
From: Pedro Santos
Date: Fri, 22 Nov 2019 15:56:35 +0000
Subject: [PATCH 23/45] refactor: bitswap before and after methods to async syntax

---
 src/bitswap/stat.js | 40 ++++++++-----------------------
 src/bitswap/utils.js | 4 ++--
 src/bitswap/wantlist.js | 52 +++++++++++------------------------------
 3 files changed, 25 insertions(+), 71 deletions(-)

diff --git a/src/bitswap/stat.js b/src/bitswap/stat.js
index 23daaf218..30b5570f4 100644
--- a/src/bitswap/stat.js
+++ b/src/bitswap/stat.js
@@ -1,7 +1,6 @@
 /* eslint-env mocha */
 'use strict'
 
-const waterfall = require('async/waterfall')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
 const { expectIsBitswap } = require('../stats/utils')
 
@@ -10,45 +9,26 @@ module.exports = (createCommon, options) => {
   const it = getIt(options)
   const common = createCommon()
 
-  describe('.bitswap.stat', () => {
+  describe('.bitswap.stat', function () {
+    this.timeout(60 * 1000)
     let ipfs
 
-    before(function (done) {
-      // CI takes longer to instantiate the daemon, so we need to increase the
-      // timeout for the before step
-      this.timeout(60 * 1000)
-
-      common.setup((err, factory) => {
-        expect(err).to.not.exist()
-        factory.spawnNode((err, node) => {
-          expect(err).to.not.exist()
-          ipfs = node
-          done()
-        })
-      })
+    before(async () => {
+      ipfs = await common.setup()
    })
 
-    after((done) => common.teardown(done))
+    after(() => common.teardown())
 
     it('should get bitswap stats', async () => {
       const res = await ipfs.bitswap.stat()
       expectIsBitswap(null, res)
     })
 
-    it('should not get bitswap stats when offline', function (done) {
-      this.timeout(60 * 1000)
-
-      waterfall([
-        (cb) => createCommon().setup(cb),
-        (factory, cb) => factory.spawnNode(cb),
-        (node, cb) => node.stop((err) => cb(err, node))
-      ], (err, node) => {
-        expect(err).to.not.exist()
-        node.bitswap.stat((err) => {
-          expect(err).to.exist()
-          done()
-        })
-      })
+    it('should not get bitswap stats when offline', async () => {
+      const node = await createCommon().setup()
+      await node.stop()
+
+      return expect(node.bitswap.stat()).to.eventually.be.rejected()
     })
   })
 }
diff --git a/src/bitswap/utils.js b/src/bitswap/utils.js
index 0ec5b481b..d2b0cd11e 100644
--- a/src/bitswap/utils.js
+++ b/src/bitswap/utils.js
@@ -8,7 +8,7 @@ function waitForWantlistKey (ipfs, key, opts = {}) {
   let list = { Keys: [] }
   const start = Date.now()
-  const test = () => !list.Keys.some(k => k['/'] === key)
+  const findKey = () => !list.Keys.some(k => k['/'] === key)
 
   const iteratee = async () => {
     if (Date.now() - start > opts.timeout) {
@@ -18,7 +18,7 @@ function waitForWantlistKey (ipfs, key, opts = {}) {
     list = await ipfs.bitswap.wantlist(opts.peerId)
   }
 
-  return pWhilst(test, iteratee)
+  return pWhilst(findKey, iteratee)
 }
 
 module.exports.waitForWantlistKey = waitForWantlistKey
diff --git a/src/bitswap/wantlist.js b/src/bitswap/wantlist.js
index 13ec39e81..34480a900 100644
--- a/src/bitswap/wantlist.js
+++ b/src/bitswap/wantlist.js
@@ -2,48 +2,31 @@
 /* eslint max-nested-callbacks: ["error", 6] */
 'use strict'
 
-const waterfall = require('async/waterfall')
-const { spawnNodesWithId } = require('../utils/spawn')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
 const { waitForWantlistKey } = require('./utils')
-const { connect } = require('../utils/swarm')
 
 module.exports = (createCommon, options) => {
   const describe = getDescribe(options)
   const it = getIt(options)
   const common = createCommon()
 
-  describe('.bitswap.wantlist', () => {
+  describe('.bitswap.wantlist', function () {
+    this.timeout(60 * 1000)
     let ipfsA
     let ipfsB
    const key = 'QmUBdnXXPyoDFXj3Hj39dNJ5VkN3QFRskXxcGaYFBB8CNR'
 
-    before(function (done) {
-      // CI takes longer to instantiate the daemon, so we need to increase the
-      // timeout for the before step
-      this.timeout(60 * 1000)
+    before(async () => {
+      ipfsA = await common.setup()
+      ipfsB = await common.setup()
 
-      common.setup((err, factory) => {
-        expect(err).to.not.exist()
+      // Add key to the wantlist for ipfsB
+      ipfsB.block.get(key, () => {})
 
-        spawnNodesWithId(2, factory, (err, nodes) => {
-          expect(err).to.not.exist()
-
-          ipfsA = nodes[0]
-          ipfsB = nodes[1]
-
-          // Add key to the wantlist for ipfsB
-          ipfsB.block.get(key, () => {})
-
-          connect(ipfsA, ipfsB.peerId.addresses[0], done)
-        })
-      })
+      await ipfsA.swarm.connect(ipfsB.peerId.addresses[0])
    })
 
-    after(function (done) {
-      this.timeout(30 * 1000)
-      common.teardown(done)
-    })
+    after(() => common.teardown())
 
     it('should get the wantlist', () => {
       return waitForWantlistKey(ipfsB, key)
     })
 
     it('should get the wantlist by peer ID for a diffreent node', () => {
       return waitForWantlistKey(ipfsA, key, { peerId: ipfsB.peerId.id })
     })
 
-    it('should not get the wantlist when offline', function (done) {
-      this.timeout(60 * 1000)
+    it('should not get the wantlist when offline', async () => {
+      const node = await createCommon().setup()
+      await node.stop()
 
-      waterfall([
-        (cb) => createCommon().setup(cb),
-        (factory, cb) => factory.spawnNode(cb),
-        (node, cb) => node.stop((err) => cb(err, node))
-      ], (err, node) => {
-        expect(err).to.not.exist()
-        node.bitswap.wantlist((err) => {
-          expect(err).to.exist()
-          done()
-        })
-      })
+      return expect(node.bitswap.wantlist()).to.eventually.be.rejected()
     })
   })
 }
From 571b35b03c20526fccdc599263eb1f5dd88e105b Mon Sep 17 00:00:00 2001
From: Pedro Santos
Date: Fri, 22 Nov 2019 15:57:17 +0000
Subject: [PATCH 24/45] refactor: block before and after methods to async syntax

---
 src/block/get.js | 23 ++++++-----------------
 src/block/put.js | 23 +++++++----------------
 src/block/rm.js | 18 ++++--------------
 src/block/stat.js | 25 +++++++------------------
 4 files changed, 24 insertions(+), 65 deletions(-)

diff --git a/src/block/get.js b/src/block/get.js
index 885518d07..38e2a659a 100644
--- a/src/block/get.js
+++ b/src/block/get.js
@@ -3,7 +3,6 @@
 
 const multihash = require('multihashes')
 const CID = require('cids')
-const auto = require('async/auto')
 const { getDescribe, getIt, expect } = require('../utils/mocha')
 
 module.exports = (createCommon, options) => {
@@ -12,27 +11,17 @@ module.exports = (createCommon, options) => {
   const common = createCommon()
 
   describe('.block.get', function () {
+    this.timeout(60 * 1000)
     const data = Buffer.from('blorb')
     let ipfs, hash
 
-    before(function (done) {
-      // CI takes longer to instantiate the daemon, so we need to increase the
-      // timeout for the before step
-      this.timeout(60 * 1000)
-
-      auto({
-        factory: (cb) => common.setup(cb),
-        ipfs: ['factory', (res, cb) => res.factory.spawnNode(cb)],
-        block: ['ipfs', (res, cb) => res.ipfs.block.put(data, cb)]
-      }, (err, res) => {
-        if (err) return done(err)
-        ipfs = res.ipfs
-        hash = res.block.cid.multihash
-        done()
-      })
+    before(async () => {
+      ipfs = await common.setup()
+
const block = await ipfs.block.put(data) + hash = block.cid.multihash }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should get by CID object', async () => { const cid = new CID(hash) diff --git a/src/block/put.js b/src/block/put.js index 9c46dd282..1df2edc99 100644 --- a/src/block/put.js +++ b/src/block/put.js @@ -11,25 +11,15 @@ module.exports = (createCommon, options) => { const it = getIt(options) const common = createCommon() - describe('.block.put', () => { + describe('.block.put', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should put a buffer, using defaults', async () => { const expectedHash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ' @@ -81,7 +71,8 @@ module.exports = (createCommon, options) => { it('should error with array of blocks', () => { const blob = Buffer.from('blorb') - return expect(ipfs.block.put([blob, blob])).to.eventually.be.rejected.and.be.an.instanceOf(Error) + return expect(ipfs.block.put([blob, blob])).to.eventually.be.rejected + .and.be.an.instanceOf(Error) }) }) } diff --git a/src/block/rm.js b/src/block/rm.js index 0cca80366..8f99fa23a 100644 --- a/src/block/rm.js +++ b/src/block/rm.js @@ -10,24 +10,14 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.block.rm', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should remove by CID object', async () => { const cid = await ipfs.dag.put(Buffer.from(hat()), { diff --git a/src/block/stat.js b/src/block/stat.js index 7143036bc..0d1a5976e 100644 --- a/src/block/stat.js +++ b/src/block/stat.js @@ -2,7 +2,6 @@ 'use strict' const CID = require('cids') -const auto = require('async/auto') const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { @@ -10,28 +9,18 @@ module.exports = (createCommon, options) => { const it = getIt(options) const common = createCommon() - describe('.block.stat', () => { + describe('.block.stat', function () { + this.timeout(60 * 1000) const data = Buffer.from('blorb') let ipfs, hash - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - auto({ - factory: (cb) => common.setup(cb), - ipfs: ['factory', (res, cb) => res.factory.spawnNode(cb)], - block: ['ipfs', (res, cb) => res.ipfs.block.put(data, cb)] - }, (err, res) => { - if (err) return done(err) - ipfs = res.ipfs - hash = res.block.cid.multihash - done() - }) + before(async () => { + ipfs = await common.setup() + const block = await ipfs.block.put(data) + hash = block.cid.multihash }) - 
after((done) => common.teardown(done)) + after(() => common.teardown()) it('should stat by CID', async () => { const cid = new CID(hash) From 20e842becd1a7c582fbff3f16306e90db525a90c Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Fri, 22 Nov 2019 15:57:52 +0000 Subject: [PATCH 25/45] refactor: bootstrap before and after methods to async syntax --- src/bootstrap/add.js | 17 +++-------------- src/bootstrap/list.js | 17 +++-------------- src/bootstrap/rm.js | 17 +++-------------- 3 files changed, 9 insertions(+), 42 deletions(-) diff --git a/src/bootstrap/add.js b/src/bootstrap/add.js index cad035fa5..eb8034663 100644 --- a/src/bootstrap/add.js +++ b/src/bootstrap/add.js @@ -16,22 +16,11 @@ module.exports = (createCommon, options) => { let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should return an error when called with an invalid arg', () => { return expect(ipfs.bootstrap.add(invalidArg)).to.eventually.be.rejected diff --git a/src/bootstrap/list.js b/src/bootstrap/list.js index 376c19dc1..ffb75c23d 100644 --- a/src/bootstrap/list.js +++ b/src/bootstrap/list.js @@ -13,22 +13,11 @@ module.exports = (createCommon, options) => { let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should return a list of peers', async () => { const res = await ipfs.bootstrap.list() diff --git a/src/bootstrap/rm.js b/src/bootstrap/rm.js index 2d5a40aa0..1bdc19c3c 100644 --- a/src/bootstrap/rm.js +++ b/src/bootstrap/rm.js @@ -16,22 +16,11 @@ module.exports = (createCommon, options) => { let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should return an error when called with an invalid arg', () => { return expect(ipfs.bootstrap.rm(invalidArg)).to.eventually.be.rejected From 444c6a585af3d55e780e782139a84760f8aa5c94 Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Fri, 22 Nov 2019 15:58:36 +0000 Subject: [PATCH 26/45] refactor: config before and after methods to async syntax --- src/config/get.js | 19 ++++--------------- src/config/profiles/apply.js | 17 +++-------------- src/config/profiles/list.js | 19 ++++--------------- src/config/replace.js | 19 ++++--------------- src/config/set.js | 19 ++++--------------- 5 files changed, 19 insertions(+), 74 deletions(-) diff --git a/src/config/get.js b/src/config/get.js index 2fe08aaed..199257e36 100644 --- a/src/config/get.js +++ 
b/src/config/get.js @@ -10,25 +10,14 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.config.get', function () { - this.timeout(30 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should retrieve the whole config', async () => { const config = await ipfs.config.get() diff --git a/src/config/profiles/apply.js b/src/config/profiles/apply.js index c042e20a6..7cd96f785 100644 --- a/src/config/profiles/apply.js +++ b/src/config/profiles/apply.js @@ -12,22 +12,11 @@ module.exports = (createCommon, options) => { this.timeout(30 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should apply a config profile', async () => { const diff = await ipfs.config.profiles.apply('lowpower') diff --git a/src/config/profiles/list.js b/src/config/profiles/list.js index 585baecf1..69482b21b 100644 --- a/src/config/profiles/list.js +++ b/src/config/profiles/list.js @@ -9,25 +9,14 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.config.profiles.list', function () { - this.timeout(30 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should list config profiles', async () => { const profiles = await ipfs.config.profiles.list() diff --git a/src/config/replace.js b/src/config/replace.js index aaa51c54d..bab2133cc 100644 --- a/src/config/replace.js +++ b/src/config/replace.js @@ -9,25 +9,14 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.config.replace', function () { - this.timeout(30 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) const config = { Fruit: 'Bananas' diff --git a/src/config/set.js b/src/config/set.js index 2c0e54b47..c808c4ec2 100644 --- a/src/config/set.js +++ b/src/config/set.js @@ -9,25 +9,14 @@ module.exports = (createCommon, 
options) => { const common = createCommon() describe('.config.set', function () { - this.timeout(30 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should set a new key', async () => { await ipfs.config.set('Fruit', 'banana') From ba18c86376a285c0745b5708030646bf8330bed7 Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Fri, 22 Nov 2019 15:59:11 +0000 Subject: [PATCH 27/45] refactor: dag before and after methods to async syntax --- src/dag/get.js | 22 ++++------------------ src/dag/put.js | 22 ++++------------------ src/dag/tree.js | 22 ++++------------------ 3 files changed, 12 insertions(+), 54 deletions(-) diff --git a/src/dag/get.js b/src/dag/get.js index ae70417c7..ad68e2794 100644 --- a/src/dag/get.js +++ b/src/dag/get.js @@ -7,7 +7,6 @@ const DAGNode = dagPB.DAGNode const dagCBOR = require('ipld-dag-cbor') const Unixfs = require('ipfs-unixfs') const CID = require('cids') -const { spawnNodeWithId } = require('../utils/spawn') const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { @@ -15,26 +14,13 @@ module.exports = (createCommon, options) => { const it = getIt(options) const common = createCommon() - describe('.dag.get', () => { + describe('.dag.get', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) + before(async () => { ipfs = await common.setup() }) - common.setup((err, factory) => { - expect(err).to.not.exist() - - spawnNodeWithId(factory, (err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) - }) - - after((done) => common.teardown(done)) + after(() => common.teardown()) let pbNode let cborNode diff --git a/src/dag/put.js b/src/dag/put.js index 746146a4b..9ef382c98 100644 --- a/src/dag/put.js +++ b/src/dag/put.js @@ -6,7 +6,6 @@ const DAGNode = dagPB.DAGNode const dagCBOR = require('ipld-dag-cbor') const CID = require('cids') const multihash = require('multihashes') -const { spawnNodeWithId } = require('../utils/spawn') const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { @@ -14,26 +13,13 @@ module.exports = (createCommon, options) => { const it = getIt(options) const common = createCommon() - describe('.dag.put', () => { + describe('.dag.put', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) + before(async () => { ipfs = await common.setup() }) - common.setup((err, factory) => { - expect(err).to.not.exist() - - spawnNodeWithId(factory, (err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) - }) - - after((done) => common.teardown(done)) + after(() => common.teardown()) let pbNode let cborNode diff --git a/src/dag/tree.js b/src/dag/tree.js index a74c3a66a..a60746f7e 100644 --- a/src/dag/tree.js +++ b/src/dag/tree.js @@ -5,7 +5,6 @@ const pEachSeries = 
require('p-each-series') const dagPB = require('ipld-dag-pb') const DAGNode = dagPB.DAGNode const dagCBOR = require('ipld-dag-cbor') -const { spawnNodeWithId } = require('../utils/spawn') const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { @@ -13,26 +12,13 @@ module.exports = (createCommon, options) => { const it = getIt(options) const common = createCommon() - describe('.dag.tree', () => { + describe('.dag.tree', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) + before(async () => { ipfs = await common.setup() }) - common.setup((err, factory) => { - expect(err).to.not.exist() - - spawnNodeWithId(factory, (err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) - }) - - after((done) => common.teardown(done)) + after(() => common.teardown()) let nodePb let nodeCbor From bc7dcef48d5541ff51a33e163a1bbc493816be4b Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Fri, 22 Nov 2019 15:59:29 +0000 Subject: [PATCH 28/45] refactor: dht before and after methods to async syntax --- src/dht/find-peer.js | 29 +++++------------------------ src/dht/find-provs.js | 34 ++++++++++------------------------ src/dht/get.js | 29 +++++------------------------ src/dht/provide.js | 26 +++++--------------------- src/dht/put.js | 26 ++++++-------------------- src/dht/query.js | 29 +++++------------------------ 6 files changed, 36 insertions(+), 137 deletions(-) diff --git a/src/dht/find-peer.js b/src/dht/find-peer.js index fe56d90b1..898ddf4b7 100644 --- a/src/dht/find-peer.js +++ b/src/dht/find-peer.js @@ -1,9 +1,7 @@ /* eslint-env mocha */ 'use strict' -const { spawnNodesWithId } = require('../utils/spawn') const { getDescribe, getIt, expect } = require('../utils/mocha') -const { connect } = require('../utils/swarm') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -16,30 +14,13 @@ module.exports = (createCommon, options) => { let nodeA let nodeB - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - - spawnNodesWithId(2, factory, (err, nodes) => { - expect(err).to.not.exist() - - nodeA = nodes[0] - nodeB = nodes[1] - - connect(nodeB, nodeA.peerId.addresses[0], done) - }) - }) + before(async () => { + nodeA = await common.setup() + nodeB = await common.setup() + await nodeB.swarm.connect(nodeA.peerId.addresses[0]) }) - after(function (done) { - this.timeout(50 * 1000) - - common.teardown(done) - }) + after(() => common.teardown()) it('should find other peers', async () => { const res = await nodeA.dht.findPeer(nodeB.peerId.id) diff --git a/src/dht/find-provs.js b/src/dht/find-provs.js index 4af69f4d5..2a5b5e044 100644 --- a/src/dht/find-provs.js +++ b/src/dht/find-provs.js @@ -2,11 +2,8 @@ 'use strict' const multihashing = require('multihashing-async') -const parallel = require('async/parallel') const CID = require('cids') -const { spawnNodesWithId } = require('../utils/spawn') const { getDescribe, getIt, expect } = require('../utils/mocha') -const { connect } = require('../utils/swarm') async function fakeCid () { const bytes = Buffer.from(`TEST${Date.now()}`) @@ -26,29 +23,18 @@ module.exports = (createCommon, options) => { let nodeB let nodeC - before(function (done) 
{ - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - - spawnNodesWithId(3, factory, (err, nodes) => { - expect(err).to.not.exist() - - nodeA = nodes[0] - nodeB = nodes[1] - nodeC = nodes[2] - - parallel([ - (cb) => connect(nodeB, nodeA.peerId.addresses[0], cb), - (cb) => connect(nodeC, nodeB.peerId.addresses[0], cb) - ], done) - }) - }) + before(async () => { + nodeA = await common.setup() + nodeB = await common.setup() + nodeC = await common.setup() + await Promise.all([ + nodeB.swarm.connect(nodeA.peerId.addresses[0]), + nodeC.swarm.connect(nodeB.peerId.addresses[0]) + ]) }) + after(() => common.teardown()) + let providedCid before('add providers for the same cid', async function () { const cids = await Promise.all([ diff --git a/src/dht/get.js b/src/dht/get.js index 7bdb20852..a71e54d22 100644 --- a/src/dht/get.js +++ b/src/dht/get.js @@ -2,9 +2,7 @@ 'use strict' const hat = require('hat') -const { spawnNodesWithId } = require('../utils/spawn') const { getDescribe, getIt, expect } = require('../utils/mocha') -const { connect } = require('../utils/swarm') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -17,30 +15,13 @@ module.exports = (createCommon, options) => { let nodeA let nodeB - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - - spawnNodesWithId(2, factory, (err, nodes) => { - expect(err).to.not.exist() - - nodeA = nodes[0] - nodeB = nodes[1] - - connect(nodeA, nodeB.peerId.addresses[0], done) - }) - }) + before(async () => { + nodeA = await common.setup() + nodeB = await common.setup() + await nodeA.swarm.connect(nodeB.peerId.addresses[0]) }) - after(function (done) { - this.timeout(50 * 1000) - - common.teardown(done) - }) + after(() => common.teardown()) it('should error when getting a non-existent key from the DHT', () => { return expect(nodeA.dht.get('non-existing', { timeout: 100 })).to.eventually.be.rejected diff --git a/src/dht/provide.js b/src/dht/provide.js index 3b08526ca..456dcbc7d 100644 --- a/src/dht/provide.js +++ b/src/dht/provide.js @@ -2,9 +2,7 @@ 'use strict' const CID = require('cids') -const { spawnNodesWithId } = require('../utils/spawn') const { getDescribe, getIt, expect } = require('../utils/mocha') -const { connect } = require('../utils/swarm') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -16,27 +14,13 @@ module.exports = (createCommon, options) => { let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - - spawnNodesWithId(2, factory, (err, nodes) => { - expect(err).to.not.exist() - ipfs = nodes[0] - connect(ipfs, nodes[1].peerId.addresses[0], done) - }) - }) + before(async () => { + ipfs = await common.setup() + const nodeB = await common.setup() + await ipfs.swarm.connect(nodeB.peerId.addresses[0]) }) - after(function (done) { - this.timeout(50 * 1000) - - common.teardown(done) - }) + after(() => common.teardown()) it('should provide local CID', async () => { const res = await ipfs.add(Buffer.from('test')) diff --git a/src/dht/put.js b/src/dht/put.js index 
f6e1510d2..826c9fcce 100644 --- a/src/dht/put.js +++ b/src/dht/put.js @@ -1,9 +1,7 @@ /* eslint-env mocha */ 'use strict' -const { spawnNodesWithId } = require('../utils/spawn') -const { getDescribe, getIt, expect } = require('../utils/mocha') -const { connect } = require('../utils/swarm') +const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -16,25 +14,13 @@ module.exports = (createCommon, options) => { let nodeA let nodeB - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - - spawnNodesWithId(2, factory, (err, nodes) => { - expect(err).to.not.exist() - - nodeA = nodes[0] - nodeB = nodes[1] - connect(nodeA, nodeB.peerId.addresses[0], done) - }) - }) + before(async () => { + nodeA = await common.setup() + nodeB = await common.setup() + await nodeA.swarm.connect(nodeB.peerId.addresses[0]) }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should put a value to the DHT', async () => { const key = Buffer.from('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn') diff --git a/src/dht/query.js b/src/dht/query.js index dffa483cb..2e155911f 100644 --- a/src/dht/query.js +++ b/src/dht/query.js @@ -2,9 +2,7 @@ 'use strict' const pTimeout = require('p-timeout') -const { spawnNodesWithId } = require('../utils/spawn') const { getDescribe, getIt, expect } = require('../utils/mocha') -const { connect } = require('../utils/swarm') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -17,30 +15,13 @@ module.exports = (createCommon, options) => { let nodeA let nodeB - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - - spawnNodesWithId(2, factory, (err, nodes) => { - expect(err).to.not.exist() - - nodeA = nodes[0] - nodeB = nodes[1] - - connect(nodeB, nodeA.peerId.addresses[0], done) - }) - }) + before(async () => { + nodeA = await common.setup() + nodeB = await common.setup() + await nodeB.swarm.connect(nodeA.peerId.addresses[0]) }) - after(function (done) { - this.timeout(50 * 1000) - - common.teardown(done) - }) + after(() => common.teardown()) it('should return the other node in the query', async function () { const timeout = 150 * 1000 From 50eabf6437518c5e3d12c7a6a6b0a6380ddef840 Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Fri, 22 Nov 2019 15:59:54 +0000 Subject: [PATCH 29/45] refactor: files-mfs before and after methods to async syntax --- src/files-mfs/cp.js | 25 ++++++------------------- src/files-mfs/flush.js | 21 ++++----------------- src/files-mfs/ls-pull-stream.js | 23 +++++------------------ src/files-mfs/ls-readable-stream.js | 23 +++++------------------ src/files-mfs/ls.js | 23 +++++------------------ src/files-mfs/mkdir.js | 21 ++++----------------- src/files-mfs/mv.js | 24 +++++------------------- src/files-mfs/read-pull-stream.js | 19 +++---------------- src/files-mfs/read-readable-stream.js | 19 +++---------------- src/files-mfs/read.js | 19 +++---------------- src/files-mfs/rm.js | 23 +++++------------------ src/files-mfs/stat.js | 24 +++++------------------- src/files-mfs/write.js | 17 ++--------------- 13 files changed, 55 insertions(+), 226 deletions(-) diff --git 
a/src/files-mfs/cp.js b/src/files-mfs/cp.js index d1a2b44fa..addd9af76 100644 --- a/src/files-mfs/cp.js +++ b/src/files-mfs/cp.js @@ -10,27 +10,14 @@ module.exports = (createCommon, options) => { const it = getIt(options) const common = createCommon() - describe('.files.cp', function () { - this.timeout(40 * 1000) + describe('.files.cp', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) + before(async () => { ipfs = await common.setup() }) - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) - }) - - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should copy file, expect error', () => { const testDir = `/test-${hat()}` @@ -41,7 +28,7 @@ module.exports = (createCommon, options) => { it('should copy file, expect no error', async () => { const testDir = `/test-${hat()}` - await ipfs.files.mkdir(testDir, { p: true }) + await ipfs.files.mkdir(testDir, { parents: true }) await ipfs.files.write(`${testDir}/a`, Buffer.from('TEST'), { create: true }) await ipfs.files.cp(`${testDir}/a`, `${testDir}/b`) }) @@ -55,7 +42,7 @@ module.exports = (createCommon, options) => { it('should copy dir, expect no error', async () => { const testDir = `/test-${hat()}` - await ipfs.files.mkdir(`${testDir}/lv1/lv2`, { p: true }) + await ipfs.files.mkdir(`${testDir}/lv1/lv2`, { parents: true }) await ipfs.files.cp(`${testDir}/lv1/lv2`, `${testDir}/lv1/lv3`) }) diff --git a/src/files-mfs/flush.js b/src/files-mfs/flush.js index 16a9e938c..eb3b66df3 100644 --- a/src/files-mfs/flush.js +++ b/src/files-mfs/flush.js @@ -10,26 +10,13 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.files.flush', function () { - this.timeout(40 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) - }) + before(async () => { ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should not flush not found file/dir, expect error', async () => { const testDir = `/test-${hat()}` @@ -46,7 +33,7 @@ module.exports = (createCommon, options) => { it('should flush specific dir', async () => { const testDir = `/test-${hat()}` - await ipfs.files.mkdir(testDir, { p: true }) + await ipfs.files.mkdir(testDir, { parents: true }) await ipfs.files.flush(testDir) }) }) diff --git a/src/files-mfs/ls-pull-stream.js b/src/files-mfs/ls-pull-stream.js index b91339d18..6210eaed6 100644 --- a/src/files-mfs/ls-pull-stream.js +++ b/src/files-mfs/ls-pull-stream.js @@ -11,26 +11,13 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.files.lsPullStream', function () { - this.timeout(40 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node -
done() - }) - }) - }) + before(async () => { ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should not ls not found file/dir, expect error', () => { const testDir = `/test-${hat()}` @@ -44,7 +31,7 @@ module.exports = (createCommon, options) => { it('should ls directory', async () => { const testDir = `/test-${hat()}` - await ipfs.files.mkdir(`${testDir}/lv1`, { p: true }) + await ipfs.files.mkdir(`${testDir}/lv1`, { parents: true }) await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }) const entries = await pullToPromise.any(ipfs.files.lsPullStream(testDir)) @@ -58,7 +45,7 @@ module.exports = (createCommon, options) => { it('should ls directory with long option', async () => { const testDir = `/test-${hat()}` - await ipfs.files.mkdir(`${testDir}/lv1`, { p: true }) + await ipfs.files.mkdir(`${testDir}/lv1`, { parents: true }) await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }) const entries = await pullToPromise.any(ipfs.files.lsPullStream(testDir, { long: true })) diff --git a/src/files-mfs/ls-readable-stream.js b/src/files-mfs/ls-readable-stream.js index 311bf7220..e3a11a827 100644 --- a/src/files-mfs/ls-readable-stream.js +++ b/src/files-mfs/ls-readable-stream.js @@ -11,26 +11,13 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.files.lsReadableStream', function () { - this.timeout(40 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) - }) + before(async () => { ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should not ls not found file/dir, expect error', () => { const testDir = `/test-${hat()}` @@ -45,7 +32,7 @@ module.exports = (createCommon, options) => { it('should ls directory', async () => { const testDir = `/test-${hat()}` - await ipfs.files.mkdir(`${testDir}/lv1`, { p: true }) + await ipfs.files.mkdir(`${testDir}/lv1`, { parents: true }) await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }) const stream = ipfs.files.lsReadableStream(testDir) @@ -61,7 +48,7 @@ module.exports = (createCommon, options) => { it('should ls directory with long option', async () => { const testDir = `/test-${hat()}` - await ipfs.files.mkdir(`${testDir}/lv1`, { p: true }) + await ipfs.files.mkdir(`${testDir}/lv1`, { parents: true }) await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }) const stream = ipfs.files.lsReadableStream(testDir, { long: true }) diff --git a/src/files-mfs/ls.js b/src/files-mfs/ls.js index 46fe52560..600db8d5a 100644 --- a/src/files-mfs/ls.js +++ b/src/files-mfs/ls.js @@ -11,26 +11,13 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.files.ls', function () { - this.timeout(40 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) 
- }) + before(async () => { ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should not ls not found file/dir, expect error', () => { const testDir = `/test-${hat()}` @@ -41,7 +28,7 @@ module.exports = (createCommon, options) => { it('should ls directory', async () => { const testDir = `/test-${hat()}` - await ipfs.files.mkdir(`${testDir}/lv1`, { p: true }) + await ipfs.files.mkdir(`${testDir}/lv1`, { parents: true }) await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }) const info = await ipfs.files.ls(testDir) @@ -55,7 +42,7 @@ module.exports = (createCommon, options) => { it('should ls directory with long option', async () => { const testDir = `/test-${hat()}` - await ipfs.files.mkdir(`${testDir}/lv1`, { p: true }) + await ipfs.files.mkdir(`${testDir}/lv1`, { parents: true }) await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }) const info = await ipfs.files.ls(testDir, { long: true }) diff --git a/src/files-mfs/mkdir.js b/src/files-mfs/mkdir.js index a6081f439..9cc0dd5da 100644 --- a/src/files-mfs/mkdir.js +++ b/src/files-mfs/mkdir.js @@ -10,26 +10,13 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.files.mkdir', function () { - this.timeout(40 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) - }) + before(async () => { ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should make directory on root', () => { const testDir = `/test-${hat()}` @@ -40,7 +27,7 @@ module.exports = (createCommon, options) => { it('should make directory and its parents', () => { const testDir = `/test-${hat()}` - return ipfs.files.mkdir(`${testDir}/lv1/lv2`, { p: true }) + return ipfs.files.mkdir(`${testDir}/lv1/lv2`, { parents: true }) }) it('should not make already existent directory', () => { diff --git a/src/files-mfs/mv.js b/src/files-mfs/mv.js index 83835a4f9..7cdbeaf05 100644 --- a/src/files-mfs/mv.js +++ b/src/files-mfs/mv.js @@ -10,30 +10,16 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.files.mv', function () { - this.timeout(40 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) - }) + before(async () => { ipfs = await common.setup() }) before(async () => { - await ipfs.files.mkdir('/test/lv1/lv2', { p: true }) + await ipfs.files.mkdir('/test/lv1/lv2', { parents: true }) await ipfs.files.write('/test/a', Buffer.from('Hello, world!'), { create: true }) }) - after(() => common.teardown()) it('should not move not found file/dir, expect error', () => { @@ -45,7 +31,7 @@ module.exports = (createCommon, options) => { it('should move file, expect no error', async () => { const testDir = `/test-${hat()}` - await ipfs.files.mkdir(`${testDir}/lv1/lv2`, { p: true }) + await ipfs.files.mkdir(`${testDir}/lv1/lv2`, { 
parents: true }) await ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), { create: true }) await ipfs.files.mv(`${testDir}/a`, `${testDir}/c`) @@ -54,7 +40,7 @@ module.exports = (createCommon, options) => { it('should move dir, expect no error', async () => { const testDir = `/test-${hat()}` - await ipfs.files.mkdir(`${testDir}/lv1/lv2`, { p: true }) + await ipfs.files.mkdir(`${testDir}/lv1/lv2`, { parents: true }) await ipfs.files.mv('/test/lv1/lv2', '/test/lv1/lv4') }) }) diff --git a/src/files-mfs/read-pull-stream.js b/src/files-mfs/read-pull-stream.js index b4c4b718e..6b70a494f 100644 --- a/src/files-mfs/read-pull-stream.js +++ b/src/files-mfs/read-pull-stream.js @@ -11,26 +11,13 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.files.readPullStream', function () { - this.timeout(40 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) - }) + before(async () => { ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should not read not found, expect error', () => { const testDir = `/test-${hat()}` diff --git a/src/files-mfs/read-readable-stream.js b/src/files-mfs/read-readable-stream.js index 6a6491898..c6f286652 100644 --- a/src/files-mfs/read-readable-stream.js +++ b/src/files-mfs/read-readable-stream.js @@ -11,26 +11,13 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.files.readReadableStream', function () { - this.timeout(40 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) - }) + before(async () => { ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should not read not found, expect error', () => { const testDir = `/test-${hat()}` diff --git a/src/files-mfs/read.js b/src/files-mfs/read.js index 0a306ea7a..6b97d91c5 100644 --- a/src/files-mfs/read.js +++ b/src/files-mfs/read.js @@ -11,26 +11,13 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.files.read', function () { - this.timeout(40 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) - }) + before(async () => { ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should not read not found, expect error', () => { const testDir = `/test-${hat()}` diff --git a/src/files-mfs/rm.js b/src/files-mfs/rm.js index abe456f85..a8412ea98 100644 --- a/src/files-mfs/rm.js +++ b/src/files-mfs/rm.js @@ -10,26 +10,13 @@ module.exports = (createCommon, options) => { const common = createCommon() 
describe('.files.rm', function () { - this.timeout(40 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) - }) + before(async () => { ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should not remove not found file/dir, expect error', () => { const testDir = `/test-${hat()}` @@ -40,7 +27,7 @@ module.exports = (createCommon, options) => { it('should remove file, expect no error', async () => { const testDir = `/test-${hat()}` - await ipfs.files.mkdir(testDir, { p: true }) + await ipfs.files.mkdir(testDir, { parents: true }) await ipfs.files.write(`${testDir}/c`, Buffer.from('Hello, world!'), { create: true }) await ipfs.files.rm(`${testDir}/c`) @@ -52,7 +39,7 @@ module.exports = (createCommon, options) => { it('should remove dir, expect no error', async () => { const testDir = `/test-${hat()}` - await ipfs.files.mkdir(`${testDir}/lv1/lv2`, { p: true }) + await ipfs.files.mkdir(`${testDir}/lv1/lv2`, { parents: true }) await ipfs.files.rm(`${testDir}/lv1/lv2`, { recursive: true }) diff --git a/src/files-mfs/stat.js b/src/files-mfs/stat.js index 87713181b..9bc3ccb7b 100644 --- a/src/files-mfs/stat.js +++ b/src/files-mfs/stat.js @@ -11,28 +11,14 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.files.stat', function () { - this.timeout(40 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) - }) - + before(async () => { ipfs = await common.setup() }) before(async () => { await ipfs.add(fixtures.smallFile.data) }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should not stat not found file/dir, expect error', function () { const testDir = `/test-${hat()}` @@ -43,7 +29,7 @@ module.exports = (createCommon, options) => { it('should stat file', async function () { const testDir = `/test-${hat()}` - await ipfs.files.mkdir(testDir, { p: true }) + await ipfs.files.mkdir(testDir, { parents: true }) await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true }) const stat = await ipfs.files.stat(`${testDir}/b`) @@ -63,7 +49,7 @@ module.exports = (createCommon, options) => { it('should stat dir', async function () { const testDir = `/test-${hat()}` - await ipfs.files.mkdir(testDir, { p: true }) + await ipfs.files.mkdir(testDir, { parents: true }) await ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), { create: true }) const stat = await ipfs.files.stat(testDir) diff --git a/src/files-mfs/write.js b/src/files-mfs/write.js index a8352a025..6fc3e42d1 100644 --- a/src/files-mfs/write.js +++ b/src/files-mfs/write.js @@ -14,22 +14,9 @@ module.exports = (createCommon, options) => { let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - 
factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) - }) + before(async () => { ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should not write to non existent file, expect error', function () { const testDir = `/test-${hat()}` From 41727203e316112aa050cb3a9c3184188d43a27a Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Fri, 22 Nov 2019 16:00:10 +0000 Subject: [PATCH 30/45] refactor: files-regular before and after methods to async syntax --- src/files-regular/add-from-fs.js | 19 +++------------- src/files-regular/add-from-stream.js | 19 +++------------- src/files-regular/add-from-url.js | 18 +++------------ src/files-regular/add-pull-stream.js | 19 +++------------- src/files-regular/add-readable-stream.js | 19 +++------------- src/files-regular/add.js | 17 ++------------ src/files-regular/cat-pull-stream.js | 18 ++------------- src/files-regular/cat-readable-stream.js | 24 +++++--------------- src/files-regular/cat.js | 19 +++------------- src/files-regular/get-pull-stream.js | 19 +++------------- src/files-regular/get-readable-stream.js | 22 ++++-------------- src/files-regular/get.js | 29 +++++------------------- src/files-regular/ls-pull-stream.js | 19 ++++------------ src/files-regular/ls-readable-stream.js | 19 ++++------------ src/files-regular/ls.js | 19 ++++------------ src/files-regular/refs-local-tests.js | 19 ++++------------ src/files-regular/refs-tests.js | 19 ++++------------ 17 files changed, 62 insertions(+), 275 deletions(-) diff --git a/src/files-regular/add-from-fs.js b/src/files-regular/add-from-fs.js index b8d27bdf3..5a0765e9b 100644 --- a/src/files-regular/add-from-fs.js +++ b/src/files-regular/add-from-fs.js @@ -13,27 +13,14 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.addFromFs', function () { - this.timeout(40 * 1000) + this.timeout(60 * 1000) const fixturesPath = path.join(__dirname, '../../test/fixtures') let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) - }) + before(async () => { ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should add a directory from the file system', async () => { const filesPath = path.join(fixturesPath, 'test-folder') diff --git a/src/files-regular/add-from-stream.js b/src/files-regular/add-from-stream.js index f7a61676b..34ee1c3c7 100644 --- a/src/files-regular/add-from-stream.js +++ b/src/files-regular/add-from-stream.js @@ -11,26 +11,13 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.addFromStream', function () { - this.timeout(40 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) - }) + before(async () => { ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should add from a stream', async () => { const stream = new Readable({ diff 
--git a/src/files-regular/add-from-url.js b/src/files-regular/add-from-url.js index 9190c2600..c57fc7e4f 100644 --- a/src/files-regular/add-from-url.js +++ b/src/files-regular/add-from-url.js @@ -11,25 +11,13 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.addFromURL', function () { - this.timeout(40 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) - }) + before(async () => { ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should add from a HTTP URL', async () => { const text = `TEST${Date.now()}` diff --git a/src/files-regular/add-pull-stream.js b/src/files-regular/add-pull-stream.js index 351f187bd..9dd753842 100644 --- a/src/files-regular/add-pull-stream.js +++ b/src/files-regular/add-pull-stream.js @@ -12,26 +12,13 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.addPullStream', function () { - this.timeout(40 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) + before(async () => { ipfs = await common.setup() }) - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) - }) - - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should add pull stream of valid files and dirs', async function () { const content = (name) => ({ diff --git a/src/files-regular/add-readable-stream.js b/src/files-regular/add-readable-stream.js index 5410572b6..749ac1e7f 100644 --- a/src/files-regular/add-readable-stream.js +++ b/src/files-regular/add-readable-stream.js @@ -11,26 +11,13 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.addReadableStream', function () { - this.timeout(40 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) + before(async () => { ipfs = await common.setup() }) - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) - }) - - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should add readable stream of valid files and dirs', async function () { const content = (name) => ({ diff --git a/src/files-regular/add.js b/src/files-regular/add.js index 155009140..fdea60e4b 100644 --- a/src/files-regular/add.js +++ b/src/files-regular/add.js @@ -18,22 +18,9 @@ module.exports = (createCommon, options) => { let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) - }) + before(async () => { ipfs = await common.setup() }) - after((done) => common.teardown(done)) + 
after(() => common.teardown()) it('should add a File', async function () { if (!supportsFileReader) return this.skip('skip in node') diff --git a/src/files-regular/cat-pull-stream.js b/src/files-regular/cat-pull-stream.js index 3be3a5410..28aa765e9 100644 --- a/src/files-regular/cat-pull-stream.js +++ b/src/files-regular/cat-pull-stream.js @@ -11,27 +11,13 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.catPullStream', function () { - this.timeout(40 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) - }) + before(async () => { ipfs = await common.setup() }) before(() => ipfs.add(fixtures.smallFile.data)) - after(() => common.teardown()) it('should return a Pull Stream for a CID', async () => { diff --git a/src/files-regular/cat-readable-stream.js b/src/files-regular/cat-readable-stream.js index 517405978..16f3e98db 100644 --- a/src/files-regular/cat-readable-stream.js +++ b/src/files-regular/cat-readable-stream.js @@ -11,29 +11,17 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.catReadableStream', function () { - this.timeout(40 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() + await ipfs.add(fixtures.bigFile.data) + await ipfs.add(fixtures.smallFile.data) }) - before((done) => ipfs.add(fixtures.bigFile.data, done)) - before((done) => ipfs.add(fixtures.smallFile.data, done)) - - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should return a Readable Stream for a CID', async () => { const stream = ipfs.catReadableStream(fixtures.bigFile.cid) diff --git a/src/files-regular/cat.js b/src/files-regular/cat.js index 9723536ef..2681e689a 100644 --- a/src/files-regular/cat.js +++ b/src/files-regular/cat.js @@ -12,26 +12,13 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.cat', function () { - this.timeout(40 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) - }) + before(async () => { ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) before(() => Promise.all([ ipfs.add(fixtures.smallFile.data), diff --git a/src/files-regular/get-pull-stream.js b/src/files-regular/get-pull-stream.js index 576e09471..03f9c379b 100644 --- a/src/files-regular/get-pull-stream.js +++ b/src/files-regular/get-pull-stream.js @@ -11,28 +11,15 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.getPullStream', function () { - this.timeout(40 * 1000) + this.timeout(60 * 1000) let ipfs - 
before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) - }) + before(async () => { ipfs = await common.setup() }) before(() => ipfs.add(fixtures.smallFile.data)) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should return a Pull Stream of Pull Streams', async () => { const stream = ipfs.getPullStream(fixtures.smallFile.cid) diff --git a/src/files-regular/get-readable-stream.js b/src/files-regular/get-readable-stream.js index 9ba872cdd..e82442479 100644 --- a/src/files-regular/get-readable-stream.js +++ b/src/files-regular/get-readable-stream.js @@ -12,28 +12,16 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.getReadableStream', function () { - this.timeout(40 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() + await ipfs.add(fixtures.smallFile.data) }) - before((done) => ipfs.add(fixtures.smallFile.data, done)) - - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should return a Readable Stream of Readable Streams', async () => { const stream = ipfs.getReadableStream(fixtures.smallFile.cid) diff --git a/src/files-regular/get.js b/src/files-regular/get.js index 46f176502..2bfe5d690 100644 --- a/src/files-regular/get.js +++ b/src/files-regular/get.js @@ -3,7 +3,6 @@ const { fixtures } = require('./utils') const bs58 = require('bs58') -const parallel = require('async/parallel') const CID = require('cids') const { getDescribe, getIt, expect } = require('../utils/mocha') @@ -13,33 +12,17 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.get', function () { - this.timeout(40 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) - }) - - before((done) => { - parallel([ - (cb) => ipfs.add(fixtures.smallFile.data, cb), - (cb) => ipfs.add(fixtures.bigFile.data, cb) - ], done) + before(async () => { + ipfs = await common.setup() + await ipfs.add(fixtures.smallFile.data) + await ipfs.add(fixtures.bigFile.data) }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should get with a base58 encoded multihash', async () => { const files = await ipfs.get(fixtures.smallFile.cid) diff --git a/src/files-regular/ls-pull-stream.js b/src/files-regular/ls-pull-stream.js index 1e534be7c..963b2da1e 100644 --- a/src/files-regular/ls-pull-stream.js +++ b/src/files-regular/ls-pull-stream.js @@ -11,26 +11,15 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.lsPullStream', function () { - this.timeout(40 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // 
CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should pull stream ls with a base58 encoded CID', async function () { const content = (name) => ({ diff --git a/src/files-regular/ls-readable-stream.js b/src/files-regular/ls-readable-stream.js index 55185d537..f3d0cbbca 100644 --- a/src/files-regular/ls-readable-stream.js +++ b/src/files-regular/ls-readable-stream.js @@ -11,26 +11,15 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.lsReadableStream', function () { - this.timeout(40 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should readable stream ls with a base58 encoded CID', async function () { const content = (name) => ({ diff --git a/src/files-regular/ls.js b/src/files-regular/ls.js index c3217e46f..3dc3b73e0 100644 --- a/src/files-regular/ls.js +++ b/src/files-regular/ls.js @@ -14,26 +14,15 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.ls', function () { - this.timeout(40 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should ls with a base58 encoded CID', async function () { const content = (name) => ({ diff --git a/src/files-regular/refs-local-tests.js b/src/files-regular/refs-local-tests.js index 11f60f7de..78c8e6702 100644 --- a/src/files-regular/refs-local-tests.js +++ b/src/files-regular/refs-local-tests.js @@ -10,26 +10,15 @@ module.exports = (createCommon, suiteName, ipfsRefsLocal, options) => { const common = createCommon() describe(suiteName, function () { - this.timeout(40 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should get local refs', async function () { const content = (name) => ({ diff --git a/src/files-regular/refs-tests.js b/src/files-regular/refs-tests.js index b91761ef9..60bdebc4d 100644 --- a/src/files-regular/refs-tests.js +++ b/src/files-regular/refs-tests.js 
@@ -13,23 +13,12 @@ module.exports = (createCommon, suiteName, ipfsRefs, options) => { const common = createCommon() describe(suiteName, function () { - this.timeout(40 * 1000) + this.timeout(60 * 1000) let ipfs, pbRootCb, dagRootCid - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) before(async function () { @@ -42,7 +31,7 @@ module.exports = (createCommon, suiteName, ipfsRefs, options) => { dagRootCid = cid }) - after((done) => common.teardown(done)) + after(() => common.teardown()) for (const [name, options] of Object.entries(getRefsTests())) { const { path, params, expected, expectError, expectTimeout } = options From dd446e8994c6abf8334bd63e8627fa6801725e5e Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Fri, 22 Nov 2019 16:00:34 +0000 Subject: [PATCH 31/45] refactor: key before and after methods to async syntax --- src/key/export.js | 20 +++++--------------- src/key/gen.js | 20 +++++--------------- src/key/import.js | 20 +++++--------------- src/key/list.js | 22 +++++----------------- src/key/rename.js | 22 +++++----------------- src/key/rm.js | 22 +++++----------------- 6 files changed, 30 insertions(+), 96 deletions(-) diff --git a/src/key/export.js b/src/key/export.js index 359dbe2db..ddfb783b9 100644 --- a/src/key/export.js +++ b/src/key/export.js @@ -9,25 +9,15 @@ module.exports = (createCommon, options) => { const it = getIt(options) const common = createCommon() - describe('.key.export', () => { + describe('.key.export', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should export "self" key', async function () { const pem = await ipfs.key.export('self', hat()) diff --git a/src/key/gen.js b/src/key/gen.js index 111c14216..71d5be51e 100644 --- a/src/key/gen.js +++ b/src/key/gen.js @@ -9,29 +9,19 @@ module.exports = (createCommon, options) => { const it = getIt(options) const common = createCommon() - describe('.key.gen', () => { + describe('.key.gen', function () { + this.timeout(60 * 1000) const keyTypes = [ { type: 'rsa', size: 2048 } ] let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) keyTypes.forEach((kt) => { it(`should generate a new ${kt.type} key`, async function () { diff --git a/src/key/import.js b/src/key/import.js index 62f46403d..8669223cd 100644 --- a/src/key/import.js +++ b/src/key/import.js @@ -9,25 +9,15 @@ module.exports = (createCommon, options) => { const it = 
getIt(options) const common = createCommon() - describe('.key.import', () => { + describe('.key.import', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should import an exported key', async () => { const password = hat() diff --git a/src/key/list.js b/src/key/list.js index 3f471c958..383989435 100644 --- a/src/key/list.js +++ b/src/key/list.js @@ -11,29 +11,17 @@ module.exports = (createCommon, options) => { const it = getIt(options) const common = createCommon() - describe('.key.list', () => { + describe('.key.list', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should list all the keys', async function () { - this.timeout(60 * 1000) - const keys = await pTimes(3, () => ipfs.key.gen(hat(), { type: 'rsa', size: 2048 }), { concurrency: 1 }) const res = await ipfs.key.list() diff --git a/src/key/rename.js b/src/key/rename.js index 3bb02df1a..a18796059 100644 --- a/src/key/rename.js +++ b/src/key/rename.js @@ -10,29 +10,17 @@ module.exports = (createCommon, options) => { const it = getIt(options) const common = createCommon() - describe('.key.rename', () => { + describe('.key.rename', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should rename a key', async function () { - this.timeout(30 * 1000) - const oldName = hat() const newName = hat() diff --git a/src/key/rm.js b/src/key/rm.js index f303c7b69..5140ed901 100644 --- a/src/key/rm.js +++ b/src/key/rm.js @@ -10,29 +10,17 @@ module.exports = (createCommon, options) => { const it = getIt(options) const common = createCommon() - describe('.key.rm', () => { + describe('.key.rm', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should rm a key', async function () { - this.timeout(30 * 1000) - const key = await ipfs.key.gen(hat(), { 
type: 'rsa', size: 2048 }) const removeRes = await ipfs.key.rm(key.name) From 1cb16c8b7664c1c437dbba26a89c2bb87e363794 Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Fri, 22 Nov 2019 16:01:25 +0000 Subject: [PATCH 32/45] refactor: miscellaneous before and after methods to async syntax --- src/miscellaneous/dns.js | 19 +++--------------- src/miscellaneous/id.js | 15 +++------------ src/miscellaneous/resolve.js | 37 +++++++----------------------------- src/miscellaneous/stop.js | 24 +++++------------------ src/miscellaneous/version.js | 22 +++++---------------- 5 files changed, 23 insertions(+), 94 deletions(-) diff --git a/src/miscellaneous/dns.js b/src/miscellaneous/dns.js index 665d1c321..490c04db0 100644 --- a/src/miscellaneous/dns.js +++ b/src/miscellaneous/dns.js @@ -13,24 +13,11 @@ module.exports = (createCommon, options) => { this.retries(3) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => { - common.teardown(done) - }) + after(() => common.teardown()) it('should non-recursively resolve ipfs.io', async () => { const res = await ipfs.dns('ipfs.io', { recursive: false }) diff --git a/src/miscellaneous/id.js b/src/miscellaneous/id.js index 36ffbfed4..2ba82464d 100644 --- a/src/miscellaneous/id.js +++ b/src/miscellaneous/id.js @@ -12,20 +12,11 @@ module.exports = (createCommon, options) => { this.timeout(60 * 1000) let ipfs - before(function (done) { - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => { - common.teardown(done) - }) + after(() => common.teardown()) it('should get the node ID', async () => { const res = await ipfs.id() diff --git a/src/miscellaneous/resolve.js b/src/miscellaneous/resolve.js index b52f4f7db..699c122b8 100644 --- a/src/miscellaneous/resolve.js +++ b/src/miscellaneous/resolve.js @@ -6,8 +6,6 @@ const isIpfs = require('is-ipfs') const loadFixture = require('aegir/fixtures') const hat = require('hat') const multibase = require('multibase') -const { spawnNodeWithId } = require('../utils/spawn') -const { connect } = require('../utils/swarm') const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { @@ -18,22 +16,12 @@ module.exports = (createCommon, options) => { describe('.resolve', function () { this.timeout(60 * 1000) let ipfs - let nodeId - - before(function (done) { - common.setup((err, factory) => { - expect(err).to.not.exist() - spawnNodeWithId(factory, (err, node) => { - expect(err).to.not.exist() - - ipfs = node - nodeId = node.peerId.id - done() - }) - }) + + before(async () => { + ipfs = await common.setup() }) - after(common.teardown) + after(() => common.teardown()) it('should resolve an IPFS hash', async () => { const content = loadFixture('test/fixtures/testfile.txt', 'interface-ipfs-core') @@ -89,25 +77,14 @@ module.exports = (createCommon, options) => { }) it('should resolve IPNS link recursively', async function () { - this.timeout(20 * 1000) - - // Ensure another node exists for publishing to - await new Promise((resolve, reject) => { - common.setup((err, 
factory) => { - if (err) return reject(err) - spawnNodeWithId(factory, (err, node) => { - if (err) return reject(err) - const addr = node.peerId.addresses.find((a) => a.includes('127.0.0.1')) - connect(ipfs, addr, resolve) - }) - }) - }) + const node = await common.setup() + await ipfs.swarm.connect(node.peerId.addresses.find((a) => a.includes('127.0.0.1'))) const [{ path }] = await ipfs.add(Buffer.from('should resolve a record recursive === true')) const { id: keyId } = await ipfs.key.gen('key-name', { type: 'rsa', size: 2048 }) await ipfs.name.publish(path, { allowOffline: true }) - await ipfs.name.publish(`/ipns/${nodeId}`, { allowOffline: true, key: 'key-name', resolve: false }) + await ipfs.name.publish(`/ipns/${ipfs.peerId.id}`, { allowOffline: true, key: 'key-name', resolve: false }) return expect(await ipfs.resolve(`/ipns/${keyId}`, { recursive: true })) .to.eq(`/ipfs/${path}`) diff --git a/src/miscellaneous/stop.js b/src/miscellaneous/stop.js index 779eae2ba..5adaa4bbc 100644 --- a/src/miscellaneous/stop.js +++ b/src/miscellaneous/stop.js @@ -8,30 +8,16 @@ module.exports = (createCommon, options) => { const it = getIt(options) const common = createCommon() - describe('.stop', () => { + describe('.stop', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) - }) - - after((done) => { - common.teardown(done) + before(async () => { + ipfs = await common.setup() }) // must be last test to run - it('should stop the node2', async function () { + it('should stop the node', async function () { this.timeout(10 * 1000) await ipfs.stop() diff --git a/src/miscellaneous/version.js b/src/miscellaneous/version.js index f92f519e9..1d8f58eb8 100644 --- a/src/miscellaneous/version.js +++ b/src/miscellaneous/version.js @@ -8,27 +8,15 @@ module.exports = (createCommon, options) => { const it = getIt(options) const common = createCommon() - describe('.version', () => { + describe('.version', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => { - common.teardown(done) - }) + after(() => common.teardown()) it('should get the node version', async () => { const result = await ipfs.version() From 3f95aaba6e9fb33310dbd5fb668bf7969d1c245b Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Fri, 22 Nov 2019 16:01:49 +0000 Subject: [PATCH 33/45] refactor: name before and after methods to async syntax --- src/name/publish.js | 32 +++++++++----------------------- src/name/resolve.js | 18 ++++-------------- 2 files changed, 13 insertions(+), 37 deletions(-) diff --git a/src/name/publish.js b/src/name/publish.js index b998cb1e6..b21315ea7 100644 --- a/src/name/publish.js +++ b/src/name/publish.js @@ -4,7 +4,6 @@ const hat = require('hat') const { fixture } = require('./utils') -const { spawnNodeWithId } = require('../utils/spawn') const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = 
(createCommon, options) => { @@ -17,33 +16,20 @@ module.exports = (createCommon, options) => { let ipfs let nodeId - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - - spawnNodeWithId(factory, (err, node) => { - expect(err).to.not.exist() - - ipfs = node - nodeId = node.peerId.id - - ipfs.add(fixture.data, { pin: false }, done) - }) - }) + before(async () => { + ipfs = await common.setup() + nodeId = ipfs.peerId.id + await ipfs.add(fixture.data, { pin: false }) }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should publish an IPNS record with the default params', async function () { this.timeout(50 * 1000) const value = fixture.cid - const res = await ipfs.name.publish(value, { 'allow-offline': true }) + const res = await ipfs.name.publish(value, { allowOffline: true }) expect(res).to.exist() expect(res.name).to.equal(nodeId) expect(res.value).to.equal(`/ipfs/${value}`) @@ -51,7 +37,7 @@ module.exports = (createCommon, options) => { it('should publish correctly with the lifetime option and resolve', async () => { const [{ path }] = await ipfs.add(Buffer.from('should publish correctly with the lifetime option and resolve')) - await ipfs.name.publish(path, { 'allow-offline': true, resolve: false, lifetime: '2h' }) + await ipfs.name.publish(path, { allowOffline: true, resolve: false, lifetime: '2h' }) return expect(await ipfs.name.resolve(`/ipns/${nodeId}`)).to.eq(`/ipfs/${path}`) }) @@ -66,7 +52,7 @@ module.exports = (createCommon, options) => { lifetime: '1m', ttl: '10s', key: 'self', - 'allow-offline': true + allowOffline: true } const res = await ipfs.name.publish(value, options) @@ -84,7 +70,7 @@ module.exports = (createCommon, options) => { lifetime: '24h', ttl: '10s', key: keyName, - 'allow-offline': true + allowOffline: true } const key = await ipfs.key.gen(keyName, { type: 'rsa', size: 2048 }) diff --git a/src/name/resolve.js b/src/name/resolve.js index 99060b9fd..d3e1be7ce 100644 --- a/src/name/resolve.js +++ b/src/name/resolve.js @@ -2,7 +2,6 @@ /* eslint-env mocha */ 'use strict' -const { spawnNodeWithId } = require('../utils/spawn') const { getDescribe, getIt, expect } = require('../utils/mocha') const delay = require('delay') const CID = require('cids') @@ -16,21 +15,12 @@ module.exports = (createCommon, options) => { let ipfs let nodeId - before(function (done) { - common.setup((err, factory) => { - expect(err).to.not.exist() - - spawnNodeWithId(factory, (err, node) => { - expect(err).to.not.exist() - - ipfs = node - nodeId = node.peerId.id - done() - }) - }) + before(async () => { + ipfs = await common.setup() + nodeId = ipfs.peerId.id }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should resolve a record default options', async function () { this.timeout(20 * 1000) From 03b5d6f224366dd468816fa8419fd2d0325202d8 Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Fri, 22 Nov 2019 16:02:09 +0000 Subject: [PATCH 34/45] refactor: name-pubsub before and after methods to async syntax --- src/name-pubsub/cancel.js | 23 ++++------------------- src/name-pubsub/state.js | 21 ++++----------------- src/name-pubsub/subs.js | 20 +++----------------- 3 files changed, 11 insertions(+), 53 deletions(-) diff --git a/src/name-pubsub/cancel.js b/src/name-pubsub/cancel.js index 9f4fe529a..cf2a30d0e 100644 --- a/src/name-pubsub/cancel.js +++ 
b/src/name-pubsub/cancel.js @@ -4,7 +4,6 @@ const PeerId = require('peer-id') -const { spawnNodeWithId } = require('../utils/spawn') const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { @@ -16,26 +15,12 @@ module.exports = (createCommon, options) => { let ipfs let nodeId - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - - spawnNodeWithId(factory, (err, node) => { - expect(err).to.not.exist() - - ipfs = node - nodeId = node.peerId.id - - done() - }) - }) + before(async () => { + ipfs = await common.setup() + nodeId = ipfs.peerId.id }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should return false when the name that is intended to cancel is not subscribed', async function () { this.timeout(60 * 1000) diff --git a/src/name-pubsub/state.js b/src/name-pubsub/state.js index bc6bfb368..df40c7b20 100644 --- a/src/name-pubsub/state.js +++ b/src/name-pubsub/state.js @@ -1,7 +1,6 @@ /* eslint-env mocha */ 'use strict' -const { spawnNodeWithId } = require('../utils/spawn') const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { @@ -10,26 +9,14 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.name.pubsub.state', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - - spawnNodeWithId(factory, (err, node) => { - expect(err).to.not.exist() - - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should get the current state of pubsub', async function () { this.timeout(50 * 1000) diff --git a/src/name-pubsub/subs.js b/src/name-pubsub/subs.js index 5624cd44f..388d650dd 100644 --- a/src/name-pubsub/subs.js +++ b/src/name-pubsub/subs.js @@ -2,7 +2,6 @@ /* eslint-env mocha */ 'use strict' -const { spawnNodeWithId } = require('../utils/spawn') const { getDescribe, getIt, expect } = require('../utils/mocha') module.exports = (createCommon, options) => { @@ -13,24 +12,11 @@ module.exports = (createCommon, options) => { describe('.name.pubsub.subs', function () { let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - - spawnNodeWithId(factory, (err, node) => { - expect(err).to.not.exist() - - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should get an empty array as a result of subscriptions before any resolve', async function () { this.timeout(60 * 1000) From 187c89e0cfe2cc497f4b2d10ee37d32ed7fa5af4 Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Fri, 22 Nov 2019 16:02:24 +0000 Subject: [PATCH 35/45] refactor: object before and after methods to async syntax --- src/object/data.js | 17 +++-------------- src/object/get.js | 17 +++-------------- src/object/links.js | 17 +++-------------- src/object/new.js | 17 +++-------------- 
src/object/patch/add-link.js | 17 +++-------------- src/object/patch/append-data.js | 17 +++-------------- src/object/patch/rm-link.js | 17 +++-------------- src/object/patch/set-data.js | 17 +++-------------- src/object/put.js | 17 +++-------------- src/object/stat.js | 17 +++-------------- 10 files changed, 30 insertions(+), 140 deletions(-) diff --git a/src/object/data.js b/src/object/data.js index 5438137dd..1c31b1736 100644 --- a/src/object/data.js +++ b/src/object/data.js @@ -16,22 +16,11 @@ module.exports = (createCommon, options) => { let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should get data by multihash', async () => { const testObj = { diff --git a/src/object/get.js b/src/object/get.js index 938a5246d..1ca870d22 100644 --- a/src/object/get.js +++ b/src/object/get.js @@ -19,22 +19,11 @@ module.exports = (createCommon, options) => { let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should get object by multihash', async () => { const obj = { diff --git a/src/object/links.js b/src/object/links.js index e5c9ae6fc..10400d911 100644 --- a/src/object/links.js +++ b/src/object/links.js @@ -19,22 +19,11 @@ module.exports = (createCommon, options) => { let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should get empty links by multihash', async () => { const testObj = { diff --git a/src/object/new.js b/src/object/new.js index e2756caa3..b4732932c 100644 --- a/src/object/new.js +++ b/src/object/new.js @@ -13,22 +13,11 @@ module.exports = (createCommon, options) => { let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should create a new object with no template', async () => { const cid = await ipfs.object.new() diff --git a/src/object/patch/add-link.js b/src/object/patch/add-link.js index 7c4d55fbf..c221f051d 100644 --- a/src/object/patch/add-link.js +++ b/src/object/patch/add-link.js @@ -16,22 +16,11 @@ module.exports = (createCommon, 
options) => { let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should add a link to an existing node', async () => { const obj = { diff --git a/src/object/patch/append-data.js b/src/object/patch/append-data.js index 5a8aeb3b8..3a35a958e 100644 --- a/src/object/patch/append-data.js +++ b/src/object/patch/append-data.js @@ -14,22 +14,11 @@ module.exports = (createCommon, options) => { let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should append data to an existing node', async () => { const obj = { diff --git a/src/object/patch/rm-link.js b/src/object/patch/rm-link.js index 1be87b4b1..331226837 100644 --- a/src/object/patch/rm-link.js +++ b/src/object/patch/rm-link.js @@ -14,22 +14,11 @@ module.exports = (createCommon, options) => { let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should remove a link from an existing node', async () => { const obj1 = { diff --git a/src/object/patch/set-data.js b/src/object/patch/set-data.js index 28e138525..b65afab97 100644 --- a/src/object/patch/set-data.js +++ b/src/object/patch/set-data.js @@ -14,22 +14,11 @@ module.exports = (createCommon, options) => { let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should set data for an existing node', async () => { const obj = { diff --git a/src/object/put.js b/src/object/put.js index f3a5362cb..da9e885ee 100644 --- a/src/object/put.js +++ b/src/object/put.js @@ -17,22 +17,11 @@ module.exports = (createCommon, options) => { let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => 
common.teardown()) it('should put an object', async () => { const obj = { diff --git a/src/object/stat.js b/src/object/stat.js index cd139b48e..edac169c7 100644 --- a/src/object/stat.js +++ b/src/object/stat.js @@ -17,22 +17,11 @@ module.exports = (createCommon, options) => { let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should get stats by multihash', async () => { const testObj = { From cb07f7349090623c2ce3815cde31ceeaa43e9803 Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Fri, 22 Nov 2019 16:02:39 +0000 Subject: [PATCH 36/45] refactor: pin before and after methods to async syntax --- src/pin/add.js | 29 +++++++---------------------- src/pin/ls.js | 47 ++++++++++++++--------------------------------- src/pin/rm.js | 33 ++++++++------------------------- 3 files changed, 29 insertions(+), 80 deletions(-) diff --git a/src/pin/add.js b/src/pin/add.js index 7f71d3f5a..379ff6543 100644 --- a/src/pin/add.js +++ b/src/pin/add.js @@ -1,7 +1,6 @@ /* eslint-env mocha */ 'use strict' -const each = require('async/each') const { fixtures } = require('./utils') const { getDescribe, getIt, expect } = require('../utils/mocha') @@ -11,32 +10,18 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.pin.add', function () { - this.timeout(50 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - populate() - }) - }) - - function populate () { - each(fixtures.files, (file, cb) => { - ipfs.add(file.data, { pin: false }, cb) - }, done) - } + before(async () => { + ipfs = await common.setup() + await Promise.all(fixtures.files.map(file => { + return ipfs.add(file.data, { pin: false }) + })) }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should add a pin', async () => { const pinset = await ipfs.pin.add(fixtures.files[0].cid, { recursive: false }) diff --git a/src/pin/ls.js b/src/pin/ls.js index 1e3f59acf..1b93d53d9 100644 --- a/src/pin/ls.js +++ b/src/pin/ls.js @@ -1,7 +1,6 @@ /* eslint-env mocha */ 'use strict' -const series = require('async/series') const { fixtures } = require('./utils') const { getDescribe, getIt, expect } = require('../utils/mocha') @@ -11,43 +10,25 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.pin.ls', function () { - this.timeout(50 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - populate() - }) - }) - - function populate () { - series([ - // two files wrapped in directories, only root CID pinned recursively - cb => { - const dir = fixtures.directory.files.map((file) => 
({ path: file.path, content: file.data })) - ipfs.add(dir, { pin: false, cidVersion: 0 }, cb) - }, - cb => ipfs.pin.add(fixtures.directory.cid, { recursive: true }, cb), - // a file (CID pinned recursively) - cb => ipfs.add(fixtures.files[0].data, { pin: false, cidVersion: 0 }, cb), - cb => ipfs.pin.add(fixtures.files[0].cid, { recursive: true }, cb), - // a single CID (pinned directly) - cb => ipfs.add(fixtures.files[1].data, { pin: false, cidVersion: 0 }, cb), - cb => ipfs.pin.add(fixtures.files[1].cid, { recursive: false }, cb) - ], done) - } + before(async () => { + ipfs = await common.setup() + // two files wrapped in directories, only root CID pinned recursively + const dir = fixtures.directory.files.map((file) => ({ path: file.path, content: file.data })) + await ipfs.add(dir, { pin: false, cidVersion: 0 }) + await ipfs.pin.add(fixtures.directory.cid, { recursive: true }) + // a file (CID pinned recursively) + await ipfs.add(fixtures.files[0].data, { pin: false, cidVersion: 0 }) + await ipfs.pin.add(fixtures.files[0].cid, { recursive: true }) + // a single CID (pinned directly) + await ipfs.add(fixtures.files[1].data, { pin: false, cidVersion: 0 }) + await ipfs.pin.add(fixtures.files[1].cid, { recursive: false }) }) - after((done) => common.teardown(done)) + after(() => common.teardown()) // 1st, because ipfs.add pins automatically it('should list all recursive pins', async () => { diff --git a/src/pin/rm.js b/src/pin/rm.js index 94ac6d809..7b8e5dc58 100644 --- a/src/pin/rm.js +++ b/src/pin/rm.js @@ -1,7 +1,6 @@ /* eslint-env mocha */ 'use strict' -const series = require('async/series') const { fixtures } = require('./utils') const { getDescribe, getIt, expect } = require('../utils/mocha') @@ -11,35 +10,19 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.pin.rm', function () { - this.timeout(50 * 1000) + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - populate() - }) - }) - - function populate () { - series([ - cb => ipfs.add(fixtures.files[0].data, { pin: false }, cb), - cb => ipfs.pin.add(fixtures.files[0].cid, { recursive: true }, cb), - cb => ipfs.add(fixtures.files[1].data, { pin: false }, cb), - cb => ipfs.pin.add(fixtures.files[1].cid, { recursive: false }, cb) - ], done) - } + before(async () => { + ipfs = await common.setup() + await ipfs.add(fixtures.files[0].data, { pin: false }) + await ipfs.pin.add(fixtures.files[0].cid, { recursive: true }) + await ipfs.add(fixtures.files[1].data, { pin: false }) + await ipfs.pin.add(fixtures.files[1].cid, { recursive: false }) }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should remove a recursive pin', async () => { const removedPinset = await ipfs.pin.rm(fixtures.files[0].cid, { recursive: true }) From 9953c7fff1e013013a03bc3df6d2d292668e088c Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Fri, 22 Nov 2019 16:02:49 +0000 Subject: [PATCH 37/45] refactor: ping before and after methods to async syntax --- src/ping/ping-pull-stream.js | 28 ++++++---------------------- src/ping/ping-readable-stream.js | 28 ++++++---------------------- src/ping/ping.js | 30 ++++++------------------------ 3 files changed, 18 insertions(+), 68 deletions(-) diff --git 
a/src/ping/ping-pull-stream.js b/src/ping/ping-pull-stream.js index 41b0e8496..e9e65add2 100644 --- a/src/ping/ping-pull-stream.js +++ b/src/ping/ping-pull-stream.js @@ -2,11 +2,8 @@ 'use strict' const pullToPromise = require('pull-to-promise') -const series = require('async/series') -const { spawnNodesWithId } = require('../utils/spawn') const { getDescribe, getIt, expect } = require('../utils/mocha') const { isPong } = require('./utils.js') -const { connect } = require('../utils/swarm') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -14,31 +11,18 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.pingPullStream', function () { - // TODO revisit when https://github.com/ipfs/go-ipfs/issues/5799 is resolved - this.timeout(2 * 60 * 1000) + this.timeout(60 * 1000) let ipfsA let ipfsB - before(function (done) { - common.setup((err, factory) => { - if (err) return done(err) - - series([ - (cb) => { - spawnNodesWithId(2, factory, (err, nodes) => { - if (err) return cb(err) - ipfsA = nodes[0] - ipfsB = nodes[1] - cb() - }) - }, - (cb) => connect(ipfsA, ipfsB.peerId.addresses[0], cb) - ], done) - }) + before(async () => { + ipfsA = await common.setup() + ipfsB = await common.setup() + await ipfsA.swarm.connect(ipfsB.peerId.addresses[0]) }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should send the specified number of packets over pull stream', async () => { const count = 3 diff --git a/src/ping/ping-readable-stream.js b/src/ping/ping-readable-stream.js index 4deb6ae6d..3c69fc367 100644 --- a/src/ping/ping-readable-stream.js +++ b/src/ping/ping-readable-stream.js @@ -3,11 +3,8 @@ const pump = require('pump') const { Writable } = require('stream') -const series = require('async/series') -const { spawnNodesWithId } = require('../utils/spawn') const { getDescribe, getIt, expect } = require('../utils/mocha') const { isPong } = require('./utils.js') -const { connect } = require('../utils/swarm') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -15,31 +12,18 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.pingReadableStream', function () { - // TODO revisit when https://github.com/ipfs/go-ipfs/issues/5799 is resolved - this.timeout(2 * 60 * 1000) + this.timeout(60 * 1000) let ipfsA let ipfsB - before(function (done) { - common.setup((err, factory) => { - if (err) return done(err) - - series([ - (cb) => { - spawnNodesWithId(2, factory, (err, nodes) => { - if (err) return cb(err) - ipfsA = nodes[0] - ipfsB = nodes[1] - cb() - }) - }, - (cb) => connect(ipfsA, ipfsB.peerId.addresses[0], cb) - ], done) - }) + before(async () => { + ipfsA = await common.setup() + ipfsB = await common.setup() + await ipfsA.swarm.connect(ipfsB.peerId.addresses[0]) }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should send the specified number of packets over readable stream', () => { let packetNum = 0 diff --git a/src/ping/ping.js b/src/ping/ping.js index f4632f718..95b914826 100644 --- a/src/ping/ping.js +++ b/src/ping/ping.js @@ -1,11 +1,8 @@ /* eslint-env mocha */ 'use strict' -const series = require('async/series') -const { spawnNodesWithId } = require('../utils/spawn') const { getDescribe, getIt, expect } = require('../utils/mocha') const { expectIsPingResponse, isPong } = require('./utils') -const { connect } = require('../utils/swarm') module.exports = (createCommon, options) => { const describe = 
getDescribe(options) @@ -13,33 +10,18 @@ module.exports = (createCommon, options) => { const common = createCommon() describe('.ping', function () { - // TODO revisit when https://github.com/ipfs/go-ipfs/issues/5799 is resolved - this.timeout(2 * 60 * 1000) + this.timeout(60 * 1000) let ipfsA let ipfsB - before(function (done) { - this.timeout(60 * 1000) - - common.setup((err, factory) => { - if (err) return done(err) - - series([ - (cb) => { - spawnNodesWithId(2, factory, (err, nodes) => { - if (err) return cb(err) - ipfsA = nodes[0] - ipfsB = nodes[1] - cb() - }) - }, - (cb) => connect(ipfsA, ipfsB.peerId.addresses[0], cb) - ], done) - }) + before(async () => { + ipfsA = await common.setup() + ipfsB = await common.setup() + await ipfsA.swarm.connect(ipfsB.peerId.addresses[0]) }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should send the specified number of packets', async () => { const count = 3 From 4479df9b2e295a8fd8ec54eec8d39058794fbb39 Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Fri, 22 Nov 2019 16:03:06 +0000 Subject: [PATCH 38/45] refactor: pubsub before and after methods to async syntax --- src/pubsub/ls.js | 17 +++-------------- src/pubsub/peers.js | 40 ++++++++++----------------------------- src/pubsub/publish.js | 19 ++++--------------- src/pubsub/subscribe.js | 23 ++++------------------ src/pubsub/unsubscribe.js | 17 +++-------------- 5 files changed, 24 insertions(+), 92 deletions(-) diff --git a/src/pubsub/ls.js b/src/pubsub/ls.js index d052b395b..126be18ba 100644 --- a/src/pubsub/ls.js +++ b/src/pubsub/ls.js @@ -16,19 +16,8 @@ module.exports = (createCommon, options) => { let ipfs let subscribedTopics = [] - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) afterEach(async () => { @@ -39,7 +28,7 @@ module.exports = (createCommon, options) => { await delay(100) }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should return an empty list when no topics are subscribed', async () => { const topics = await ipfs.pubsub.ls() diff --git a/src/pubsub/peers.js b/src/pubsub/peers.js index 2e40e162f..01084d2d5 100644 --- a/src/pubsub/peers.js +++ b/src/pubsub/peers.js @@ -1,11 +1,8 @@ /* eslint-env mocha */ 'use strict' -const parallel = require('async/parallel') -const { spawnNodesWithId } = require('../utils/spawn') const { waitForPeers, getTopic } = require('./utils') const { getDescribe, getIt, expect } = require('../utils/mocha') -const { connect } = require('../utils/swarm') const delay = require('delay') module.exports = (createCommon, options) => { @@ -21,24 +18,17 @@ module.exports = (createCommon, options) => { let ipfs3 let subscribedTopics = [] - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(100 * 1000) + before(async () => { + ipfs1 = await common.setup() + ipfs2 = await common.setup() + ipfs3 = await common.setup() - common.setup((err, factory) => { - if (err) return done(err) - - spawnNodesWithId(3, factory, (err, nodes) => { - if (err) return done(err) - - ipfs1 = nodes[0] - ipfs2 = nodes[1] - ipfs3 = nodes[2] + const ipfs2Addr = ipfs2.peerId.addresses.find((a) => 
a.includes('127.0.0.1')) + const ipfs3Addr = ipfs3.peerId.addresses.find((a) => a.includes('127.0.0.1')) - done() - }) - }) + await ipfs1.swarm.connect(ipfs2Addr) + await ipfs1.swarm.connect(ipfs3Addr) + await ipfs2.swarm.connect(ipfs3Addr) }) afterEach(async () => { @@ -51,17 +41,7 @@ module.exports = (createCommon, options) => { await delay(100) }) - after((done) => common.teardown(done)) - - before((done) => { - const ipfs2Addr = ipfs2.peerId.addresses.find((a) => a.includes('127.0.0.1')) - const ipfs3Addr = ipfs3.peerId.addresses.find((a) => a.includes('127.0.0.1')) - - parallel([ - (cb) => connect(ipfs1, [ipfs2Addr, ipfs3Addr], cb), - (cb) => connect(ipfs2, ipfs3Addr, cb) - ], done) - }) + after(() => common.teardown()) it('should not error when not subscribed to a topic', async () => { const topic = getTopic() diff --git a/src/pubsub/publish.js b/src/pubsub/publish.js index 8c660c675..6eba0061f 100644 --- a/src/pubsub/publish.js +++ b/src/pubsub/publish.js @@ -3,7 +3,7 @@ const hat = require('hat') const { getTopic } = require('./utils') -const { getDescribe, getIt, expect } = require('../utils/mocha') +const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) @@ -15,22 +15,11 @@ module.exports = (createCommon, options) => { let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should publish message from string', () => { const topic = getTopic() diff --git a/src/pubsub/subscribe.js b/src/pubsub/subscribe.js index ab6761e1f..be5093fc0 100644 --- a/src/pubsub/subscribe.js +++ b/src/pubsub/subscribe.js @@ -4,7 +4,6 @@ const pushable = require('it-pushable') const { collect } = require('streaming-iterables') -const { spawnNodesWithId } = require('../utils/spawn') const { waitForPeers, getTopic } = require('./utils') const { getDescribe, getIt, expect } = require('../utils/mocha') const delay = require('delay') @@ -22,23 +21,9 @@ module.exports = (createCommon, options) => { let topic let subscribedTopics = [] - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(100 * 1000) - - common.setup((err, factory) => { - if (err) return done(err) - - spawnNodesWithId(2, factory, (err, nodes) => { - if (err) return done(err) - - ipfs1 = nodes[0] - ipfs2 = nodes[1] - - done() - }) - }) + before(async () => { + ipfs1 = await common.setup() + ipfs2 = await common.setup() }) beforeEach(() => { @@ -56,7 +41,7 @@ module.exports = (createCommon, options) => { await delay(100) }) - after((done) => common.teardown(done)) + after(() => common.teardown()) describe('single node', () => { it('should subscribe to one topic', async () => { diff --git a/src/pubsub/unsubscribe.js b/src/pubsub/unsubscribe.js index 9a7f0efd4..dd0be0f9a 100644 --- a/src/pubsub/unsubscribe.js +++ b/src/pubsub/unsubscribe.js @@ -16,22 +16,11 @@ module.exports = (createCommon, options) => { let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 
1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) // Browser/worker has max ~5 open HTTP requests to the same origin const count = isBrowser || isWebWorker || isElectronRenderer ? 5 : 10 From 25a99bfcc92b4ee2bdf07127271e160c65621f2c Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Fri, 22 Nov 2019 16:03:22 +0000 Subject: [PATCH 39/45] refactor: repo before and after methods to async syntax --- src/repo/gc.js | 20 +++++--------------- src/repo/stat.js | 22 ++++++---------------- src/repo/version.js | 20 +++++--------------- 3 files changed, 16 insertions(+), 46 deletions(-) diff --git a/src/repo/gc.js b/src/repo/gc.js index 52c66f9d5..11fded58a 100644 --- a/src/repo/gc.js +++ b/src/repo/gc.js @@ -9,25 +9,15 @@ module.exports = (createCommon, options) => { const it = getIt(options) const common = createCommon() - describe('.repo.gc', () => { + describe('.repo.gc', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should run garbage collection', async () => { const res = await ipfs.add(Buffer.from('apples')) diff --git a/src/repo/stat.js b/src/repo/stat.js index f20d006ac..662c24035 100644 --- a/src/repo/stat.js +++ b/src/repo/stat.js @@ -2,32 +2,22 @@ 'use strict' const { expectIsRepo } = require('../stats/utils') -const { getDescribe, getIt, expect } = require('../utils/mocha') +const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) const common = createCommon() - describe('.repo.stat', () => { + describe('.repo.stat', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should get repo stats', async () => { const res = await ipfs.repo.stat() diff --git a/src/repo/version.js b/src/repo/version.js index c9db6c95c..99c084326 100644 --- a/src/repo/version.js +++ b/src/repo/version.js @@ -8,25 +8,15 @@ module.exports = (createCommon, options) => { const it = getIt(options) const common = createCommon() - describe('.repo.version', () => { + describe('.repo.version', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + 
before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should get the repo version', async () => { const version = await ipfs.repo.version() From f00b03d3da09c347605b30d9b0a97b51ea77f304 Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Fri, 22 Nov 2019 16:03:42 +0000 Subject: [PATCH 40/45] refactor: stats before and after methods to async syntax --- src/stats/bitswap.js | 19 ++++--------------- src/stats/bw-pull-stream.js | 22 ++++++---------------- src/stats/bw-readable-stream.js | 22 ++++++---------------- src/stats/bw.js | 22 ++++++---------------- src/stats/repo.js | 22 ++++++---------------- 5 files changed, 28 insertions(+), 79 deletions(-) diff --git a/src/stats/bitswap.js b/src/stats/bitswap.js index 545db48ae..37a41828c 100644 --- a/src/stats/bitswap.js +++ b/src/stats/bitswap.js @@ -1,7 +1,7 @@ /* eslint-env mocha */ 'use strict' -const { getDescribe, getIt, expect } = require('../utils/mocha') +const { getDescribe, getIt } = require('../utils/mocha') const { expectIsBitswap } = require('./utils') module.exports = (createCommon, options) => { @@ -12,22 +12,11 @@ module.exports = (createCommon, options) => { describe('.stats.bitswap', () => { let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should get bitswap stats', async () => { const res = await ipfs.stats.bitswap() diff --git a/src/stats/bw-pull-stream.js b/src/stats/bw-pull-stream.js index 3fd26033d..6d3c39331 100644 --- a/src/stats/bw-pull-stream.js +++ b/src/stats/bw-pull-stream.js @@ -3,32 +3,22 @@ const { expectIsBandwidth } = require('./utils') const pullToPromise = require('pull-to-promise') -const { getDescribe, getIt, expect } = require('../utils/mocha') +const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) const common = createCommon() - describe('.stats.bwPullStream', () => { + describe('.stats.bwPullStream', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should get bandwidth stats over pull stream', async () => { const stream = ipfs.stats.bwPullStream() diff --git a/src/stats/bw-readable-stream.js b/src/stats/bw-readable-stream.js index b44869506..6ab0c711e 100644 --- a/src/stats/bw-readable-stream.js +++ b/src/stats/bw-readable-stream.js @@ -2,7 +2,7 @@ 'use strict' const { expectIsBandwidth } = require('./utils') -const { getDescribe, getIt, expect } = require('../utils/mocha') +const { getDescribe, getIt } = require('../utils/mocha') const getStream = require('get-stream') module.exports = (createCommon, options) => { @@ -10,25 +10,15 @@ module.exports = 
(createCommon, options) => { const it = getIt(options) const common = createCommon() - describe('.stats.bwReadableStream', () => { + describe('.stats.bwReadableStream', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should get bandwidth stats over readable stream', async () => { const stream = ipfs.stats.bwReadableStream() diff --git a/src/stats/bw.js b/src/stats/bw.js index 34c552673..13937c77d 100644 --- a/src/stats/bw.js +++ b/src/stats/bw.js @@ -2,32 +2,22 @@ 'use strict' const { expectIsBandwidth } = require('./utils') -const { getDescribe, getIt, expect } = require('../utils/mocha') +const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) const common = createCommon() - describe('.stats.bw', () => { + describe('.stats.bw', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should get bandwidth stats ', async () => { const res = await ipfs.stats.bw() diff --git a/src/stats/repo.js b/src/stats/repo.js index 4064b33d4..792449307 100644 --- a/src/stats/repo.js +++ b/src/stats/repo.js @@ -2,32 +2,22 @@ 'use strict' const { expectIsRepo } = require('./utils') -const { getDescribe, getIt, expect } = require('../utils/mocha') +const { getDescribe, getIt } = require('../utils/mocha') module.exports = (createCommon, options) => { const describe = getDescribe(options) const it = getIt(options) const common = createCommon() - describe('.stats.repo', () => { + describe('.stats.repo', function () { + this.timeout(60 * 1000) let ipfs - before(function (done) { - // CI takes longer to instantiate the daemon, so we need to increase the - // timeout for the before step - this.timeout(60 * 1000) - - common.setup((err, factory) => { - expect(err).to.not.exist() - factory.spawnNode((err, node) => { - expect(err).to.not.exist() - ipfs = node - done() - }) - }) + before(async () => { + ipfs = await common.setup() }) - after((done) => common.teardown(done)) + after(() => common.teardown()) it('should get repo stats', async () => { const res = await ipfs.stats.repo() From 514ffed68318141d80a736bbd8d0994c94d1790f Mon Sep 17 00:00:00 2001 From: Pedro Santos Date: Fri, 22 Nov 2019 16:26:55 +0000 Subject: [PATCH 41/45] chore: remove 'only' from files-mfs/cp --- src/files-mfs/cp.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/files-mfs/cp.js b/src/files-mfs/cp.js index addd9af76..966a8c302 100644 --- a/src/files-mfs/cp.js +++ b/src/files-mfs/cp.js @@ -10,7 +10,7 @@ module.exports = (createCommon, options) => { const it = getIt(options) const common = createCommon() - 
describe.only('.files.cp', function () {
+  describe('.files.cp', function () {
     this.timeout(60 * 1000)
 
     let ipfs

From a0c1e6a27e77a7e77d680eb32195cf695d321c08 Mon Sep 17 00:00:00 2001
From: Pedro Santos
Date: Fri, 22 Nov 2019 17:03:52 +0000
Subject: [PATCH 42/45] chore: uncomment bootstrap rm test assertion

The assertion was commented out temporarily due to a bootstrap.rm bug that
was fixed in https://github.com/ipfs/js-ipfs/pull/2626
---
 src/bootstrap/rm.js | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/src/bootstrap/rm.js b/src/bootstrap/rm.js
index 1bdc19c3c..172cf014d 100644
--- a/src/bootstrap/rm.js
+++ b/src/bootstrap/rm.js
@@ -52,8 +52,7 @@ module.exports = (createCommon, options) => {
       const rmRes = await ipfs.bootstrap.rm(null, { all: true })
       const removedPeers = rmRes.Peers
 
-      // TODO: fix bootstrap.rm as it's not returning all the nodes when all option is passed
-      // expect(removedPeers).to.eql(addedPeers)
+      expect(removedPeers).to.eql(addedPeers)
     })
   })
 }

From f3a8224cdc942caacb9045ea276bd8442696bc3e Mon Sep 17 00:00:00 2001
From: Pedro Santos
Date: Fri, 22 Nov 2019 19:05:53 +0000
Subject: [PATCH 43/45] refactor: name resolve test

---
 src/name/resolve.js | 15 +++------------
 1 file changed, 3 insertions(+), 12 deletions(-)

diff --git a/src/name/resolve.js b/src/name/resolve.js
index d3e1be7ce..1f72a0c05 100644
--- a/src/name/resolve.js
+++ b/src/name/resolve.js
@@ -132,20 +132,11 @@ module.exports = (createCommon, options) => {
     let ipfs
     this.retries(5)
 
-    before(function (done) {
-      common.setup((err, factory) => {
-        expect(err).to.not.exist()
-
-        factory.spawnNode((err, node) => {
-          expect(err).to.not.exist()
-
-          ipfs = node
-          done()
-        })
-      })
+    before(async () => {
+      ipfs = await common.setup()
     })
 
-    after((done) => common.teardown(done))
+    after(() => common.teardown())
 
     it('should resolve /ipns/ipfs.io', async () => {
       return expect(await ipfs.name.resolve('/ipns/ipfs.io'))

From 2720e57d59bfd2de69568864b86aae508aeee63b Mon Sep 17 00:00:00 2001
From: Pedro Santos
Date: Sun, 24 Nov 2019 13:26:58 +0000
Subject: [PATCH 44/45] fix: increase dht findProvs test timeout

---
 src/dht/find-provs.js | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/src/dht/find-provs.js b/src/dht/find-provs.js
index 2a5b5e044..4897f4f16 100644
--- a/src/dht/find-provs.js
+++ b/src/dht/find-provs.js
@@ -19,11 +19,14 @@ module.exports = (createCommon, options) => {
   const common = createCommon()
 
   describe('.dht.findProvs', function () {
+    this.timeout(80 * 1000)
+
     let nodeA
     let nodeB
     let nodeC
 
-    before(async () => {
+    before(async function () {
+      this.timeout(60 * 1000)
       nodeA = await common.setup()
       nodeB = await common.setup()
       nodeC = await common.setup()
@@ -37,6 +40,8 @@ module.exports = (createCommon, options) => {
     let providedCid
 
     before('add providers for the same cid', async function () {
+      this.timeout(10 * 1000)
+
      const cids = await Promise.all([
         nodeB.object.new('unixfs-dir'),
         nodeC.object.new('unixfs-dir')

From 2aaff5e1baf0cf61898a19e6348c3d56902d1d24 Mon Sep 17 00:00:00 2001
From: Pedro Santos
Date: Tue, 26 Nov 2019 11:04:44 +0000
Subject: [PATCH 45/45] fix: miscellaneous stop test

---
 src/miscellaneous/stop.js | 7 +------
 1 file changed, 1 insertion(+), 6 deletions(-)

diff --git a/src/miscellaneous/stop.js b/src/miscellaneous/stop.js
index 5adaa4bbc..d83fdc9e3 100644
--- a/src/miscellaneous/stop.js
+++ b/src/miscellaneous/stop.js
@@ -10,14 +10,9 @@ module.exports = (createCommon, options) => {
 
   describe('.stop', function () {
     this.timeout(60 * 1000)
-    let ipfs
 
-    
before(async () => { - ipfs = await common.setup() - }) - - // must be last test to run it('should stop the node', async function () { + const ipfs = await common.setup() this.timeout(10 * 1000) await ipfs.stop()