diff --git a/src/core/components/pubsub.js b/src/core/components/pubsub.js
index fff4435844..8c5916b906 100644
--- a/src/core/components/pubsub.js
+++ b/src/core/components/pubsub.js
@@ -33,7 +33,11 @@ module.exports = function pubsub (self) {
         return
       }
 
-      checkOnlineAndEnabled()
+      try {
+        checkOnlineAndEnabled()
+      } catch (err) {
+        return Promise.reject(err)
+      }
 
       return self.libp2p.pubsub.subscribe(topic, handler, options)
     },
@@ -50,7 +54,11 @@ module.exports = function pubsub (self) {
         return
       }
 
-      checkOnlineAndEnabled()
+      try {
+        checkOnlineAndEnabled()
+      } catch (err) {
+        return Promise.reject(err)
+      }
 
       return self.libp2p.pubsub.unsubscribe(topic, handler)
     },
diff --git a/src/http/api/resources/files-regular.js b/src/http/api/resources/files-regular.js
index ea711cfc18..713867717b 100644
--- a/src/http/api/resources/files-regular.js
+++ b/src/http/api/resources/files-regular.js
@@ -336,6 +336,10 @@ exports.refs = {
     const unique = request.query.unique
     const maxDepth = request.query['max-depth']
 
+    if (maxDepth <= 0) {
+      return h.response()
+    }
+
     const source = ipfs.refsPullStream(key, { recursive, format, edges, unique, maxDepth })
     return sendRefsReplyStream(request, h, `refs for ${key}`, source)
   }
diff --git a/test/cli/daemon.js b/test/cli/daemon.js
index 0bee3e507e..6e2cb1fc46 100644
--- a/test/cli/daemon.js
+++ b/test/cli/daemon.js
@@ -104,14 +104,13 @@ describe('daemon', () => {
       }
     })
 
-    try {
-      await daemon
-      throw new Error('Did not kill process')
-    } catch (err) {
-      expect(err.killed).to.be.true()
-
-      expect(stdout).to.include('Daemon is ready')
-    }
+    await daemon.then(
+      () => expect.fail('Did not kill process'),
+      (err) => {
+        expect(err.killed).to.be.true()
+        expect(stdout).to.include('Daemon is ready')
+      }
+    )
   })
 
   it('should allow bind to multiple addresses for API and Gateway', async function () {
@@ -142,15 +141,15 @@ describe('daemon', () => {
       }
     })
 
-    try {
-      await daemon
-      throw new Error('Did not kill process')
-    } catch (err) {
-      expect(err.killed).to.be.true()
+    await daemon.then(
+      () => expect.fail('Did not kill process'),
+      (err) => {
+        expect(err.killed).to.be.true()
 
-      apiAddrs.forEach(addr => expect(err.stdout).to.include(`API listening on ${addr.slice(0, -2)}`))
-      gatewayAddrs.forEach(addr => expect(err.stdout).to.include(`Gateway (read only) listening on ${addr.slice(0, -2)}`))
-    }
+        apiAddrs.forEach(addr => expect(err.stdout).to.include(`API listening on ${addr.slice(0, -2)}`))
+        gatewayAddrs.forEach(addr => expect(err.stdout).to.include(`Gateway (read only) listening on ${addr.slice(0, -2)}`))
+      }
+    )
   })
 
   it('should allow no bind addresses for API and Gateway', async function () {
@@ -171,15 +170,15 @@ describe('daemon', () => {
       }
     })
 
-    try {
-      await daemon
-      throw new Error('Did not kill process')
-    } catch (err) {
-      expect(err.killed).to.be.true()
+    await daemon.then(
+      () => expect.fail('Did not kill process'),
+      (err) => {
+        expect(err.killed).to.be.true()
 
-      expect(err.stdout).to.not.include('API listening on')
-      expect(err.stdout).to.not.include('Gateway (read only) listening on')
-    }
+        expect(err.stdout).to.not.include('API listening on')
+        expect(err.stdout).to.not.include('Gateway (read only) listening on')
+      }
+    )
   })
 
   skipOnWindows('should handle SIGINT gracefully', async function () {
@@ -211,32 +210,14 @@ describe('daemon', () => {
     await ipfs('init')
 
     const daemon = ipfs('daemon --silent')
-    const stop = async (err) => {
-      daemon.kill()
-
-      if (err) {
-        throw err
-      }
-
-      try {
-        await daemon
-      } catch (err) {
-        if (!err.killed) {
-          throw err
-        }
-      }
-    }
+
+    setTimeout(() => {
+      daemon.kill()
+    }, 5 * 1000)
 
-    return new Promise((resolve, reject) => {
-      daemon.stdout.on('data', (data) => {
-        reject(new Error('Output was received ' + data.toString('utf8')))
-      })
+    const output = await daemon
 
-      setTimeout(() => {
-        resolve()
-      }, 5 * 1000)
-    })
-      .then(stop, stop)
+    expect(output).to.be.empty()
   })
 
   it('should present ipfs path help when option help is received', async function () {
@@ -262,16 +243,16 @@ describe('daemon', () => {
       }
     })
 
-    try {
-      await daemon
-      throw new Error('Did not kill process')
-    } catch (err) {
-      expect(err.killed).to.be.true()
+    await daemon.then(
+      () => expect.fail('Did not kill process'),
+      (err) => {
+        expect(err.killed).to.be.true()
 
-      expect(err.stdout).to.include(`js-ipfs version: ${pkg.version}`)
-      expect(err.stdout).to.include(`System version: ${os.arch()}/${os.platform()}`)
-      expect(err.stdout).to.include(`Node.js version: ${process.versions.node}`)
-    }
+        expect(err.stdout).to.include(`js-ipfs version: ${pkg.version}`)
+        expect(err.stdout).to.include(`System version: ${os.arch()}/${os.platform()}`)
+        expect(err.stdout).to.include(`Node.js version: ${process.versions.node}`)
+      }
+    )
   })
 
   it('should init by default', async function () {
@@ -290,12 +271,10 @@ describe('daemon', () => {
      }
    })
 
-    try {
-      await daemon
-      throw new Error('Did not kill process')
-    } catch (err) {
-      expect(err.killed).to.be.true()
-    }
+    await daemon.then(
+      () => expect.fail('Did not kill process'),
+      (err) => expect(err.killed).to.be.true()
+    )
 
     expect(fs.existsSync(repoPath)).to.be.true()
   })
diff --git a/test/cli/init.js b/test/cli/init.js
index 97bb486fbc..94ed82764b 100644
--- a/test/cli/init.js
+++ b/test/cli/init.js
@@ -83,11 +83,10 @@ describe('init', function () {
   it('profile non-existent', async function () {
     this.timeout(40 * 1000)
 
-    try {
-      await ipfs('init --profile doesnt-exist')
-    } catch (err) {
-      expect(err.stdout).includes('Could not find profile')
-    }
+    await ipfs('init --profile doesnt-exist').then(
+      () => expect.fail('Should have thrown'),
+      (err) => expect(err.stdout).includes('Could not find profile')
+    )
   })
 
   it('should present ipfs path help when option help is received', async function () {
diff --git a/test/core/create-node.spec.js b/test/core/create-node.spec.js
index 7d77fc7ded..5c3a2c8dfe 100644
--- a/test/core/create-node.spec.js
+++ b/test/core/create-node.spec.js
@@ -171,20 +171,18 @@ describe('create node', function () {
 
     expect(ipfs.isOnline()).to.be.false()
 
-    try {
-      await ipfs.ready
-    } catch (err) {
-      expect(ipfs.isOnline()).to.be.false()
-
-      // After the error has occurred, it should still reject
-      try {
-        await ipfs.ready
-      } catch (_) {
-        return
-      }
-    }
+    await ipfs.ready.then(
+      () => expect.fail('Should have thrown'),
+      (err) => expect(err.message).to.contain('Expected modulus bit count >= 512')
+    )
+
+    expect(ipfs.isOnline()).to.be.false()
 
-    throw new Error('ready promise did not reject')
+    // After the error has occurred, it should still reject
+    await ipfs.ready.then(
+      () => expect.fail('Should have thrown'),
+      (err) => expect(err.message).to.contain('Expected modulus bit count >= 512')
+    )
   })
 
   it('should create a ready node with IPFS.create', async () => {
diff --git a/test/core/dht.spec.js b/test/core/dht.spec.js
index d8fde46d18..78ba1e04eb 100644
--- a/test/core/dht.spec.js
+++ b/test/core/dht.spec.js
@@ -74,16 +74,11 @@ describe.skip('dht', () => {
   })
 
   describe('put', () => {
-    it('should callback with error for DHT not available', async () => {
-      let res
-      try {
-        res = await ipfs.dht.put(Buffer.from('a'), Buffer.from('b'))
-      } catch (err) {
-        expect(err).to.exist()
-        expect(err.code).to.equal('ERR_DHT_DISABLED')
-      }
-
-      expect(res).to.not.exist()
+    it('should error when DHT not available', async () => {
+      await ipfs.dht.put(Buffer.from('a'), Buffer.from('b')).then(
+        () => expect.fail('Should have thrown'),
+        (err) => expect(err.code).to.equal('ERR_DHT_DISABLED')
+      )
     })
   })
 })
diff --git a/test/core/gc.spec.js b/test/core/gc.spec.js
index 42022f3fed..fef9c208bb 100644
--- a/test/core/gc.spec.js
+++ b/test/core/gc.spec.js
@@ -7,7 +7,6 @@ const IPFSFactory = require('ipfsd-ctl')
 const pEvent = require('p-event')
 const env = require('ipfs-utils/src/env')
 const IPFS = require('../../src/core')
-const { Errors } = require('interface-datastore')
 
 // We need to detect when a readLock or writeLock is requested for the tests
 // so we override the Mutex class to emit an event
@@ -189,11 +188,11 @@ describe('gc', function () {
     await rm1
 
     // Second rm should fail because GC has already removed that block
-    try {
-      await rm2
-    } catch (err) {
-      expect(err.code).eql(Errors.dbDeleteFailedError().code)
-    }
+    const results = await rm2
+    const result = results
+      .filter(result => result.hash === cid2.toString())
+      .pop()
+    expect(result).to.have.property('error').that.contains('block not found')
 
     // Confirm second block has been removed
     const localRefs = (await ipfs.refs.local()).map(r => r.ref)
diff --git a/test/core/libp2p.spec.js b/test/core/libp2p.spec.js
index f159db7a02..3b14d3e61e 100644
--- a/test/core/libp2p.spec.js
+++ b/test/core/libp2p.spec.js
@@ -316,7 +316,7 @@ describe('libp2p customization', function () {
       })
     })
 
-    it('select floodsub as pubsub router if node', (done) => {
+    it('select floodsub as pubsub router if node', async () => {
       const ipfs = {
         _repo: {
           datastore
@@ -334,21 +334,18 @@ describe('libp2p customization', function () {
         }
       }
 
-      try {
-        _libp2p = libp2pComponent(ipfs, customConfig)
-      } catch (err) {
-        if (!isNode) {
-          expect(err).to.exist()
-          expect(err.code).to.eql('ERR_NOT_SUPPORTED')
-          done()
-        }
+      if (!isNode) {
+        await libp2pComponent(ipfs, customConfig).then(
+          () => expect.fail('Should have thrown'),
+          (err) => expect(err.code).to.eql('ERR_NOT_SUPPORTED')
+        )
       }
 
-      _libp2p.start((err) => {
-        expect(err).to.not.exist()
-        expect(_libp2p._modules.pubsub).to.eql(require('libp2p-floodsub'))
-        done()
-      })
+      _libp2p = libp2pComponent(ipfs, customConfig)
+
+      await _libp2p.start()
+
+      expect(_libp2p._modules.pubsub).to.eql(require('libp2p-floodsub'))
     })
   })
 })
diff --git a/test/core/name-pubsub.js b/test/core/name-pubsub.js
index 0e5fdd59ad..7e4b9a26a7 100644
--- a/test/core/name-pubsub.js
+++ b/test/core/name-pubsub.js
@@ -128,11 +128,10 @@ describe('name-pubsub', function () {
     const resolvesEmpty = await nodeB.name.resolve(idB.id)
     expect(resolvesEmpty).to.be.eq(emptyDirCid)
 
-    try {
-      await nodeA.name.resolve(idB.id)
-    } catch (error) {
-      expect(error).to.exist()
-    }
+    await nodeA.name.resolve(idB.id).then(
+      () => expect.fail('should have thrown'),
+      (err) => expect(err.code).to.equal('ERR_NO_RECORD_FOUND')
+    )
 
     const publish = await nodeB.name.publish(path)
     expect(publish).to.be.eql({
diff --git a/test/core/pubsub.spec.js b/test/core/pubsub.spec.js
index 1fc46623bc..050b33e124 100644
--- a/test/core/pubsub.spec.js
+++ b/test/core/pubsub.spec.js
@@ -48,14 +48,13 @@ describe('pubsub disabled', () => {
   })
 
   it('should not allow subscribe if disabled (promised)', async () => {
-    try {
-      const topic = hat()
-      const handler = () => { throw new Error('unexpected message') }
-      await ipfs.pubsub.subscribe(topic, handler)
-    } catch (err) {
-      return expect(err.code).to.equal('ERR_PUBSUB_DISABLED')
-    }
-    throw new Error('expected error to be thrown')
+    const topic = hat()
+    const handler = () => { throw new Error('unexpected message') }
+
+    await ipfs.pubsub.subscribe(topic, handler).then(
+      () => expect.fail('should have thrown'),
+      (err) => expect(err.code).to.equal('ERR_PUBSUB_DISABLED')
+    )
   })
 
   it('should not allow unsubscribe if disabled', done => {
@@ -69,14 +68,13 @@ describe('pubsub disabled', () => {
   })
 
   it('should not allow unsubscribe if disabled (promised)', async () => {
-    try {
-      const topic = hat()
-      const handler = () => { throw new Error('unexpected message') }
-      await ipfs.pubsub.unsubscribe(topic, handler)
-    } catch (err) {
-      return expect(err.code).to.equal('ERR_PUBSUB_DISABLED')
-    }
-    throw new Error('expected error to be thrown')
+    const topic = hat()
+    const handler = () => { throw new Error('unexpected message') }
+
+    await ipfs.pubsub.unsubscribe(topic, handler).then(
+      () => expect.fail('should have thrown'),
+      (err) => expect(err.code).to.equal('ERR_PUBSUB_DISABLED')
+    )
   })
 
   it('should not allow publish if disabled', done => {
@@ -90,14 +88,13 @@ describe('pubsub disabled', () => {
   })
 
   it('should not allow publish if disabled (promised)', async () => {
-    try {
-      const topic = hat()
-      const msg = Buffer.from(hat())
-      await ipfs.pubsub.publish(topic, msg)
-    } catch (err) {
-      return expect(err.code).to.equal('ERR_PUBSUB_DISABLED')
-    }
-    throw new Error('expected error to be thrown')
+    const topic = hat()
+    const msg = Buffer.from(hat())
+
+    await ipfs.pubsub.publish(topic, msg).then(
+      () => expect.fail('should have thrown'),
+      (err) => expect(err.code).to.equal('ERR_PUBSUB_DISABLED')
+    )
   })
 
   it('should not allow ls if disabled', done => {
@@ -109,12 +106,10 @@ describe('pubsub disabled', () => {
   })
 
   it('should not allow ls if disabled (promised)', async () => {
-    try {
-      await ipfs.pubsub.ls()
-    } catch (err) {
-      return expect(err.code).to.equal('ERR_PUBSUB_DISABLED')
-    }
-    throw new Error('expected error to be thrown')
+    await ipfs.pubsub.ls().then(
+      () => expect.fail('should have thrown'),
+      (err) => expect(err.code).to.equal('ERR_PUBSUB_DISABLED')
+    )
   })
 
   it('should not allow peers if disabled', done => {
@@ -127,18 +122,17 @@ describe('pubsub disabled', () => {
   })
 
   it('should not allow peers if disabled (promised)', async () => {
-    try {
-      const topic = hat()
-      await ipfs.pubsub.peers(topic)
-    } catch (err) {
-      return expect(err.code).to.equal('ERR_PUBSUB_DISABLED')
-    }
-    throw new Error('expected error to be thrown')
+    const topic = hat()
+
+    await ipfs.pubsub.peers(topic).then(
+      () => expect.fail('should have thrown'),
+      (err) => expect(err.code).to.equal('ERR_PUBSUB_DISABLED')
+    )
   })
 
-  it('should not allow setMaxListeners if disabled', async () => {
+  it('should not allow setMaxListeners if disabled', () => {
     try {
-      await ipfs.pubsub.setMaxListeners(100)
+      ipfs.pubsub.setMaxListeners(100)
     } catch (err) {
       return expect(err.code).to.equal('ERR_PUBSUB_DISABLED')
     }
diff --git a/test/core/utils.js b/test/core/utils.js
index a4d00e329f..f79b4346ff 100644
--- a/test/core/utils.js
+++ b/test/core/utils.js
@@ -53,11 +53,7 @@ describe('utils', () => {
   })
 
   it('normalize path with no ipfs path, nor ipns path nor cid should throw an exception', function () {
-    try {
-      utils.normalizePath(`/${rootHash}/`)
-    } catch (err) {
-      expect(err).to.exist()
-    }
+    expect(() => utils.normalizePath(`/${rootHash}/`)).to.throw()
   })
 
   it('normalize path should return an ipfs path, when an ipfs path is provided', function () {