
chore: remove try-catch from tests where functions are async
We had a few instances in the tests where assertions could be missed
because functions did not throw where we thought they would, so this
PR refactors that code to use `.then(onResult, onReject)` instead.

Follows on from #2514 (comment)
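To illustrate the pattern being replaced (a minimal sketch, assuming a hypothetical `doSomethingAsync` helper and error code - not code from this repository):

// Before: if doSomethingAsync() unexpectedly resolves, the catch block - and
// every assertion inside it - is silently skipped and the test still passes.
it('rejects', async () => {
  try {
    await doSomethingAsync()
  } catch (err) {
    expect(err.code).to.equal('ERR_EXPECTED')
  }
})

// After: expect.fail() runs when the promise resolves, so the test can no
// longer pass without the rejection assertion executing.
it('rejects', async () => {
  await doSomethingAsync().then(
    () => expect.fail('Should have thrown'),
    (err) => expect(err.code).to.equal('ERR_EXPECTED')
  )
})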
achingbrain committed Oct 13, 2019
1 parent 50f3667 commit d76f147
Showing 11 changed files with 136 additions and 161 deletions.
12 changes: 10 additions & 2 deletions src/core/components/pubsub.js
@@ -33,7 +33,11 @@ module.exports = function pubsub (self) {
return
}

checkOnlineAndEnabled()
try {
checkOnlineAndEnabled()
} catch (err) {
return Promise.reject(err)
}

return self.libp2p.pubsub.subscribe(topic, handler, options)
},
@@ -50,7 +54,11 @@ module.exports = function pubsub (self) {
return
}

checkOnlineAndEnabled()
try {
checkOnlineAndEnabled()
} catch (err) {
return Promise.reject(err)
}

return self.libp2p.pubsub.unsubscribe(topic, handler)
},
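The pubsub change above serves the same goal from the API side: `subscribe` and `unsubscribe` are expected to return a promise, but `checkOnlineAndEnabled()` throws synchronously, so without the wrapper a promise-style caller (one attaching `.catch()` rather than wrapping the call in try/catch) never sees the error as a rejection. A minimal sketch of the failure mode (illustrative caller code, not from this repository):

// If subscribe() throws synchronously, the throw escapes before any promise
// exists, so this rejection handler is never invoked:
ipfs.pubsub.subscribe(topic, handler).catch(err => console.error(err))

// Catching the synchronous throw and returning Promise.reject(err) keeps the
// promise contract, and the same caller now receives the error via rejection.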
11 changes: 11 additions & 0 deletions src/http/api/resources/files-regular.js
@@ -336,6 +336,17 @@ exports.refs = {
const unique = request.query.unique
const maxDepth = request.query['max-depth']

// This is because ndjson.serialize writes a single \n into the response even if
// the stream was empty - in the client we strip the \n from response and try to
// parse what's left as JSON which then explodes, so if we know we're not going
// to send anything back, shortcut the streaming of refs.
if (maxDepth <= 0) {
return h.response()
.header('x-chunked-output', '1')
.header('content-type', 'application/json')
.header('Trailer', 'X-Stream-Error')
}

const source = ipfs.refsPullStream(key, { recursive, format, edges, unique, maxDepth })
return sendRefsReplyStream(request, h, `refs for ${key}`, source)
}
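The comment in the hunk above refers to the client-side parsing step; the failure it guards against is roughly (illustrative, not the actual client code):

// ndjson.serialize writes a lone '\n' for an empty stream; the client strips
// the trailing newline and is left parsing an empty string, which throws:
JSON.parse('') // SyntaxError: Unexpected end of JSON input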
103 changes: 41 additions & 62 deletions test/cli/daemon.js
@@ -104,14 +104,13 @@ describe('daemon', () => {
}
})

try {
await daemon
throw new Error('Did not kill process')
} catch (err) {
expect(err.killed).to.be.true()

expect(stdout).to.include('Daemon is ready')
}
await daemon.then(
() => expect.fail('Did not kill process'),
(err) => {
expect(err.killed).to.be.true()
expect(stdout).to.include('Daemon is ready')
}
)
})

it('should allow bind to multiple addresses for API and Gateway', async function () {
@@ -142,15 +141,15 @@ describe('daemon', () => {
}
})

try {
await daemon
throw new Error('Did not kill process')
} catch (err) {
expect(err.killed).to.be.true()
await daemon.then(
() => expect.fail('Did not kill process'),
(err) => {
expect(err.killed).to.be.true()

apiAddrs.forEach(addr => expect(err.stdout).to.include(`API listening on ${addr.slice(0, -2)}`))
gatewayAddrs.forEach(addr => expect(err.stdout).to.include(`Gateway (read only) listening on ${addr.slice(0, -2)}`))
}
apiAddrs.forEach(addr => expect(err.stdout).to.include(`API listening on ${addr.slice(0, -2)}`))
gatewayAddrs.forEach(addr => expect(err.stdout).to.include(`Gateway (read only) listening on ${addr.slice(0, -2)}`))
}
)
})

it('should allow no bind addresses for API and Gateway', async function () {
@@ -171,15 +170,15 @@
}
})

try {
await daemon
throw new Error('Did not kill process')
} catch (err) {
expect(err.killed).to.be.true()
await daemon.then(
() => expect.fail('Did not kill process'),
(err) => {
expect(err.killed).to.be.true()

expect(err.stdout).to.not.include('API listening on')
expect(err.stdout).to.not.include('Gateway (read only) listening on')
}
expect(err.stdout).to.not.include('API listening on')
expect(err.stdout).to.not.include('Gateway (read only) listening on')
}
)
})

skipOnWindows('should handle SIGINT gracefully', async function () {
@@ -211,32 +210,14 @@ describe('daemon', () => {
await ipfs('init')

const daemon = ipfs('daemon --silent')
const stop = async (err) => {
daemon.kill()

if (err) {
throw err
}

try {
await daemon
} catch (err) {
if (!err.killed) {
throw err
}
}
}
setTimeout(() => {
daemon.kill()
}, 5 * 1000)

return new Promise((resolve, reject) => {
daemon.stdout.on('data', (data) => {
reject(new Error('Output was received ' + data.toString('utf8')))
})
const output = await daemon

setTimeout(() => {
resolve()
}, 5 * 1000)
})
.then(stop, stop)
expect(output).to.be.empty()
})

it('should present ipfs path help when option help is received', async function () {
@@ -262,16 +243,16 @@
}
})

try {
await daemon
throw new Error('Did not kill process')
} catch (err) {
expect(err.killed).to.be.true()
await daemon.then(
() => expect.fail('Did not kill process'),
(err) => {
expect(err.killed).to.be.true()

expect(err.stdout).to.include(`js-ipfs version: ${pkg.version}`)
expect(err.stdout).to.include(`System version: ${os.arch()}/${os.platform()}`)
expect(err.stdout).to.include(`Node.js version: ${process.versions.node}`)
}
expect(err.stdout).to.include(`js-ipfs version: ${pkg.version}`)
expect(err.stdout).to.include(`System version: ${os.arch()}/${os.platform()}`)
expect(err.stdout).to.include(`Node.js version: ${process.versions.node}`)
}
)
})

it('should init by default', async function () {
@@ -290,12 +271,10 @@
}
})

try {
await daemon
throw new Error('Did not kill process')
} catch (err) {
expect(err.killed).to.be.true()
}
await daemon.then(
() => expect.fail('Did not kill process'),
(err) => expect(err.killed).to.be.true()
)

expect(fs.existsSync(repoPath)).to.be.true()
})
9 changes: 4 additions & 5 deletions test/cli/init.js
@@ -83,11 +83,10 @@ describe('init', function () {
it('profile non-existent', async function () {
this.timeout(40 * 1000)

try {
await ipfs('init --profile doesnt-exist')
} catch (err) {
expect(err.stdout).includes('Could not find profile')
}
await ipfs('init --profile doesnt-exist').then(
() => expect.fail('Should have thrown'),
(err) => expect(err.stdout).includes('Could not find profile')
)
})

it('should present ipfs path help when option help is received', async function () {
24 changes: 11 additions & 13 deletions test/core/create-node.spec.js
@@ -171,20 +171,18 @@ describe('create node', function () {

expect(ipfs.isOnline()).to.be.false()

try {
await ipfs.ready
} catch (err) {
expect(ipfs.isOnline()).to.be.false()

// After the error has occurred, it should still reject
try {
await ipfs.ready
} catch (_) {
return
}
}
await ipfs.ready.then(
() => expect.fail('Should have thrown'),
(err) => expect(err.message).to.contain('Expected modulus bit count >= 512')
)

expect(ipfs.isOnline()).to.be.false()

throw new Error('ready promise did not reject')
// After the error has occurred, it should still reject
await ipfs.ready.then(
() => expect.fail('Should have thrown'),
(err) => expect(err.message).to.contain('Expected modulus bit count >= 512')
)
})

it('should create a ready node with IPFS.create', async () => {
15 changes: 5 additions & 10 deletions test/core/dht.spec.js
@@ -74,16 +74,11 @@ describe.skip('dht', () => {
})

describe('put', () => {
it('should callback with error for DHT not available', async () => {
let res
try {
res = await ipfs.dht.put(Buffer.from('a'), Buffer.from('b'))
} catch (err) {
expect(err).to.exist()
expect(err.code).to.equal('ERR_DHT_DISABLED')
}

expect(res).to.not.exist()
it('should error when DHT not available', async () => {
await ipfs.dht.put(Buffer.from('a'), Buffer.from('b')).then(
() => expect.fail('Should have thrown'),
(err) => expect(err.code).to.equal('ERR_DHT_DISABLED')
)
})
})
})
11 changes: 5 additions & 6 deletions test/core/gc.spec.js
@@ -7,7 +7,6 @@ const IPFSFactory = require('ipfsd-ctl')
const pEvent = require('p-event')
const env = require('ipfs-utils/src/env')
const IPFS = require('../../src/core')
const { Errors } = require('interface-datastore')

// We need to detect when a readLock or writeLock is requested for the tests
// so we override the Mutex class to emit an event
@@ -189,11 +188,11 @@ describe('gc', function () {
await rm1

// Second rm should fail because GC has already removed that block
try {
await rm2
} catch (err) {
expect(err.code).eql(Errors.dbDeleteFailedError().code)
}
const results = await rm2
const result = results
.filter(result => result.hash === cid2.toString())
.pop()
expect(result).to.have.property('error').that.contains('block not found')

// Confirm second block has been removed
const localRefs = (await ipfs.refs.local()).map(r => r.ref)
25 changes: 11 additions & 14 deletions test/core/libp2p.spec.js
@@ -316,7 +316,7 @@ describe('libp2p customization', function () {
})
})

it('select floodsub as pubsub router if node', (done) => {
it('select floodsub as pubsub router if node', async () => {
const ipfs = {
_repo: {
datastore
@@ -334,21 +334,18 @@
}
}

try {
_libp2p = libp2pComponent(ipfs, customConfig)
} catch (err) {
if (!isNode) {
expect(err).to.exist()
expect(err.code).to.eql('ERR_NOT_SUPPORTED')
done()
}
if (!isNode) {
await libp2pComponent(ipfs, customConfig).then(
() => expect.fail('Should have thrown'),
(err) => expect(err.code).to.eql('ERR_NOT_SUPPORTED')
)
}

_libp2p.start((err) => {
expect(err).to.not.exist()
expect(_libp2p._modules.pubsub).to.eql(require('libp2p-floodsub'))
done()
})
_libp2p = libp2pComponent(ipfs, customConfig)

await _libp2p.start()

expect(_libp2p._modules.pubsub).to.eql(require('libp2p-floodsub'))
})
})
})
9 changes: 4 additions & 5 deletions test/core/name-pubsub.js
@@ -128,11 +128,10 @@ describe('name-pubsub', function () {
const resolvesEmpty = await nodeB.name.resolve(idB.id)
expect(resolvesEmpty).to.be.eq(emptyDirCid)

try {
await nodeA.name.resolve(idB.id)
} catch (error) {
expect(error).to.exist()
}
await nodeA.name.resolve(idB.id).then(
() => expect.fail('should have thrown'),
(err) => expect(err.code).to.equal('ERR_NO_RECORD_FOUND')
)

const publish = await nodeB.name.publish(path)
expect(publish).to.be.eql({

