This repository has been archived by the owner on Mar 10, 2020. It is now read-only.

Header work #128

Merged 2 commits on Nov 21, 2015
src/request-api.js (23 changes: 9 additions & 14 deletions)

@@ -16,6 +16,7 @@ function onRes (buffer, cb) {

const stream = !!res.headers['x-stream-output']
const chunkedObjects = !!res.headers['x-chunked-output']
+ const isJson = res.headers['content-type'] === 'application/json'

if (res.statusCode >= 400 || !res.statusCode) {
const error = new Error(`Server responded with ${res.statusCode}`)
@@ -33,24 +34,18 @@ function onRes (buffer, cb) {

if (chunkedObjects) {
const parsed = []
- res.on('data', chunk => parsed.push(JSON.parse(chunk)))
+ res.on('data', chunk => {
+   try {
+     parsed.push(JSON.parse(chunk))
+   } catch (err) {
+     // Browser quirks emit more than needed sometimes
Contributor:

Emit more than needed? What does this exactly mean? :)

Contributor (author):

If you have 3 chunks, you sometimes get:

  • chunk1
  • chunk2
  • chunk3
  • chunk1+chunk2 => not parsable

Do not ask me why, browsers are a weird bunch..

Contributor:

What I'm thinking is that "stream" and "chunked" should not exist at the same time. "chunked" is a stream made of small parts of the same file; the parts should not be parsed individually. "stream", on the other hand, is the ndjson stuff :)

Contributor (author):

Correct, but "chunked" is represented as a (node) stream where each individual emitted event is equal to one chunk; that's why we are parsing it this way.
+   }
+ })
res.on('end', () => cb(null, parsed))
return
}

- Wreck.read(res, null, (err, payload) => {
-   if (err) return cb(err)
-
-   let parsed
-
-   try {
-     parsed = JSON.parse(payload.toString())
-   } catch (err2) {
-     parsed = payload.toString()
-   }
-
-   cb(null, parsed)
- })
+ Wreck.read(res, {json: isJson}, cb)
}
}
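
The quirk described in the thread above is easy to reproduce: each chunk parses fine on its own, but a re-emitted concatenation of two chunks is not valid JSON, which is exactly why the new handler swallows the parse error instead of crashing the stream. A minimal standalone sketch (plain Node.js, illustrative only, not part of this diff):

    // A single chunk is valid JSON on its own:
    console.log(JSON.parse('{"Name":"chunk1"}')) // { Name: 'chunk1' }

    // Two chunks delivered as one 'data' event are not valid JSON,
    // so JSON.parse throws a SyntaxError, which the guarded push drops:
    try {
      JSON.parse('{"Name":"chunk1"}{"Name":"chunk2"}')
    } catch (err) {
      console.log('not parsable:', err.message)
    }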

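For context, Wreck is hapi's HTTP utility library; in the callback-style API this codebase uses, its read helpers accept a json option that parses the payload when the response is JSON and otherwise leaves it as a raw Buffer, which is what lets the hand-rolled parse-or-fallback block collapse into the one-liner above. A usage sketch (the json option's behavior is stated as I understand the 2015-era wreck API; endpoint and printed value shown for illustration):

    const Wreck = require('wreck')

    // With json enabled, wreck parses the payload of JSON responses;
    // non-JSON payloads come back as a Buffer rather than a string.
    Wreck.get('http://127.0.0.1:5001/api/v0/version', { json: true }, (err, response, payload) => {
      if (err) throw err
      console.log(payload) // e.g. { Version: '0.3.9', ... }
    })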
tasks/daemons.js (9 changes: 8 additions & 1 deletion)

@@ -57,7 +57,14 @@ function startDisposableDaemons (callback) {
throw err
}

- ipfsNodes[key].setConfig('API', '{"HTTPHeaders": {"Access-Control-Allow-Origin": ["*"]}}', err => {
+ const headers = {
+   HTTPHeaders: {
Contributor:

Let's put a TODO here, plus an issue pointing to it: when ipfs/go-ipfs#1979 is merged, we can remove the config settings in the daemon setup.

Contributor (author):

Agreed.
+     'Access-Control-Allow-Origin': ['*'],
+     'Access-Control-Allow-Headers': ['X-Stream-Output, X-Chunked-Output'],
+     'Access-Control-Expose-Headers': ['X-Stream-Output, X-Chunked-Output']
+   }
+ }
+ ipfsNodes[key].setConfig('API', JSON.stringify(headers), err => {
if (err) {
throw err
}
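
The two new entries are what make the client's header sniffing work from a browser: cross-origin JavaScript may only read safelisted response headers, so without Access-Control-Expose-Headers the x-stream-output / x-chunked-output checks in onRes would always come up empty, even when the daemon sends them. A browser-side sketch of the effect (hypothetical hash, endpoint shown for illustration):

    // From a cross-origin page talking to the local API daemon:
    fetch('http://127.0.0.1:5001/api/v0/cat?arg=QmSomeHashForIllustration')
      .then(res => {
        // Reads as null unless the daemon lists the header in
        // Access-Control-Expose-Headers:
        console.log(res.headers.get('x-stream-output'))
      })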
test/tests.js (27 changes: 5 additions & 22 deletions)

@@ -193,13 +193,6 @@ describe('IPFS Node.js API wrapper tests', () => {
throw err
}

- if (!res.on) {
-   // Just a string
-   assert.equal(res.toString(), testfile)
-   done()
-   return
- }

let buf = ''
res
.on('error', err => { throw err })
@@ -313,7 +306,7 @@ describe('IPFS Node.js API wrapper tests', () => {
throw err
}

- assert.equal(res, '')
+ assert.equal(res, null)
done()
})
})
@@ -391,13 +384,6 @@ describe('IPFS Node.js API wrapper tests', () => {
apiClients['a'].block.get(blorbKey, (err, res) => {
if (err) throw err

- if (!res.on) {
-   // Just a string
-   assert.equal(res.toString(), 'blorb')
-   done()
-   return
- }

let buf = ''
res
.on('data', function (data) { buf += data })
@@ -442,13 +428,6 @@ describe('IPFS Node.js API wrapper tests', () => {
apiClients['a'].object.data(testObjectHash, (err, res) => {
if (err) throw err

- if (!res.on) {
-   // Just a string
-   assert.equal(res.toString(), 'testdata')
-   done()
-   return
- }

let buf = ''
res
.on('error', err => { throw err })
@@ -729,6 +708,8 @@ describe('IPFS Node.js API wrapper tests', () => {
throw err
}

+ assert.equal(typeof res, 'object')

return done()

// non ipns or pk hashes fail to fetch, known bug
@@ -749,6 +730,8 @@
if (err) {
throw err
}

+ assert.equal(typeof res, 'object')
assert(res)
done()
})