fix: faster direct read approach (nodejs#1537)
jimmywarting authored and metcoder95 committed Dec 26, 2022
1 parent 16ba6ca commit ad3fa60
Showing 1 changed file with 78 additions and 25 deletions.
103 changes: 78 additions & 25 deletions lib/fetch/body.js
@@ -258,6 +258,29 @@ function cloneBody (body) {
}
}

async function * consumeBody (body) {
if (body) {
if (isUint8Array(body)) {
yield body
} else {
const stream = body.stream

if (util.isDisturbed(stream)) {
throw new TypeError('disturbed')
}

if (stream.locked) {
throw new TypeError('locked')
}

// Compat.
stream[kBodyUsed] = true

yield * stream
}
}
}
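
// Editor's sketch (illustrative, not part of the commit): every mixin method
// below drains the body exactly once through this generator, e.g.:
//
//   for await (const chunk of consumeBody(this[kState].body)) {
//     handle(chunk) // hypothetical consumer
//   }
//
// A second read attempt then fails, because the generator has either marked
// the stream with kBodyUsed or left it disturbed.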

function bodyMixinMethods (instance) {
const methods = {
async blob () {
@@ -267,27 +290,10 @@ function bodyMixinMethods (instance) {

const chunks = []

if (this[kState].body) {
if (isUint8Array(this[kState].body)) {
chunks.push(this[kState].body)
} else {
const stream = this[kState].body.stream

if (util.isDisturbed(stream)) {
throw new TypeError('disturbed')
}

if (stream.locked) {
throw new TypeError('locked')
}

// Compat.
stream[kBodyUsed] = true

for await (const chunk of stream) {
chunks.push(chunk)
}
}
for await (const chunk of consumeBody(this[kState].body)) {
// Assembling one final large Blob out of all the Uint8Array chunks can
// exhaust memory. That's why we create multiple small Blobs and let the
// final Blob hold references to them instead.
chunks.push(new Blob([chunk]))
}

return new Blob(chunks, { type: this.headers.get('Content-Type') || '' })
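
// Editor's note (assumption, not part of the commit): this pattern relies on
// the Blob constructor composing Blob parts by reference rather than eagerly
// copying their bytes, e.g. new Blob([blobA, blobB]) should not duplicate the
// underlying chunk data, keeping peak memory near one copy of the body.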
@@ -298,17 +304,64 @@ function bodyMixinMethods (instance) {
throw new TypeError('Illegal invocation')
}

const blob = await this.blob()
return await blob.arrayBuffer()
const contentLength = this.headers.get('content-length')
const encoded = this.headers.has('content-encoding')

// If we have a content-length and no content-encoding, we can pre-allocate
// the buffer and read the data directly into it.
if (!encoded && contentLength) {
const buffer = new Uint8Array(contentLength)
let offset = 0

for await (const chunk of consumeBody(this[kState].body)) {
buffer.set(chunk, offset)
offset += chunk.length
}

return buffer.buffer
}

// If we don't have a content-length, we have to allocate roughly 2x the
// size of the body: once for the consumed chunks and once for the final buffer.

// This could be optimized with a resizable ArrayBuffer, but that isn't
// implemented yet. https://github.com/tc39/proposal-resizablearraybuffer
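//
// Editor's sketch of that future optimization (assumes the proposal's
// ArrayBuffer.prototype.resize API; hypothetical, not runnable here):
//
//   const ab = new ArrayBuffer(0, { maxByteLength: 64 * 1024 * 1024 })
//   const view = new Uint8Array(ab) // length-tracking view over ab
//   for await (const chunk of consumeBody(this[kState].body)) {
//     const offset = ab.byteLength
//     ab.resize(offset + chunk.byteLength) // grow in place
//     view.set(chunk, offset)
//   }
//   return ab // single allocation, no second copy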

const chunks = []
let size = 0

for await (const chunk of consumeBody(this[kState].body)) {
chunks.push(chunk)
size += chunk.byteLength
}

const buffer = new Uint8Array(size)
let offset = 0

for (const chunk of chunks) {
buffer.set(chunk, offset)
offset += chunk.byteLength
}

return buffer.buffer
},

async text () {
if (!(this instanceof instance)) {
throw new TypeError('Illegal invocation')
}

const blob = await this.blob()
return toUSVString(await blob.text())
let result = ''
const textDecoder = new TextDecoder()

for await (const chunk of consumeBody(this[kState].body)) {
result += textDecoder.decode(chunk, { stream: true })
}

// flush
result += textDecoder.decode()

return result
},
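
// Editor's note (illustrative, not part of the commit): { stream: true } is
// what keeps multi-byte UTF-8 sequences split across chunk boundaries intact:
//
//   const td = new TextDecoder()
//   td.decode(new Uint8Array([0xe2, 0x82]), { stream: true }) // ''
//   td.decode(new Uint8Array([0xac]))                         // '€' (U+20AC)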

async json () {
