fix: faster direct read approach #1537

Merged: 1 commit, Jul 10, 2022
103 changes: 78 additions & 25 deletions lib/fetch/body.js
@@ -258,6 +258,29 @@ function cloneBody (body) {
   }
 }

+async function * consumeBody (body) {
+  if (body) {
+    if (isUint8Array(body)) {
+      yield body
+    } else {
+      const stream = body.stream
+
+      if (util.isDisturbed(stream)) {
+        throw new TypeError('disturbed')
+      }
+
+      if (stream.locked) {
+        throw new TypeError('locked')
+      }
+
+      // Compat.
+      stream[kBodyUsed] = true
+
+      yield * stream
+    }
+  }
+}
+
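The new consumeBody helper centralizes the disturbed/locked guards that blob() previously inlined (and that arrayBuffer() and text() now reuse). A minimal standalone sketch of the locked guard, using plain web streams rather than undici internals (Node ≥ 18, where ReadableStream is global):

```js
// Illustrative only: mirrors the `stream.locked` check in consumeBody.
const stream = new ReadableStream({
  start (controller) {
    controller.enqueue(new Uint8Array([1, 2, 3]))
    controller.close()
  }
})

const reader = stream.getReader() // acquiring a reader locks the stream
console.log(stream.locked) // true; consumeBody would throw TypeError('locked')
reader.releaseLock()
console.log(stream.locked) // false; iterating the stream is safe again
```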
 function bodyMixinMethods (instance) {
   const methods = {
     async blob () {
@@ -267,27 +290,10 @@ function bodyMixinMethods (instance) {

       const chunks = []

-      if (this[kState].body) {
-        if (isUint8Array(this[kState].body)) {
-          chunks.push(this[kState].body)
-        } else {
-          const stream = this[kState].body.stream
-
-          if (util.isDisturbed(stream)) {
-            throw new TypeError('disturbed')
-          }
-
-          if (stream.locked) {
-            throw new TypeError('locked')
-          }
-
-          // Compat.
-          stream[kBodyUsed] = true
-
-          for await (const chunk of stream) {
-            chunks.push(chunk)
-          }
-        }
-      }
+      for await (const chunk of consumeBody(this[kState].body)) {
+        // Assembling one final large Blob from Uint8Arrays can exhaust
+        // memory, so we create many small Blobs and reference those instead.
+        chunks.push(new Blob([chunk]))
+      }

       return new Blob(chunks, { type: this.headers.get('Content-Type') || '' })
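The per-chunk wrapping above relies on Blob composing from other Blobs by reference, as the comment notes. A tiny illustration of that composition (not undici code; Blob is global on Node ≥ 18):

```js
// Each chunk becomes its own small Blob; the final Blob just references them.
const parts = [
  new Blob([new Uint8Array([104, 105])]), // 'hi'
  new Blob([new Uint8Array([33])]) // '!'
]
const combined = new Blob(parts, { type: 'text/plain' })
console.log(combined.size) // 3
combined.text().then(console.log) // 'hi!'
```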
@@ -298,17 +304,64 @@ function bodyMixinMethods (instance) {
       if (!(this instanceof instance)) {
         throw new TypeError('Illegal invocation')
       }

-      const blob = await this.blob()
-      return await blob.arrayBuffer()
+      const contentLength = this.headers.get('content-length')
+      const encoded = this.headers.has('content-encoding')
+
+      // If we have a content length and no content encoding, we can
+      // pre-allocate the buffer and read the data directly into it.
+      if (!encoded && contentLength) {
+        const buffer = new Uint8Array(contentLength)
+        let offset = 0
+
+        for await (const chunk of consumeBody(this[kState].body)) {
+          buffer.set(chunk, offset)
+          offset += chunk.length
+        }
+
+        return buffer.buffer
+      }
+
+      // Without a content length we have to allocate roughly 2x the body
+      // size: once for the consumed chunks and once for the final buffer.
+
+      // This could be optimized with a growable ArrayBuffer, but that is
+      // not implemented yet. https://github.com/tc39/proposal-resizablearraybuffer
+
+      const chunks = []
+      let size = 0
+
+      for await (const chunk of consumeBody(this[kState].body)) {
+        chunks.push(chunk)
+        size += chunk.byteLength
+      }
+
+      const buffer = new Uint8Array(size)
+      let offset = 0
+
+      for (const chunk of chunks) {
+        buffer.set(chunk, offset)
+        offset += chunk.byteLength
+      }
+
+      return buffer.buffer
     },
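To see the two allocation strategies in isolation, here is a hypothetical helper (readAll is not part of undici; the name and signature are made up for illustration) that takes any async iterable of Uint8Array chunks plus an optional trusted byte count:

```js
// Hypothetical sketch of the fast/slow paths above, outside the mixin.
async function readAll (iterable, knownLength) {
  if (knownLength != null) {
    // Fast path: a single allocation, chunks copied straight into place.
    const buffer = new Uint8Array(Number(knownLength))
    let offset = 0
    for await (const chunk of iterable) {
      buffer.set(chunk, offset)
      offset += chunk.byteLength
    }
    return buffer.buffer
  }

  // Slow path: buffer every chunk, then copy once into a right-sized array.
  const chunks = []
  let size = 0
  for await (const chunk of iterable) {
    chunks.push(chunk)
    size += chunk.byteLength
  }

  const buffer = new Uint8Array(size)
  let offset = 0
  for (const chunk of chunks) {
    buffer.set(chunk, offset)
    offset += chunk.byteLength
  }
  return buffer.buffer
}
```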

     async text () {
       if (!(this instanceof instance)) {
         throw new TypeError('Illegal invocation')
       }

-      const blob = await this.blob()
-      return toUSVString(await blob.text())
+      let result = ''
+      const textDecoder = new TextDecoder()
+
+      for await (const chunk of consumeBody(this[kState].body)) {
+        result += textDecoder.decode(chunk, { stream: true })
+      }
+
+      // Flush any partial multi-byte sequence still buffered.
+      result += textDecoder.decode()
+
+      return result
     },
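One subtlety in the new text(): decoding with { stream: true } and then flushing handles multi-byte UTF-8 sequences that are split across chunk boundaries. A self-contained demonstration (illustrative only):

```js
// The euro sign is 0xE2 0x82 0xAC in UTF-8; here it arrives split in two.
const decoder = new TextDecoder()
const chunks = [new Uint8Array([0xE2, 0x82]), new Uint8Array([0xAC])]

let out = ''
for (const chunk of chunks) {
  out += decoder.decode(chunk, { stream: true }) // buffers partial sequences
}
out += decoder.decode() // final flush emits anything still buffered

console.log(out) // '€'
```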

     async json () {
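Taken together, the rewritten paths are exercised through the ordinary mixin methods; a quick usage sketch with undici's fetch (the URL is a placeholder):

```js
// Usage sketch: arrayBuffer() takes the pre-allocated fast path whenever
// the response carries Content-Length and no Content-Encoding.
import { fetch } from 'undici'

const res = await fetch('https://example.com/data.bin')
const buf = await res.arrayBuffer()
console.log(buf.byteLength)
```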