Skip to content

Commit

Permalink
Merge pull request #15373 from webpack/fix/issue-14907
Browse files Browse the repository at this point in the history
if cache pack is too big, we should batch writing
  • Loading branch information
sokra committed Feb 15, 2022
2 parents 18c3590 + 7badefd commit ba4e83c
Show file tree
Hide file tree
Showing 2 changed files with 44 additions and 13 deletions.
53 changes: 44 additions & 9 deletions lib/serialization/FileMiddleware.js
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,8 @@ Section -> Buffer

// "wpc" + 1 in little-endian
const VERSION = 0x01637077;
const WRITE_LIMIT_TOTAL = 0x7fff0000;
const WRITE_LIMIT_CHUNK = 511 * 1024 * 1024;

/**
* @param {Buffer[]} buffers buffers
Expand Down Expand Up @@ -87,7 +89,7 @@ const readUInt64LE = Buffer.prototype.readBigUInt64LE
* @param {FileMiddleware} middleware this
* @param {BufferSerializableType[] | Promise<BufferSerializableType[]>} data data to be serialized
* @param {string | boolean} name file base name
* @param {function(string | false, Buffer[]): Promise<void>} writeFile writes a file
* @param {function(string | false, Buffer[], number): Promise<void>} writeFile writes a file
* @param {string | Hash} hashFunction hash function to use
* @returns {Promise<SerializeResult>} resulting file pointer and promise
*/
Expand Down Expand Up @@ -212,9 +214,9 @@ const serialize = async (
if (name === true) {
name = hashForName(buf, hashFunction);
}
backgroundJobs.push(writeFile(name, buf));
let size = 0;
for (const b of buf) size += b.length;
backgroundJobs.push(writeFile(name, buf, size));
return {
size,
name,
Expand Down Expand Up @@ -422,7 +424,7 @@ class FileMiddleware extends SerializerMiddleware {
// It's important that we don't touch existing files during serialization
// because serialize may read existing files (when deserializing)
const allWrittenFiles = new Set();
const writeFile = async (name, content) => {
const writeFile = async (name, content, size) => {
const file = name
? join(this.fs, filename, `../${name}${extension}`)
: filename;
Expand All @@ -441,10 +443,7 @@ class FileMiddleware extends SerializerMiddleware {
[zConstants.BROTLI_PARAM_MODE]: zConstants.BROTLI_MODE_TEXT,
[zConstants.BROTLI_PARAM_QUALITY]: 2,
[zConstants.BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING]: true,
[zConstants.BROTLI_PARAM_SIZE_HINT]: content.reduce(
(size, b) => size + b.length,
0
)
[zConstants.BROTLI_PARAM_SIZE_HINT]: size
}
});
}
Expand All @@ -456,8 +455,44 @@ class FileMiddleware extends SerializerMiddleware {
stream.on("error", err => reject(err));
stream.on("finish", () => resolve());
}
for (const b of content) stream.write(b);
stream.end();
// split content into chunks of at most WRITE_LIMIT_CHUNK bytes each
const chunks = [];
for (const b of content) {
if (b.length < WRITE_LIMIT_CHUNK) {
chunks.push(b);
} else {
for (let i = 0; i < b.length; i += WRITE_LIMIT_CHUNK) {
chunks.push(b.slice(i, i + WRITE_LIMIT_CHUNK));
}
}
}

const len = chunks.length;
let i = 0;
// Writes the prepared chunks to the stream in batches whose combined
// size stays within WRITE_LIMIT_TOTAL, then schedules the next batch
// by passing itself as the write-completion callback of the batch's
// last chunk. This keeps the amount of data queued on the stream at
// any one time bounded (presumably to stay under Node's per-write
// buffer limits — TODO confirm the exact limit WRITE_LIMIT_TOTAL maps to).
// `err` is the error forwarded by stream.write's callback, if any.
const batchWrite = err => {
// will be handled in "on" error handler
// (the "error" listener registered above rejects the surrounding
// Promise, so nothing more to do here)
if (err) return;

// all chunks written: end the stream; the "finish" listener resolves
if (i === len) {
stream.end();
return;
}

// queue up a batch of chunks up to the write limit
// end is exclusive
let end = i;
// the first chunk is always taken, even if it alone exceeds the
// limit (chunks were pre-split to at most WRITE_LIMIT_CHUNK bytes)
let sum = chunks[end++].length;
while (end < len) {
sum += chunks[end].length;
if (sum > WRITE_LIMIT_TOTAL) break;
end++;
}
// write every chunk of this batch except the last without a callback…
while (i < end - 1) {
stream.write(chunks[i++]);
}
// …and attach batchWrite to the last one, so the next batch is only
// queued once this batch has been handed off by the stream
stream.write(chunks[i++], batchWrite);
};
batchWrite();
});
if (name) allWrittenFiles.add(file);
};
Expand Down
4 changes: 0 additions & 4 deletions test/TestCasesCachePack.longtest.js
Original file line number Diff line number Diff line change
Expand Up @@ -25,10 +25,6 @@ describe("TestCases", () => {
["no-string"]:
/^Pack got invalid because of write to: Compilation\/modules.+no-string[/\\]loader\.js!.+no-string[/\\]file\.js$/
},
large: {
["big-assets"]:
/^Pack got invalid because of write to: ResolverCachePlugin|normal|dependencyType=|esm|path=|.+|request=|\.\/large\/big-assets\/$/
},
parsing: {
// Module parse failed
context:
Expand Down

0 comments on commit ba4e83c

Please sign in to comment.