Skip to content

Commit

Permalink
Fix possible race condition in the caching of request graph (#9675)
Browse files Browse the repository at this point in the history
  • Loading branch information
benjervis committed Apr 29, 2024
1 parent 3ff10bc commit 03983cb
Show file tree
Hide file tree
Showing 5 changed files with 41 additions and 24 deletions.
15 changes: 15 additions & 0 deletions packages/core/cache/src/FSCache.js
Expand Up @@ -151,6 +151,21 @@ export class FSCache implements Cache {
await Promise.all(writePromises);
}
async deleteLargeBlob(key: string): Promise<void> {
const deletePromises: Promise<void>[] = [];
let i = 0;
let filePath = this.#getFilePath(key, i);
while (await this.fs.exists(filePath)) {
deletePromises.push(this.fs.rimraf(filePath));
i += 1;
filePath = this.#getFilePath(key, i);
}
await Promise.all(deletePromises);
}
async get<T>(key: string): Promise<?T> {
try {
let data = await this.fs.readFile(this._getCachePath(key));
Expand Down
4 changes: 4 additions & 0 deletions packages/core/cache/src/IDBCache.browser.js
Expand Up @@ -133,6 +133,10 @@ export class IDBCache implements Cache {
return this.setBlob(key, contents);
}

async deleteLargeBlob(key: string): Promise<void> {
await (await this.store).delete(STORE_NAME, key);
}

// Intentionally a no-op for the IndexedDB backend — presumably there is no
// cached read state to reset here (compare LMDBCache.refresh, which resets
// its read transaction); confirm against the Cache interface contract.
refresh(): void {
  // NOOP
}
Expand Down
4 changes: 4 additions & 0 deletions packages/core/cache/src/LMDBCache.js
Expand Up @@ -117,6 +117,10 @@ export class LMDBCache implements Cache {
return this.fsCache.setLargeBlob(key, contents, options);
}

deleteLargeBlob(key: string): Promise<void> {
return this.fsCache.deleteLargeBlob(key);
}

refresh(): void {
// Reset the read transaction for the store. This guarantees that
// the next read will see the latest changes to the store.
Expand Down
41 changes: 17 additions & 24 deletions packages/core/core/src/RequestTracker.js
Expand Up @@ -1352,6 +1352,9 @@ export default class RequestTracker {

let serialisedGraph = this.graph.serialize();

// Delete an existing request graph cache, to prevent invalid states
await this.options.cache.deleteLargeBlob(requestGraphKey);

let total = 0;
const serialiseAndSet = async (
key: string,
Expand Down Expand Up @@ -1440,33 +1443,23 @@ export default class RequestTracker {
}
}

queue
.add(() =>
serialiseAndSet(requestGraphKey, {
...serialisedGraph,
nodes: undefined,
}),
)
.catch(() => {
// Handle promise rejection
});
try {
await queue.run();

let opts = getWatcherOptions(this.options);
let snapshotPath = path.join(this.options.cacheDir, snapshotKey + '.txt');
queue
.add(() =>
this.options.inputFS.writeSnapshot(
this.options.watchDir,
snapshotPath,
opts,
),
)
.catch(() => {
// Handle promise rejection
// Set the request graph after the queue is flushed to avoid writing an invalid state
await serialiseAndSet(requestGraphKey, {
...serialisedGraph,
nodes: undefined,
});

try {
await queue.run();
let opts = getWatcherOptions(this.options);
let snapshotPath = path.join(this.options.cacheDir, snapshotKey + '.txt');

await this.options.inputFS.writeSnapshot(
this.options.watchDir,
snapshotPath,
opts,
);
} catch (err) {
// If we have aborted, ignore the error and continue
if (!signal?.aborted) throw err;
Expand Down
1 change: 1 addition & 0 deletions packages/core/types-internal/src/Cache.js
Expand Up @@ -18,6 +18,7 @@ export interface Cache {
contents: Buffer | string,
options?: {|signal?: AbortSignal|},
): Promise<void>;
deleteLargeBlob(key: string): Promise<void>;
getBuffer(key: string): Promise<?Buffer>;
/**
* In a multi-threaded environment, where there are potentially multiple Cache
Expand Down

0 comments on commit 03983cb

Please sign in to comment.