Stream directly to disk
The current implementation downloads everything into memory and extracts
the zip archive in memory before copying files to the filesystem. This can
consume a huge amount of memory when artifacts are large. This change
streams the zip file directly to disk and extracts it without first loading
the entire archive into memory.
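
As a rough sketch of that pattern (not this commit's exact code), streaming a download to disk and then extracting it lazily with yauzl can look like the following; the URL, file paths, and error handling are simplified placeholders:

// Minimal sketch: stream a zip to disk over HTTPS, then walk its entries
// lazily with yauzl so the archive is never buffered in memory as a whole.
// The URL and paths below are hypothetical placeholders.
const fs = require('fs')
const pathname = require('path')
const https = require('follow-redirects').https
const yauzl = require('yauzl')

// Download `requestUrl` straight into the file at `saveTo`.
function downloadToFile(requestUrl, saveTo) {
    return new Promise((resolve, reject) => {
        const file = fs.createWriteStream(saveTo)
        https.get(requestUrl, (response) => {
            response.on('error', reject)
            response.pipe(file)                      // bytes hit disk as they arrive
            file.on('finish', () => { file.close(); resolve() })
            file.on('error', reject)
        })
    })
}

// Extract `zipPath` into `destDir`, one entry at a time.
function extractZip(zipPath, destDir) {
    yauzl.open(zipPath, { lazyEntries: true }, (err, zipfile) => {
        if (err) throw err
        zipfile.readEntry()
        zipfile.on('entry', (entry) => {
            const filepath = pathname.join(destDir, entry.fileName)
            if (/\/$/.test(entry.fileName)) {
                // Directory entries end with '/'.
                fs.mkdirSync(filepath, { recursive: true })
                zipfile.readEntry()
            } else {
                fs.mkdirSync(pathname.dirname(filepath), { recursive: true })
                zipfile.openReadStream(entry, (err, readStream) => {
                    if (err) throw err
                    readStream.on('end', () => zipfile.readEntry())  // advance to next entry
                    readStream.pipe(fs.createWriteStream(filepath))
                })
            }
        })
    })
}

// Example usage (hypothetical URL):
// downloadToFile('https://example.com/artifact.zip', 'artifact.zip')
//     .then(() => extractZip('artifact.zip', 'artifact'))

The actual change in main.js additionally threads the GitHub token through the request headers and skips zip entries whose paths resolve outside the target directory.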
gabriel-samfira committed Aug 30, 2022
1 parent fa2f5f1 commit a22d1b0
Showing 120 changed files with 6,918 additions and 2,913 deletions.
116 changes: 100 additions & 16 deletions main.js
@@ -1,9 +1,11 @@
const core = require('@actions/core')
-const github = require('@actions/github')
-const AdmZip = require('adm-zip')
const filesize = require('filesize')
-const pathname = require('path')
const fs = require('fs')
+const github = require('@actions/github')
+const https = require('follow-redirects').https;
+const pathname = require('path')
+const url = require('url')
+const yauzl = require("yauzl");

async function main() {
try {
@@ -196,34 +198,116 @@ async function main() {

core.info(`==> Downloading: ${artifact.name}.zip (${size})`)

-const zip = await client.rest.actions.downloadArtifact({
+let saveTo = `${pathname.join(path, artifact.name)}.zip`
+if (!fs.existsSync(path)) {
+    fs.mkdirSync(path, { recursive: true })
+}
+
+let request = client.rest.actions.downloadArtifact.endpoint({
    owner: owner,
    repo: repo,
    artifact_id: artifact.id,
    archive_format: "zip",
-})
+});

+const sendGetRequest = async () => {
+    return new Promise(resolve => {
+        const options = {
+            hostname: url.parse(request.url).hostname,
+            path: url.parse(request.url).pathname,
+            headers: {
+                ...request.headers,
+                Authorization: `token ${token}`,
+            }
+        }
+        const file = fs.createWriteStream(saveTo);
+        https.get(options, (response) => {
+            response.on('error', function(err) {
+                core.info(`error downloading: ${err}`);
+                resolve()
+            })
+            response.pipe(file);
+            file.on("finish", () => {
+                file.close();
+                core.info("Download Completed");
+                resolve()
+            });
+            file.on("error", (err) => {
+                core.info(`error saving file: ${err}`);
+                resolve()
+            })
+        });
+    })
+}
+
+await sendGetRequest();
+
if (skipUnpack) {
-    fs.mkdirSync(path, { recursive: true })
-    fs.writeFileSync(`${pathname.join(path, artifact.name)}.zip`, Buffer.from(zip.data), 'binary')
    continue
}

const dir = name ? path : pathname.join(path, artifact.name)
+if (!fs.existsSync(dir)) {
+    fs.mkdirSync(dir, { recursive: true })
+}

-fs.mkdirSync(dir, { recursive: true })
-const adm = new AdmZip(Buffer.from(zip.data))
-core.startGroup(`==> Extracting: ${artifact.name}.zip`)
-adm.getEntries().forEach((entry) => {
-    const action = entry.isDirectory ? "creating" : "inflating"
-    const filepath = pathname.join(dir, entry.entryName)
-    core.info(`  ${action}: ${filepath}`)
-})
-adm.extractAllTo(dir, true)
+core.startGroup(`==> Extracting: ${artifact.name}.zip`)
+yauzl.open(saveTo, {lazyEntries: true}, function(err, zipfile) {
+    if (err) throw err;
+    zipfile.readEntry();
+    zipfile.on("entry", function(entry) {
+        const filepath = pathname.resolve(pathname.join(dir, entry.fileName))
+
+        // Make sure the zip is properly crafted.
+        const relative = pathname.relative(dir, filepath);
+        const isInPath = relative && !relative.startsWith('..') && !pathname.isAbsolute(relative);
+        if (!isInPath) {
+            core.info(` ==> Path ${filepath} resolves outside of ${dir}, skipping`)
+            zipfile.readEntry();
+            return
+        }
+
+        // The zip may contain the directory names for newly created files.
+        if (/\/$/.test(entry.fileName)) {
+            // Directory file names end with '/'.
+            // Note that entries for directories themselves are optional.
+            // An entry's fileName implicitly requires its parent directories to exist.
+            if (!fs.existsSync(filepath)) {
+                core.info(` ==> Creating: ${filepath}`)
+                fs.mkdirSync(filepath, { recursive: true })
+            }
+            zipfile.readEntry();
+        } else {
+            // This is a file entry. Attempt to extract it.
+            core.info(` ==> Extracting: ${entry.fileName}`)
+
+            // Ensure the parent folder exists
+            let dirName = pathname.dirname(filepath)
+            if (!fs.existsSync(dirName)) {
+                core.info(` ==> Creating: ${dirName}`)
+                fs.mkdirSync(dirName, { recursive: true })
+            }
+            zipfile.openReadStream(entry, (err, readStream) => {
+                if (err) throw err;
+
+                readStream.on("end", () => {
+                    zipfile.readEntry();
+                });
+                readStream.on("error", (err) => {
+                    throw new Error(`Failed to extract ${entry.fileName}: ${err}`)
+                });
+
+                const file = fs.createWriteStream(filepath);
+                readStream.pipe(file);
+                file.on("finish", () => {
+                    file.close();
+                });
+                file.on("error", (err) => {
+                    throw new Error(`Failed to extract ${entry.fileName}: ${err}`)
+                });
+            });
+        }
+    });
+});
core.endGroup()
}
} catch (error) {
1 change: 1 addition & 0 deletions node_modules/.bin/uuid

74 changes: 62 additions & 12 deletions node_modules/.package-lock.json

10 changes: 9 additions & 1 deletion node_modules/@actions/core/lib/core.js
