remove ts deps
vemonet committed Jul 23, 2023
1 parent 8a672eb commit 7de3e9d
Showing 5 changed files with 146 additions and 316 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/test.yml
@@ -23,7 +23,7 @@ jobs:
- name: Build action
run: npm run build

- - name: Check dist up-to-date
+ - name: Check dist/index.js is up-to-date with src/setup-spark.ts
run: git diff --exit-code dist


14 changes: 3 additions & 11 deletions CONTRIBUTING.md
@@ -21,7 +21,7 @@ git checkout -b my-branch

Check out the only important file! The mighty [`src/setup-spark.ts`](https://github.com/vemonet/setup-spark/blob/main/src/setup-spark.ts). The Spark installation is inspired by the [jupyter/docker-stacks pyspark Dockerfile](https://github.com/jupyter/docker-stacks/blob/master/pyspark-notebook/Dockerfile).

- 1. Install:
+ 1. Install dependencies:

```bash
npm install
@@ -33,7 +33,7 @@ npm install
npm run build
```

- 3. Commit and push the generated `index.js` file with the rest of the modified files
+ 3. Commit and push the generated `dist/index.js` file with the rest of the modified files

## ☑️ Test

@@ -53,14 +53,12 @@ Format the code with prettier:
npm run fmt
```

- Run linting checks:
+ Run eslint checks:

```bash
npm run test
```

## 📜 Check dependencies licenses

Third party dependencies licenses are checked automatically by a GitHub Action workflow using [Licensed](https://github.com/github/licensed).

## 🔼 Updating dependencies
@@ -85,12 +83,6 @@ npm audit fix --force

Commit, push and check if the GitHub action tests are passing.

- ## 🕊️ Pull request process
-
- 1. Before sending a pull request, make sure the project still work as expected with the new changes properly integrated
- 2. [Send a pull request](https://github.com/vemonet/setup-spark/compare) to the `main` branch 📤
- 3. Project contributors will review your change, and answer the pull request as soon as they can

## 🏷️ Publish new version

Create a new release on GitHub following semantic versioning, e.g. `v1.2.0`
278 changes: 138 additions & 140 deletions dist/index.js
@@ -1,135 +1,6 @@
/******/ (() => { // webpackBootstrap
/******/ var __webpack_modules__ = ({

- /***/ 9559:
- /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
-
- // test
-
- "use strict";
-
- var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- var desc = Object.getOwnPropertyDescriptor(m, k);
- if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
- desc = { enumerable: true, get: function() { return m[k]; } };
- }
- Object.defineProperty(o, k2, desc);
- }) : (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- o[k2] = m[k];
- }));
- var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
- Object.defineProperty(o, "default", { enumerable: true, value: v });
- }) : function(o, v) {
- o["default"] = v;
- });
- var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
- __setModuleDefault(result, mod);
- return result;
- };
- var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
- };
- Object.defineProperty(exports, "__esModule", ({ value: true }));
- const core = __importStar(__nccwpck_require__(2186));
- const tc = __importStar(__nccwpck_require__(7784));
- const fs = __importStar(__nccwpck_require__(7147));
- // See docs to create JS action: https://docs.github.com/en/actions/creating-actions/creating-a-javascript-action
- const log = (msg) => {
- core.info(`${new Date().toLocaleTimeString('fr-FR')} - ${msg}`);
- };
- function run() {
- return __awaiter(this, void 0, void 0, function* () {
- try {
- const sparkVersion = core.getInput('spark-version');
- const hadoopVersion = core.getInput('hadoop-version');
- const scalaVersion = core.getInput('scala-version');
- const py4jVersion = core.getInput('py4j-version');
- let sparkUrl = core.getInput('spark-url');
- // Try to write to the parent folder of the workflow workspace
- const workspaceFolder = process.env.GITHUB_WORKSPACE || '/home/runner/work';
- let installFolder = workspaceFolder.split('/').slice(0, -1).join('/');
- try {
- fs.accessSync(installFolder, fs.constants.R_OK);
- }
- catch (err) {
- log(`Using $GITHUB_WORKSPACE to store Spark (${installFolder} not writable)`);
- installFolder = workspaceFolder;
- }
- log(`Spark will be installed to ${installFolder}`);
- const scalaBit = scalaVersion ? `-scala${scalaVersion}` : '';
- let sparkHome = `${installFolder}/spark-${sparkVersion}-bin-hadoop${hadoopVersion}${scalaBit}`;
- const cachedSpark = tc.find('spark', sparkVersion);
- if (cachedSpark) {
- log(`Using Spark from cache ${cachedSpark}`);
- sparkHome = cachedSpark;
- }
- else if (!sparkUrl) {
- // If URL not provided directly, we try to download from official recommended https://spark.apache.org/downloads.html
- sparkUrl = `https://dlcdn.apache.org/spark/spark-${sparkVersion}/spark-${sparkVersion}-bin-hadoop${hadoopVersion}${scalaBit}.tgz`;
- try {
- yield download(sparkUrl, installFolder);
- }
- catch (error) {
- log(`Faster recommended download URL not available, downloading from Apache Archives.`);
- sparkUrl = `https://archive.apache.org/dist/spark/spark-${sparkVersion}/spark-${sparkVersion}-bin-hadoop${hadoopVersion}${scalaBit}.tgz`;
- yield download(sparkUrl, installFolder);
- }
- }
- else {
- // URL provided directly by user
- yield download(sparkUrl, installFolder);
- }
- if (!fs.existsSync(`${sparkHome}/bin/spark-submit`)) {
- throw new Error(`The Spark binary was not properly downloaded from ${sparkUrl}`);
- }
- log(`Binary downloaded, setting up environment variables`);
- const SPARK_OPTS = `--driver-java-options=-Xms1024M --driver-java-options=-Xmx2048M --driver-java-options=-Dlog4j.logLevel=info`;
- const PYTHONPATH = `${sparkHome}/python:${sparkHome}/python/lib/py4j-${py4jVersion}-src.zip`;
- const PYSPARK_PYTHON = 'python';
- // Set environment variables in the workflow
- core.exportVariable('SPARK_HOME', sparkHome);
- core.exportVariable('HADOOP_VERSION', hadoopVersion);
- core.exportVariable('APACHE_SPARK_VERSION', sparkVersion);
- core.exportVariable('PYSPARK_PYTHON', PYSPARK_PYTHON);
- core.exportVariable('PYSPARK_DRIVER_PYTHON', PYSPARK_PYTHON);
- core.exportVariable('PYTHONPATH', PYTHONPATH);
- core.exportVariable('SPARK_OPTS', SPARK_OPTS);
- // Add Spark to path
- core.addPath(`${sparkHome}/bin`);
- yield tc.cacheDir(sparkHome, 'spark', sparkVersion);
- core.setOutput('spark-version', sparkVersion);
- }
- catch (error) {
- log(`Issue installing Spark: check if the Spark version and Hadoop versions you are using are part of the ones proposed on the Spark download page at https://spark.apache.org/downloads.html`);
- core.error(error);
- core.setFailed(error.message);
- }
- });
- }
- // Helper function to download and unzip spark binary
- function download(url, installFolder) {
- return __awaiter(this, void 0, void 0, function* () {
- log(`Downloading Spark binary from ${url} to ${installFolder}`);
- const zipPath = yield tc.downloadTool(url);
- yield tc.extractTar(zipPath, installFolder);
- });
- }
- run();
-
-
- /***/ }),

/***/ 7351:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

@@ -685,8 +556,8 @@ class OidcClient {
const res = yield httpclient
.getJson(id_token_url)
.catch(error => {
- throw new Error(`Failed to get ID Token. \n
- Error Code : ${error.statusCode}\n
+ throw new Error(`Failed to get ID Token. \n
+ Error Code : ${error.statusCode}\n
Error Message: ${error.result.message}`);
});
const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value;
@@ -6692,6 +6563,133 @@ function version(uuid) {
var _default = version;
exports["default"] = _default;

/***/ }),

+ /***/ 8737:
+ /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+ "use strict";
+
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+ }) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+ }));
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+ }) : function(o, v) {
+ o["default"] = v;
+ });
+ var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+ };
+ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+ };
+ Object.defineProperty(exports, "__esModule", ({ value: true }));
+ const core = __importStar(__nccwpck_require__(2186));
+ const tc = __importStar(__nccwpck_require__(7784));
+ const fs = __importStar(__nccwpck_require__(7147));
+ // See docs to create JS action: https://docs.github.com/en/actions/creating-actions/creating-a-javascript-action
+ const log = (msg) => {
+ core.info(`${new Date().toLocaleTimeString('fr-FR')} - ${msg}`);
+ };
+ function run() {
+ return __awaiter(this, void 0, void 0, function* () {
+ try {
+ const sparkVersion = core.getInput('spark-version');
+ const hadoopVersion = core.getInput('hadoop-version');
+ const scalaVersion = core.getInput('scala-version');
+ const py4jVersion = core.getInput('py4j-version');
+ let sparkUrl = core.getInput('spark-url');
+ // Try to write to the parent folder of the workflow workspace
+ const workspaceFolder = process.env.GITHUB_WORKSPACE || '/home/runner/work';
+ let installFolder = workspaceFolder.split('/').slice(0, -1).join('/');
+ try {
+ fs.accessSync(installFolder, fs.constants.R_OK);
+ }
+ catch (err) {
+ log(`Using $GITHUB_WORKSPACE to store Spark (${installFolder} not writable)`);
+ installFolder = workspaceFolder;
+ }
+ log(`Spark will be installed to ${installFolder}`);
+ const scalaBit = scalaVersion ? `-scala${scalaVersion}` : '';
+ let sparkHome = `${installFolder}/spark-${sparkVersion}-bin-hadoop${hadoopVersion}${scalaBit}`;
+ const cachedSpark = tc.find('spark', sparkVersion);
+ if (cachedSpark) {
+ log(`Using Spark from cache ${cachedSpark}`);
+ sparkHome = cachedSpark;
+ }
+ else if (!sparkUrl) {
+ // If URL not provided directly, we try to download from official recommended https://spark.apache.org/downloads.html
+ sparkUrl = `https://dlcdn.apache.org/spark/spark-${sparkVersion}/spark-${sparkVersion}-bin-hadoop${hadoopVersion}${scalaBit}.tgz`;
+ try {
+ yield download(sparkUrl, installFolder);
+ }
+ catch (error) {
+ log(`Faster recommended download URL not available, downloading from Apache Archives.`);
+ sparkUrl = `https://archive.apache.org/dist/spark/spark-${sparkVersion}/spark-${sparkVersion}-bin-hadoop${hadoopVersion}${scalaBit}.tgz`;
+ yield download(sparkUrl, installFolder);
+ }
+ }
+ else {
+ // URL provided directly by user
+ yield download(sparkUrl, installFolder);
+ }
+ if (!fs.existsSync(`${sparkHome}/bin/spark-submit`)) {
+ throw new Error(`The Spark binary was not properly downloaded from ${sparkUrl}`);
+ }
+ log(`Binary downloaded, setting up environment variables`);
+ const SPARK_OPTS = `--driver-java-options=-Xms1024M --driver-java-options=-Xmx2048M --driver-java-options=-Dlog4j.logLevel=info`;
+ const PYTHONPATH = `${sparkHome}/python:${sparkHome}/python/lib/py4j-${py4jVersion}-src.zip`;
+ const PYSPARK_PYTHON = 'python';
+ // Set environment variables in the workflow
+ core.exportVariable('SPARK_HOME', sparkHome);
+ core.exportVariable('HADOOP_VERSION', hadoopVersion);
+ core.exportVariable('APACHE_SPARK_VERSION', sparkVersion);
+ core.exportVariable('PYSPARK_PYTHON', PYSPARK_PYTHON);
+ core.exportVariable('PYSPARK_DRIVER_PYTHON', PYSPARK_PYTHON);
+ core.exportVariable('PYTHONPATH', PYTHONPATH);
+ core.exportVariable('SPARK_OPTS', SPARK_OPTS);
+ // Add Spark to path
+ core.addPath(`${sparkHome}/bin`);
+ yield tc.cacheDir(sparkHome, 'spark', sparkVersion);
+ core.setOutput('spark-version', sparkVersion);
+ }
+ catch (error) {
+ log(`Issue installing Spark: check if the Spark version and Hadoop versions you are using are part of the ones proposed on the Spark download page at https://spark.apache.org/downloads.html`);
+ core.error(error);
+ core.setFailed(error.message);
+ }
+ });
+ }
+ // Helper function to download and unzip spark binary
+ function download(url, installFolder) {
+ return __awaiter(this, void 0, void 0, function* () {
+ log(`Downloading Spark binary from ${url} to ${installFolder}`);
+ const zipPath = yield tc.downloadTool(url);
+ yield tc.extractTar(zipPath, installFolder);
+ });
+ }
+ run();
+
+
+ /***/ }),

/***/ 9491:
@@ -6818,7 +6816,7 @@ module.exports = require("util");
/************************************************************************/
/******/ // The module cache
/******/ var __webpack_module_cache__ = {};
- /******/
+ /******/
/******/ // The require function
/******/ function __nccwpck_require__(moduleId) {
/******/ // Check if module is in cache
@@ -6832,7 +6830,7 @@ module.exports = require("util");
/******/ // no module.loaded needed
/******/ exports: {}
/******/ };
- /******/
+ /******/
/******/ // Execute the module function
/******/ var threw = true;
/******/ try {
@@ -6841,23 +6839,23 @@ module.exports = require("util");
/******/ } finally {
/******/ if(threw) delete __webpack_module_cache__[moduleId];
/******/ }
- /******/
+ /******/
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
- /******/
+ /******/
/************************************************************************/
/******/ /* webpack/runtime/compat */
- /******/
+ /******/
/******/ if (typeof __nccwpck_require__ !== 'undefined') __nccwpck_require__.ab = __dirname + "/";
- /******/
+ /******/
/************************************************************************/
- /******/
+ /******/
/******/ // startup
/******/ // Load entry module and return exports
/******/ // This entry module is referenced by other modules so it can't be inlined
- /******/ var __webpack_exports__ = __nccwpck_require__(9559);
+ /******/ var __webpack_exports__ = __nccwpck_require__(8737);
/******/ module.exports = __webpack_exports__;
- /******/
+ /******/
/******/ })()
;
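
To make the regenerated bundle easier to follow, here is a sketch of the TypeScript source it compiles from, reconstructed from the bundled module above (the require IDs `2186`, `7784` and `7147` presumably resolve to `@actions/core`, `@actions/tool-cache` and Node's `fs`). The logic mirrors the compiled output, but the import style and type annotations are assumptions, not the verbatim `src/setup-spark.ts`:

```ts
import * as core from '@actions/core'
import * as tc from '@actions/tool-cache'
import * as fs from 'fs'

// Log with a timestamp, matching the bundled output above
const log = (msg: string): void => {
  core.info(`${new Date().toLocaleTimeString('fr-FR')} - ${msg}`)
}

// Download and untar the Spark binary
async function download(url: string, installFolder: string): Promise<void> {
  log(`Downloading Spark binary from ${url} to ${installFolder}`)
  const zipPath = await tc.downloadTool(url)
  await tc.extractTar(zipPath, installFolder)
}

async function run(): Promise<void> {
  try {
    const sparkVersion = core.getInput('spark-version')
    const hadoopVersion = core.getInput('hadoop-version')
    const scalaVersion = core.getInput('scala-version')
    const py4jVersion = core.getInput('py4j-version')
    let sparkUrl = core.getInput('spark-url')

    // Prefer the parent folder of the workflow workspace, fall back to the workspace itself
    const workspaceFolder = process.env.GITHUB_WORKSPACE || '/home/runner/work'
    let installFolder = workspaceFolder.split('/').slice(0, -1).join('/')
    try {
      fs.accessSync(installFolder, fs.constants.R_OK)
    } catch (err) {
      log(`Using $GITHUB_WORKSPACE to store Spark (${installFolder} not writable)`)
      installFolder = workspaceFolder
    }
    log(`Spark will be installed to ${installFolder}`)

    const scalaBit = scalaVersion ? `-scala${scalaVersion}` : ''
    let sparkHome = `${installFolder}/spark-${sparkVersion}-bin-hadoop${hadoopVersion}${scalaBit}`

    const cachedSpark = tc.find('spark', sparkVersion)
    if (cachedSpark) {
      // Reuse a Spark install cached by a previous run
      log(`Using Spark from cache ${cachedSpark}`)
      sparkHome = cachedSpark
    } else if (!sparkUrl) {
      // No URL provided: try the recommended CDN first, then fall back to the Apache archives
      sparkUrl = `https://dlcdn.apache.org/spark/spark-${sparkVersion}/spark-${sparkVersion}-bin-hadoop${hadoopVersion}${scalaBit}.tgz`
      try {
        await download(sparkUrl, installFolder)
      } catch (error) {
        log(`Faster recommended download URL not available, downloading from Apache Archives.`)
        sparkUrl = `https://archive.apache.org/dist/spark/spark-${sparkVersion}/spark-${sparkVersion}-bin-hadoop${hadoopVersion}${scalaBit}.tgz`
        await download(sparkUrl, installFolder)
      }
    } else {
      // URL provided directly by the user
      await download(sparkUrl, installFolder)
    }

    if (!fs.existsSync(`${sparkHome}/bin/spark-submit`)) {
      throw new Error(`The Spark binary was not properly downloaded from ${sparkUrl}`)
    }
    log(`Binary downloaded, setting up environment variables`)

    // Export the variables Spark and PySpark expect in later workflow steps
    core.exportVariable('SPARK_HOME', sparkHome)
    core.exportVariable('HADOOP_VERSION', hadoopVersion)
    core.exportVariable('APACHE_SPARK_VERSION', sparkVersion)
    core.exportVariable('PYSPARK_PYTHON', 'python')
    core.exportVariable('PYSPARK_DRIVER_PYTHON', 'python')
    core.exportVariable('PYTHONPATH', `${sparkHome}/python:${sparkHome}/python/lib/py4j-${py4jVersion}-src.zip`)
    core.exportVariable('SPARK_OPTS', '--driver-java-options=-Xms1024M --driver-java-options=-Xmx2048M --driver-java-options=-Dlog4j.logLevel=info')

    // Put spark-submit & co on the PATH and cache the install for later runs
    core.addPath(`${sparkHome}/bin`)
    await tc.cacheDir(sparkHome, 'spark', sparkVersion)
    core.setOutput('spark-version', sparkVersion)
  } catch (error) {
    log(`Issue installing Spark: check if the Spark and Hadoop versions you are using are part of the ones proposed on https://spark.apache.org/downloads.html`)
    core.error(error as Error) // cast assumed for strict "unknown" catch typing
    core.setFailed((error as Error).message)
  }
}

run()
```

Running `npm run build` compiles and bundles this source into `dist/index.js`, which is what the `Check dist/index.js is up-to-date with src/setup-spark.ts` workflow step above guards with `git diff --exit-code dist`.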
