Make the exit type configurable #27

Merged (1 commit) on Jun 21, 2022
11 changes: 11 additions & 0 deletions README.md
@@ -74,3 +74,14 @@ To force an API call, set the `GITHUB_TOKEN` environment variable like so:
count: 2
labels: "community-reviewed, team-reviewed, codeowner-reviewed"
```

### Exit with a neutral result rather than failure

```yaml
- uses: mheap/github-action-required-labels@v1
with:
mode: minimum
count: 2
labels: "community-reviewed, team-reviewed, codeowner-reviewed"
exit_type: neutral # Can be: failure, neutral or success
```
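
In practice, the snippet above would sit inside a workflow that runs when labels change. A minimal sketch for context; the workflow name, trigger events, and job id are illustrative assumptions and are not part of this diff:

```yaml
# Hypothetical workflow wrapping the usage snippet from this README change.
# The name, trigger events, and job id are assumptions for illustration only.
name: Require labels
on:
  pull_request:
    types: [opened, labeled, unlabeled, synchronize]

jobs:
  label-check:
    runs-on: ubuntu-latest
    steps:
      - uses: mheap/github-action-required-labels@v1
        with:
          mode: minimum
          count: 2
          labels: "community-reviewed, team-reviewed, codeowner-reviewed"
          exit_type: neutral # Can be: failure, neutral or success
```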
18 changes: 15 additions & 3 deletions index.js
@@ -9,6 +9,8 @@ Toolkit.run(async (tools) => {
.map((l) => l.trim())
.filter((r) => r);

const exitType = tools.inputs.exit_type || "failure";

// Validate inputs
if (tools.inputs.count === "") {
tools.exit.failure(`[count] input is not provided`);
@@ -28,6 +30,16 @@ Toolkit.run(async (tools) => {
return;
}

const allowedExitCodes = ["success", "neutral", "failure"];
if (!allowedExitCodes.includes(exitType)) {
tools.exit.failure(
`Unknown exit_code input [${exitType}]. Must be one of: ${allowedExitCodes.join(
", "
)}`
);
return;
}

// If a token is provided, call the API, otherwise read the event.json file
let labels;
if (process.env.GITHUB_TOKEN) {
@@ -43,7 +55,7 @@ Toolkit.run(async (tools) => {
let intersection = allowedLabels.filter((x) => appliedLabels.includes(x));

if (mode === "exactly" && intersection.length !== count) {
tools.exit.failure(
tools.exit[exitType](
`Label error. Requires exactly ${count} of: ${allowedLabels.join(
", "
)}. Found: ${appliedLabels.join(", ")}`
@@ -52,7 +64,7 @@ Toolkit.run(async (tools) => {
}

if (mode === "minimum" && intersection.length < count) {
tools.exit.failure(
tools.exit[exitType](
`Label error. Requires at least ${count} of: ${allowedLabels.join(
", "
)}. Found: ${appliedLabels.join(", ")}`
@@ -61,7 +73,7 @@ Toolkit.run(async (tools) => {
}

if (mode === "maximum" && intersection.length > count) {
tools.exit.failure(
tools.exit[exitType](
`Label error. Requires at most ${count} of: ${allowedLabels.join(
", "
)}. Found: ${appliedLabels.join(", ")}`
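The core of this change is the `tools.exit[exitType](...)` calls above: the exit method is looked up by name from the validated `exit_type` input. A minimal standalone sketch of that dispatch pattern, using a stand-in `exit` object rather than the real actions-toolkit one:

```js
// Stand-in for tools.exit; the real actions-toolkit object ends the run
// with the matching conclusion, this one only logs for illustration.
const exit = {
  success: (msg) => console.log(`success: ${msg}`),
  neutral: (msg) => console.log(`neutral: ${msg}`),
  failure: (msg) => console.log(`failure: ${msg}`),
};

const allowedExitCodes = ["success", "neutral", "failure"];

function finish(exitType, message) {
  // Validate first so an unknown value never reaches the dynamic lookup.
  if (!allowedExitCodes.includes(exitType)) {
    exit.failure(`Unknown exit_code input [${exitType}]`);
    return;
  }
  // Same dispatch as the diff: pick the exit method by its name.
  exit[exitType](message);
}

finish("neutral", "Label error. Requires at least 2 of: community-reviewed");
```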
78 changes: 78 additions & 0 deletions index.test.js
@@ -35,6 +35,7 @@ describe("Required Labels", () => {
};
tools.exit.success = jest.fn();
tools.exit.failure = jest.fn();
tools.exit.neutral = jest.fn();
});

afterEach(() => {
@@ -170,6 +171,21 @@ describe("Required Labels", () => {
"Unknown mode input [bananas]. Must be one of: exactly, minimum, maximum"
);
});

it("unknown exit_code", () => {
restoreTest = mockPr(tools, [], {
INPUT_MODE: "exactly",
INPUT_LABELS: "enhancement,bug",
INPUT_COUNT: "1",
INPUT_EXIT_TYPE: "other",
});

action(tools);
expect(tools.exit.failure).toBeCalledTimes(1);
expect(tools.exit.failure).toBeCalledWith(
"Unknown exit_code input [other]. Must be one of: success, neutral, failure"
);
});
});

describe("data integrity", () => {
@@ -210,6 +226,68 @@ describe("Required Labels", () => {
expect(tools.exit.success).toBeCalledWith("Complete");
});
});

describe("configurable exit code", () => {
it("defaults to failure", () => {
// Create a new Toolkit instance
restoreTest = mockPr(tools, ["enhancement", "bug"], {
INPUT_LABELS: "enhancement,bug",
INPUT_MODE: "exactly",
INPUT_COUNT: "1",
});

action(tools);
expect(tools.exit.failure).toBeCalledTimes(1);
expect(tools.exit.failure).toBeCalledWith(
"Label error. Requires exactly 1 of: enhancement, bug. Found: enhancement, bug"
);
});

it("explicitly uses failure", () => {
restoreTest = mockPr(tools, ["enhancement", "bug"], {
INPUT_LABELS: "enhancement,bug",
INPUT_MODE: "exactly",
INPUT_COUNT: "1",
INPUT_EXIT_TYPE: "failure",
});

action(tools);
expect(tools.exit.failure).toBeCalledTimes(1);
expect(tools.exit.failure).toBeCalledWith(
"Label error. Requires exactly 1 of: enhancement, bug. Found: enhancement, bug"
);
});

it("explicitly uses success", () => {
restoreTest = mockPr(tools, ["enhancement", "bug"], {
INPUT_LABELS: "enhancement,bug",
INPUT_MODE: "exactly",
INPUT_COUNT: "1",
INPUT_EXIT_TYPE: "success",
});

action(tools);
expect(tools.exit.success).toBeCalledTimes(1);
expect(tools.exit.success).toBeCalledWith(
"Label error. Requires exactly 1 of: enhancement, bug. Found: enhancement, bug"
);
});

it("explicitly uses neutral", () => {
restoreTest = mockPr(tools, ["enhancement", "bug"], {
INPUT_LABELS: "enhancement,bug",
INPUT_MODE: "exactly",
INPUT_COUNT: "1",
INPUT_EXIT_TYPE: "neutral",
});

action(tools);
expect(tools.exit.neutral).toBeCalledTimes(1);
expect(tools.exit.neutral).toBeCalledWith(
"Label error. Requires exactly 1 of: enhancement, bug. Found: enhancement, bug"
);
});
});
});

function mockPr(tools, labels, env) {