Skip to content

Commit

Permalink
Fix and speedup diff-shades integration (#2773)
Browse the repository at this point in the history
Authored and committed by ichard26 on Jan 20, 2022
1 parent 8c22d23 commit 9bd4134
Show file tree
Hide file tree
Showing 6 changed files with 68 additions and 41 deletions.
14 changes: 14 additions & 0 deletions .github/mypyc-requirements.txt
@@ -0,0 +1,14 @@
mypy == 0.920

# A bunch of packages for type information
mypy-extensions >= 0.4.3
tomli >= 0.10.2
types-typed-ast >= 1.4.2
types-dataclasses >= 0.1.3
typing-extensions > 3.10.0.1
click >= 8.0.0
platformdirs >= 2.1.0

# And because build isolation is disabled, we'll need to pull this too
setuptools-scm[toml] >= 6.3.1
wheel
32 changes: 24 additions & 8 deletions .github/workflows/diff_shades.yml
Expand Up @@ -3,10 +3,10 @@ name: diff-shades
on:
push:
branches: [main]
paths-ignore: ["docs/**", "tests/**", "*.md"]
paths-ignore: ["docs/**", "tests/**", "**.md", "**.rst"]

pull_request:
paths-ignore: ["docs/**", "tests/**", "*.md"]
paths-ignore: ["docs/**", "tests/**", "**.md", "**.rst"]

workflow_dispatch:
inputs:
Expand All @@ -27,10 +27,18 @@ on:
description: "Custom Black arguments (eg. -S)"
required: false

concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
cancel-in-progress: true

jobs:
analysis:
name: analysis / linux
runs-on: ubuntu-latest
env:
# Clang is less picky with the C code it's given than gcc (and may
# generate faster binaries too).
CC: clang-12

steps:
- name: Checkout this repository (full clone)
Expand All @@ -45,6 +53,7 @@ jobs:
python -m pip install pip --upgrade
python -m pip install https://github.com/ichard26/diff-shades/archive/stable.zip
python -m pip install click packaging urllib3
python -m pip install -r .github/mypyc-requirements.txt
# After checking out old revisions, this might not exist so we'll use a copy.
cat scripts/diff_shades_gha_helper.py > helper.py
git config user.name "diff-shades-gha"
Expand All @@ -66,22 +75,28 @@ jobs:
path: ${{ steps.config.outputs.baseline-analysis }}
key: ${{ steps.config.outputs.baseline-cache-key }}

- name: Install baseline revision
- name: Build and install baseline revision
if: steps.baseline-cache.outputs.cache-hit != 'true'
env:
GITHUB_TOKEN: ${{ github.token }}
run: ${{ steps.config.outputs.baseline-setup-cmd }} && python -m pip install .
run: >
${{ steps.config.outputs.baseline-setup-cmd }}
&& python setup.py --use-mypyc bdist_wheel
&& python -m pip install dist/*.whl && rm build dist -r
- name: Analyze baseline revision
if: steps.baseline-cache.outputs.cache-hit != 'true'
run: >
diff-shades analyze -v --work-dir projects-cache/
${{ steps.config.outputs.baseline-analysis }} -- ${{ github.event.inputs.baseline-args }}
- name: Install target revision
- name: Build and install target revision
env:
GITHUB_TOKEN: ${{ github.token }}
run: ${{ steps.config.outputs.target-setup-cmd }} && python -m pip install .
run: >
${{ steps.config.outputs.target-setup-cmd }}
&& python setup.py --use-mypyc bdist_wheel
&& python -m pip install dist/*.whl
- name: Analyze target revision
run: >
Expand Down Expand Up @@ -118,13 +133,14 @@ jobs:
python helper.py comment-body
${{ steps.config.outputs.baseline-analysis }} ${{ steps.config.outputs.target-analysis }}
${{ steps.config.outputs.baseline-sha }} ${{ steps.config.outputs.target-sha }}
${{ github.event.pull_request.number }}
- name: Upload summary file (PR only)
if: github.event_name == 'pull_request'
uses: actions/upload-artifact@v2
with:
name: .pr-comment-body.md
path: .pr-comment-body.md
name: .pr-comment.json
path: .pr-comment.json

# This is last so the diff-shades-comment workflow can still work even if we
# end up detecting failed files and failing the run.
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/diff_shades_comment.yml
Expand Up @@ -31,15 +31,15 @@ jobs:
- name: Try to find pre-existing PR comment
if: steps.metadata.outputs.needs-comment == 'true'
id: find-comment
uses: peter-evans/find-comment@v1
uses: peter-evans/find-comment@d2dae40ed151c634e4189471272b57e76ec19ba8
with:
issue-number: ${{ steps.metadata.outputs.pr-number }}
comment-author: "github-actions[bot]"
body-includes: "diff-shades"

- name: Create or update PR comment
if: steps.metadata.outputs.needs-comment == 'true'
uses: peter-evans/create-or-update-comment@v1
uses: peter-evans/create-or-update-comment@a35cf36e5301d70b76f316e867e7788a55a31dae
with:
comment-id: ${{ steps.find-comment.outputs.comment-id }}
issue-number: ${{ steps.metadata.outputs.pr-number }}
Expand Down
2 changes: 1 addition & 1 deletion docs/contributing/gauging_changes.md
Expand Up @@ -74,7 +74,7 @@ to further information. If there's a pre-existing diff-shades comment, it'll be
instead the next time the workflow is triggered on the same PR.

The workflow uploads 3-4 artifacts upon completion: the two generated analyses (they
have the .json file extension), `diff.html`, and `.pr-comment-body.md` if triggered by a
have the .json file extension), `diff.html`, and `.pr-comment.json` if triggered by a
PR. The last one is downloaded by the `diff-shades-comment` workflow and shouldn't be
downloaded locally. `diff.html` comes in handy for push-based or manually triggered
runs. And the analyses exist just in case you want to do further analysis using the
Expand Down
54 changes: 25 additions & 29 deletions scripts/diff_shades_gha_helper.py
Expand Up @@ -23,7 +23,7 @@
import zipfile
from io import BytesIO
from pathlib import Path
from typing import Any, Dict, Optional, Tuple
from typing import Any, Optional, Tuple

import click
import urllib3
Expand All @@ -34,7 +34,7 @@
else:
from typing_extensions import Final, Literal

COMMENT_BODY_FILE: Final = ".pr-comment-body.md"
COMMENT_FILE: Final = ".pr-comment.json"
DIFF_STEP_NAME: Final = "Generate HTML diff report"
DOCS_URL: Final = (
"https://black.readthedocs.io/en/latest/"
Expand All @@ -55,19 +55,16 @@ def set_output(name: str, value: str) -> None:
print(f"::set-output name={name}::{value}")


def http_get(
url: str,
is_json: bool = True,
headers: Optional[Dict[str, str]] = None,
**kwargs: Any,
) -> Any:
headers = headers or {}
def http_get(url: str, is_json: bool = True, **kwargs: Any) -> Any:
headers = kwargs.get("headers") or {}
headers["User-Agent"] = USER_AGENT
if "github" in url:
if GH_API_TOKEN:
headers["Authorization"] = f"token {GH_API_TOKEN}"
headers["Accept"] = "application/vnd.github.v3+json"
r = http.request("GET", url, headers=headers, **kwargs)
kwargs["headers"] = headers

r = http.request("GET", url, **kwargs)
if is_json:
data = json.loads(r.data.decode("utf-8"))
else:
Expand Down Expand Up @@ -199,8 +196,9 @@ def config(
@click.argument("target", type=click.Path(exists=True, path_type=Path))
@click.argument("baseline-sha")
@click.argument("target-sha")
@click.argument("pr-num", type=int)
def comment_body(
baseline: Path, target: Path, baseline_sha: str, target_sha: str
baseline: Path, target: Path, baseline_sha: str, target_sha: str, pr_num: int
) -> None:
# fmt: off
cmd = [
Expand All @@ -225,45 +223,43 @@ def comment_body(
f"[**What is this?**]({DOCS_URL}) | [Workflow run]($workflow-run-url) |"
" [diff-shades documentation](https://github.com/ichard26/diff-shades#readme)"
)
print(f"[INFO]: writing half-completed comment body to {COMMENT_BODY_FILE}")
with open(COMMENT_BODY_FILE, "w", encoding="utf-8") as f:
f.write(body)
print(f"[INFO]: writing comment details to {COMMENT_FILE}")
with open(COMMENT_FILE, "w", encoding="utf-8") as f:
json.dump({"body": body, "pr-number": pr_num}, f)


@main.command("comment-details", help="Get PR comment resources from a workflow run.")
@click.argument("run-id")
def comment_details(run_id: str) -> None:
data = http_get(f"https://api.github.com/repos/{REPO}/actions/runs/{run_id}")
if data["event"] != "pull_request":
if data["event"] != "pull_request" or data["conclusion"] == "cancelled":
set_output("needs-comment", "false")
return

set_output("needs-comment", "true")
pulls = data["pull_requests"]
assert len(pulls) == 1
pr_number = pulls[0]["number"]
set_output("pr-number", str(pr_number))

jobs_data = http_get(data["jobs_url"])
assert len(jobs_data["jobs"]) == 1, "multiple jobs not supported nor tested"
job = jobs_data["jobs"][0]
jobs = http_get(data["jobs_url"])["jobs"]
assert len(jobs) == 1, "multiple jobs not supported nor tested"
job = jobs[0]
steps = {s["name"]: s["number"] for s in job["steps"]}
diff_step = steps[DIFF_STEP_NAME]
diff_url = job["html_url"] + f"#step:{diff_step}:1"

artifacts_data = http_get(data["artifacts_url"])["artifacts"]
artifacts = {a["name"]: a["archive_download_url"] for a in artifacts_data}
body_url = artifacts[COMMENT_BODY_FILE]
body_zip = BytesIO(http_get(body_url, is_json=False))
with zipfile.ZipFile(body_zip) as zfile:
with zfile.open(COMMENT_BODY_FILE) as rf:
body = rf.read().decode("utf-8")
comment_url = artifacts[COMMENT_FILE]
comment_zip = BytesIO(http_get(comment_url, is_json=False))
with zipfile.ZipFile(comment_zip) as zfile:
with zfile.open(COMMENT_FILE) as rf:
comment_data = json.loads(rf.read().decode("utf-8"))

set_output("pr-number", str(comment_data["pr-number"]))
body = comment_data["body"]
# It's more convenient to fill in these fields after the first workflow is done
# since this command can access the workflows API (doing it in the main workflow
# while it's still in progress seems impossible).
body = body.replace("$workflow-run-url", data["html_url"])
body = body.replace("$job-diff-url", diff_url)
# # https://github.community/t/set-output-truncates-multiline-strings/16852/3
# https://github.community/t/set-output-truncates-multiline-strings/16852/3
escaped = body.replace("%", "%25").replace("\n", "%0A").replace("\r", "%0D")
set_output("comment-body", escaped)

Expand Down
3 changes: 2 additions & 1 deletion src/black/parsing.py
Expand Up @@ -206,7 +206,7 @@ def stringify_ast(node: Union[ast.AST, ast3.AST], depth: int = 0) -> Iterator[st
break

try:
value = getattr(node, field)
value: object = getattr(node, field)
except AttributeError:
continue

Expand Down Expand Up @@ -237,6 +237,7 @@ def stringify_ast(node: Union[ast.AST, ast3.AST], depth: int = 0) -> Iterator[st
yield from stringify_ast(value, depth + 2)

else:
normalized: object
# Constant strings may be indented across newlines, if they are
# docstrings; fold spaces after newlines when comparing. Similarly,
# trailing and leading space may be removed.
Expand Down

0 comments on commit 9bd4134

Please sign in to comment.