Update pre-commit dependencies (#1530)
* Update pre-commit dependencies

* Remove flake8-copyright as not compatible with flake8==6.0.0

Waiting for savoirfairelinux/flake8-copyright#20
to be merged

* Automatically format files with pre-commit
vrigal committed Feb 1, 2023
1 parent f10b674 commit 82867b6
Showing 21 changed files with 5 additions and 29 deletions.
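
Nearly every Python hunk below is the same mechanical edit applied by the updated Black hook: the blank line that used to sit directly after a def or class signature is dropped, and redundant parentheses around for-loop targets are unwrapped (both part of Black 23.1.0's new stable style). A minimal before/after sketch, with hypothetical names rather than code from this repository:

# Sketch only: hypothetical example, not code from this commit.
# black 22.10.0 accepted the layout shown in the comment block;
# black 23.1.0 reformats it to the code underneath.
#
#     def issues_per_day(entries):
#
#         # Count all the issues per day
#         return len(entries)
#
#     for (rev, days_ago) in ((1, 35), (2, 15), (3, 1)):
#         print(rev, days_ago)

def issues_per_day(entries):
    # Count all the issues per day
    return len(entries)


for rev, days_ago in ((1, 35), (2, 15), (3, 1)):
    print(rev, days_ago)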
9 changes: 4 additions & 5 deletions .pre-commit-config.yaml
@@ -4,24 +4,23 @@ repos:
     hooks:
       - id: seed-isort-config
   - repo: https://github.com/timothycrosley/isort
-    rev: 5.10.1
+    rev: 5.12.0
     hooks:
       - id: isort
   - repo: https://github.com/ambv/black
-    rev: 22.10.0
+    rev: 23.1.0
     hooks:
       - id: black
   - repo: https://github.com/pycqa/flake8
-    rev: 5.0.4
+    rev: 6.0.0
     hooks:
       - id: flake8
         additional_dependencies:
           - "flake8-coding==1.3.1"
-          - "flake8-copyright==0.2.3"
           - "flake8-debugger==3.1.0"
           - "flake8-mypy==17.8.0"
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.3.0
+    rev: v4.4.0
     hooks:
       - id: check-ast
       - id: check-docstring-first
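
The rev: bumps above are what pre-commit's built-in updater produces; only the flake8-copyright removal was a manual edit, per the commit message. A sketch of how such a change is typically regenerated, assuming pre-commit is installed locally (this script is illustrative, not part of the commit):

# Sketch: drive the two real pre-commit CLI steps behind this change.
import subprocess

# Rewrite each hook's "rev:" in .pre-commit-config.yaml to the latest tag.
subprocess.run(["pre-commit", "autoupdate"], check=True)

# Re-run every hook over the whole tree; pre-commit exits non-zero when a
# hook modifies files, so that exit code is expected here, not a failure.
subprocess.run(["pre-commit", "run", "--all-files"], check=False)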
1 change: 0 additions & 1 deletion backend/code_review_backend/issues/api.py
@@ -301,7 +301,6 @@ class IssueCheckHistory(CachedView, generics.ListAPIView):
     pagination_class = None
 
     def get_queryset(self):
-
         # Count all the issues per day
         queryset = (
             Issue.objects.annotate(date=TruncDate("created"))
@@ -63,7 +63,6 @@ def handle(self, *args, **options):
         )
 
         for task_id, report in tasks:
-
             # Build revision & diff
             revision, diff = self.build_revision_and_diff(report["revision"], task_id)
 
@@ -118,7 +117,6 @@ def load_tasks(self, environment, chunk=200):
 
         token = None
         while True:
-
             query = {"limit": chunk}
             if token is not None:
                 query["continuationToken"] = token
@@ -127,7 +125,6 @@
         )
 
         for task in data["tasks"]:
-
             if not task["data"].get("issues"):
                 continue
 
@@ -137,7 +134,6 @@
                 artifact = json.load(open(path))
 
             else:
-
                 # Download the task report
                 logging.info(f"Download task {task['taskId']}")
                 try:
@@ -13,7 +13,6 @@
 
 
 class Migration(migrations.Migration):
-
     initial = True
 
     dependencies = []
@@ -37,7 +37,6 @@ def set_existing_as_new(apps, schema_editor):
 
 
 class Migration(migrations.Migration):
-
     dependencies = [("issues", "0001_initial")]
 
     operations = [
@@ -40,7 +40,6 @@ def _create_diff_repos(apps, schema_editor):
 
 
 class Migration(migrations.Migration):
-
     dependencies = [("issues", "0002_compare_issues")]
 
     operations = [
@@ -10,7 +10,6 @@
 
 
 class Migration(migrations.Migration):
-
     dependencies = [("issues", "0003_diff_repository")]
 
     operations = [
@@ -9,7 +9,6 @@
 
 
 class Migration(migrations.Migration):
-
     dependencies = [
         ("issues", "0004_issue_in_patch"),
     ]
@@ -18,7 +18,6 @@ def clean_unlinked_issue(apps, schema_editor):
 
 
 class Migration(migrations.Migration):
-
     dependencies = [
         ("issues", "0005_rename_check_issue_analyzer_check"),
     ]
@@ -58,7 +58,7 @@ def setUp(self):
                 )
             ]
         )
-        for (rev, days_ago) in ((rev_1, 35), (rev_2, 15), (rev_3, 1)):
+        for rev, days_ago in ((rev_1, 35), (rev_2, 15), (rev_3, 1)):
             rev.created = timezone.now() - timedelta(days=days_ago)
             rev.save(update_fields=["created"])
 
1 change: 0 additions & 1 deletion backend/code_review_backend/issues/tests/test_issue.py
@@ -12,7 +12,6 @@
 
 class IssueTestCase(TestCase):
     def test_publishable(self):
-
         # A warning is not publishable
         issue = Issue.objects.create(path="some/file", line=12, level=LEVEL_WARNING)
         self.assertFalse(issue.publishable)
1 change: 0 additions & 1 deletion backend/code_review_backend/issues/tests/test_revision.py
@@ -17,7 +17,6 @@ def setUp(self):
         )
 
     def test_phabricator_url(self):
-
         rev = Revision.objects.create(
             id=12, phid="PHID-REV-12345", repository=self.repo
         )
1 change: 0 additions & 1 deletion bot/code_review_bot/cli.py
@@ -63,7 +63,6 @@ def parse_cli():
 
 @stats.timer("runtime.analysis")
 def main():
-
     args = parse_cli()
     taskcluster.auth(args.taskcluster_client_id, args.taskcluster_access_token)
 
1 change: 0 additions & 1 deletion bot/code_review_bot/revisions.py
@@ -90,7 +90,6 @@ def __init__(
         url=None,
         patch=None,
     ):
-
         # Identification
         self.id = id
         self.phid = phid
1 change: 0 additions & 1 deletion bot/code_review_bot/tasks/base.py
@@ -74,7 +74,6 @@ def build_from_route(cls, index_service, queue_service):
         return cls(task_id, task_status)
 
     def load_artifacts(self, queue_service):
-
         # Process only the supported final states
         # as some tasks do not always have relevant output
         if self.state in self.skipped_states:
1 change: 0 additions & 1 deletion bot/code_review_bot/workflow.py
@@ -382,7 +382,6 @@ def _in_group(dep_id):
             # Report a problem when tasks in erroneous state are found
             # but no issue or patch has been processed by the bot
             if task.state == "failed" and not task_issues and not task_patches:
-
                 # Skip task that are listed as ignorable (we try to avoid unnecessary spam)
                 if task.name in self.task_failures_ignored:
                     logger.warning(
1 change: 0 additions & 1 deletion bot/tools/fix_missing.py
@@ -124,7 +124,6 @@ def list_pushes(known_revisions, updates, min_date, max_date):
 
 
 def find_task(push_id):
-
     # Find the task ids from Treeherder
     resp = requests.get(
         TREEHERDER_JOBS_URL, {"push_id": push_id}, headers=TREEHERDER_HEADERS
2 changes: 0 additions & 2 deletions events/code_review_events/workflow.py
@@ -138,7 +138,6 @@ async def process_build(self, build):
         self.update_state(build)
 
         if build.state == PhabricatorBuildState.Public:
-
             # Check if the author is not blacklisted
             if self.is_blacklisted(build.revision):
                 return
@@ -375,7 +374,6 @@ def __init__(self, cache_root):
 
         # Run webserver & pulse on web dyno or single instance
        if not heroku.in_dyno() or heroku.in_web_dyno():
-
             # Create web server
             self.webserver = WebServer(QUEUE_WEB_BUILDS)
             self.webserver.register(self.bus)
1 change: 0 additions & 1 deletion events/tests/conftest.py
@@ -118,7 +118,6 @@ def _user_search(request):
         return (200, json_headers, _response(mock_name))
 
     with responses.RequestsMock(assert_all_requests_are_fired=False) as resp:
-
         resp.add(
             responses.POST,
             "http://phabricator.test/api/user.whoami",
1 change: 0 additions & 1 deletion integration/tests/test_workflow.py
@@ -7,7 +7,6 @@
 
 
 def test_publish(monkeypatch, workflow, tmpdir, mock_taskcluster):
-
     # Fake repo
     repo_dir = tmpdir.realpath()
     hg = tmpdir.mkdir(".hg")
1 change: 0 additions & 1 deletion tools/code_review_tools/log.py
@@ -129,7 +129,6 @@ def init_logger(
     PAPERTRAIL_PORT=None,
     SENTRY_DSN=None,
 ):
-
     if not channel:
         channel = os.environ.get("APP_CHANNEL")
 
