
fix: Bump pre-commit hooks to fix isort issue #531

Merged
merged 1 commit into from Feb 1, 2023
12 changes: 8 additions & 4 deletions .flake8
@@ -3,8 +3,12 @@ max-line-length = 125

# E203,W503: incompatibilities with black
# E722: also signaled by pylint (disabling here globally and on case-by-case basis with pylint)
# E501: line length
# E203: whitespace before ':'
# W503: line break before binary operator
# E722: do not use bare 'except'
ignore =
E501, # line length
E203, # whitespace before ':'
W503, # line break before binary operator
E722, # do not use bare 'except'
E501,
E203,
W503,
E722,
6 changes: 3 additions & 3 deletions .github/workflows/lint.yml
@@ -13,13 +13,13 @@ jobs:
steps:
- uses: actions/checkout@v3

- name: Set up Python 3.7
- name: Set up Python 3.11
uses: actions/setup-python@v4
with:
python-version: 3.7
python-version: 3.11

- name: Install dependencies
run: pip install -r requirements-dev.txt
run: pip install 'pre-commit>=2.2.0'

# required for pylint
- name: Generate version.py
17 changes: 6 additions & 11 deletions .pre-commit-config.yaml
@@ -2,7 +2,7 @@
minimum_pre_commit_version: 2.9.2
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.1.0
rev: v4.4.0
hooks:
- id: trailing-whitespace
exclude: ^vendor/|^tests/.*/fixtures/.*|^tests/integration/test_data/.*
@@ -11,18 +11,18 @@ repos:
- id: debug-statements

- repo: https://github.com/PyCQA/isort
rev: 5.10.1
rev: 5.12.0
hooks:
- id: isort
name: isort (python)

- repo: https://github.com/psf/black
rev: 22.3.0
rev: 22.12.0
hooks:
- id: black

- repo: https://github.com/PyCQA/flake8
rev: 4.0.1
rev: 6.0.0
hooks:
- id: flake8

@@ -33,16 +33,11 @@ repos:
name: Mypy Karapace
pass_filenames: false


# https://pre-commit.com/#repository-local-hooks
- repo: local
- repo: https://github.com/PyCQA/pylint
rev: v2.15.10
hooks:
- id: pylint
name: pylint
entry: pylint
language: system
types: [python]
files: \.py$
exclude: ^vendor/
args:
- --rcfile=.pylintrc
10 changes: 8 additions & 2 deletions .pylintrc
@@ -7,7 +7,6 @@ enable=
useless-suppression,

disable=
bad-continuation,
duplicate-code,
fixme,
import-outside-toplevel,
@@ -22,8 +21,15 @@ disable=
too-many-nested-blocks,
too-many-public-methods,
too-many-statements,
too-public-methods,
wrong-import-order,
import-error,
consider-using-f-string,
use-implicit-booleaness-not-comparison,
unspecified-encoding,
no-name-in-module,
use-list-literal,
use-dict-literal,
Comment on lines +25 to +31 (Contributor Author):
  • import-error, no-name-in-module: pylint is no longer installed with all project dependencies; checks like these are better left to mypy and the runtime tests we already have in place.
  • consider-using-f-string, use-list-literal, use-dict-literal: Covered by pyupgrade.
  • unspecified-encoding: Covered by pyupgrade, but with a different resolution.
  • use-implicit-booleaness-not-comparison: I think this is overly opinionated. foo == [] is fine and doesn't need to be outlawed. (See the illustrative sketch below, after this file's diff.)



[FORMAT]
max-line-length=125
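To make the comment above concrete, here is a minimal, hypothetical Python sketch (not code from this repository) of the patterns two of the newly disabled checks target:

# Hypothetical illustration only; none of this code is part of the PR.
def describe(items: list, name: str) -> str:
    # use-implicit-booleaness-not-comparison would ask for `if not items:` here;
    # the explicit comparison is considered acceptable in this project.
    if items == []:
        return f"{name}: empty"
    # consider-using-f-string flags str.format() calls like the one below;
    # pyupgrade can rewrite it to f"{name}: {len(items)} item(s)" automatically.
    return "{}: {} item(s)".format(name, len(items))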
2 changes: 1 addition & 1 deletion karapace/compatibility/protobuf/checks.py
@@ -18,7 +18,7 @@ def check_protobuf_schema_compatibility(reader: ProtobufSchema, writer: Protobuf
messages = set()
for record in result.result:
if not record.modification.is_compatible():
incompatibilities.append(record.modification.__str__())
incompatibilities.append(str(record.modification))
locations.add(record.path)
messages.add(record.message)

2 changes: 1 addition & 1 deletion karapace/kafka_rest_apis/__init__.py
@@ -715,7 +715,7 @@ async def topic_publish(self, topic: str, content_type: str, *, request: HTTPReq
await self.publish(topic, None, content_type, request)

@staticmethod
def validate_partition_id(partition_id: str, content_type: str) -> int: # pylint: disable=inconsistent-return-statements
def validate_partition_id(partition_id: str, content_type: str) -> int:
try:
return int(partition_id)
except ValueError:
2 changes: 0 additions & 2 deletions karapace/protobuf/protobuf_to_dict.py
@@ -19,8 +19,6 @@

Timestamp_type_name = "Timestamp"

# pylint: disable=no-member


def datetime_to_timestamp(dt):
ts = Timestamp()
3 changes: 1 addition & 2 deletions karapace/protobuf/reserved_element.py
@@ -22,8 +22,7 @@ def to_schema(self) -> str:
append_documentation(result, self.documentation)
result.append("reserved ")

for index in range(len(self.values)):
value = self.values[index]
for index, value in enumerate(self.values):
if index > 0:
result.append(", ")

4 changes: 2 additions & 2 deletions karapace/rapu.py
@@ -18,7 +18,7 @@
import aiohttp.web
import aiohttp.web_exceptions
import asyncio
import cgi
import cgi # pylint: disable=deprecated-module
Contributor Author:
I created a separate issue for this: #533

import hashlib
import json
import logging
@@ -192,7 +192,7 @@ def cors_and_server_headers_for_request(*, request, origin="*"): # pylint: disa
"Server": SERVER_NAME,
}

def check_rest_headers(self, request: HTTPRequest) -> dict: # pylint:disable=inconsistent-return-statements
def check_rest_headers(self, request: HTTPRequest) -> dict:
method = request.method
default_content = "application/vnd.kafka.json.v2+json"
default_accept = "*/*"
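On the deprecated cgi import above (migration deferred to #533): a rough sketch of the usual standard-library alternative, assuming the call being replaced is cgi.parse_header for Content-Type parsing (an assumption; the call site is not shown in this diff):

# Hypothetical sketch, not part of this PR: a stdlib replacement for cgi.parse_header().
from email.message import Message


def parse_content_type(value: str):
    """Split a Content-Type header into (mimetype, params), like cgi.parse_header()."""
    msg = Message()
    msg["content-type"] = value
    # get_params() returns [("type/subtype", ""), (param, value), ...]
    params = msg.get_params()
    return params[0][0], dict(params[1:])


# parse_content_type("application/vnd.kafka.json.v2+json; charset=utf-8")
# -> ("application/vnd.kafka.json.v2+json", {"charset": "utf-8"})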
15 changes: 12 additions & 3 deletions karapace/schema_reader.py
@@ -194,13 +194,22 @@ def get_schema_id(self, new_schema: TypedSchema) -> int:
return self.global_schema_id

def get_schema_id_if_exists(self, *, subject: Subject, schema: TypedSchema) -> Optional[SchemaId]:
return self._hash_to_schema_id_on_subject.get(hash((subject, schema.__str__())), None)
return self._hash_to_schema_id_on_subject.get(
hash((subject, str(schema))),
None,
)

def _set_schema_id_on_subject(self, *, subject: Subject, schema: TypedSchema, schema_id: SchemaId) -> None:
self._hash_to_schema_id_on_subject.setdefault(hash((subject, schema.__str__())), schema_id)
self._hash_to_schema_id_on_subject.setdefault(
hash((subject, str(schema))),
schema_id,
)

def _delete_from_schema_id_on_subject(self, *, subject: Subject, schema: TypedSchema) -> None:
self._hash_to_schema_id_on_subject.pop(hash((subject, schema.__str__())), None)
self._hash_to_schema_id_on_subject.pop(
hash((subject, str(schema))),
None,
)

def close(self) -> None:
LOG.info("Closing schema_reader")
2 changes: 1 addition & 1 deletion karapace/schema_registry.py
@@ -375,7 +375,7 @@ async def write_new_schema_local(
subject,
schema_id,
version,
new_schema.__str__(),
str(new_schema),
schema_id,
)
else:
2 changes: 1 addition & 1 deletion karapace/sentry/sentry_client.py
@@ -6,7 +6,7 @@
from typing import Dict, Optional

# The Sentry SDK is optional, omit pylint import error
import sentry_sdk # pylint: disable=import-error
import sentry_sdk


class SentryClient(SentryClientAPI):
8 changes: 4 additions & 4 deletions karapace/serialization.py
@@ -166,7 +166,7 @@ async def get_schema_for_subject(self, subject: str) -> TypedSchema:
assert self.registry_client, "must not call this method after the object is closed."
schema_id, schema = await self.registry_client.get_latest_schema(subject)
async with self.state_lock:
schema_ser = schema.__str__()
schema_ser = str(schema)
self.schemas_to_ids[schema_ser] = schema_id
self.ids_to_schemas[schema_id] = schema
return schema
@@ -177,7 +177,7 @@ async def get_id_for_schema(self, schema: str, subject: str, schema_type: Schema
schema_typed = ParsedTypedSchema.parse(schema_type, schema)
except InvalidSchema as e:
raise InvalidPayload(f"Schema string {schema} is invalid") from e
schema_ser = schema_typed.__str__()
schema_ser = str(schema_typed)
if schema_ser in self.schemas_to_ids:
return self.schemas_to_ids[schema_ser]
schema_id = await self.registry_client.post_new_schema(subject, schema_typed)
@@ -191,14 +191,14 @@ async def get_schema_for_id(self, schema_id: int) -> TypedSchema:
if schema_id in self.ids_to_schemas:
return self.ids_to_schemas[schema_id]
schema_typed = await self.registry_client.get_schema_for_id(schema_id)
schema_ser = schema_typed.__str__()
schema_ser = str(schema_typed)
async with self.state_lock:
self.schemas_to_ids[schema_ser] = schema_id
self.ids_to_schemas[schema_id] = schema_typed
return schema_typed

async def serialize(self, schema: TypedSchema, value: dict) -> bytes:
schema_id = self.schemas_to_ids[schema.__str__()]
schema_id = self.schemas_to_ids[str(schema)]
with io.BytesIO() as bio:
bio.write(struct.pack(HEADER_FORMAT, START_BYTE, schema_id))
try:
2 changes: 1 addition & 1 deletion karapace/utils.py
@@ -55,7 +55,7 @@ def default_json_serialization(obj: MappingProxyType) -> dict:
...


def default_json_serialization( # pylint: disable=inconsistent-return-statements
def default_json_serialization(
obj: Union[datetime, timedelta, Decimal, MappingProxyType],
) -> Union[str, float, dict]:
if isinstance(obj, datetime):
9 changes: 1 addition & 8 deletions requirements-dev.txt
@@ -1,10 +1,3 @@
# linters and formatters
isort==5.10.1
pylint==2.7.4
flake8==4.0.1
mypy==0.942
black==22.3.0

# testing
pytest==6.2.5
pytest-xdist[psutil]==2.2.1
@@ -13,7 +6,7 @@ pdbpp==0.10.2
psutil==5.9.0
requests==2.27.1

# workflow
# linting
pre-commit>=2.2.0

# performance test
2 changes: 1 addition & 1 deletion tests/integration/test_karapace.py
@@ -39,7 +39,7 @@ def test_regression_server_must_exit_on_exception(
errfile = stack.enter_context((tmp_path / "karapace.err").open("w"))
config_path.write_text(json.dumps(config))
sock.bind(("127.0.0.1", port))
process = Popen(
process = Popen( # pylint: disable=consider-using-with
args=["python", "-m", "karapace.karapace_all", str(config_path)],
stdout=logfile,
stderr=errfile,
2 changes: 1 addition & 1 deletion tests/integration/utils/kafka_server.py
@@ -192,5 +192,5 @@ def configure_and_start_kafka(
),
)
env: Dict[bytes, bytes] = {}
proc = Popen(kafka_cmd, env=env)
proc = Popen(kafka_cmd, env=env) # pylint: disable=consider-using-with
aiven-anton marked this conversation as resolved.
return proc
2 changes: 1 addition & 1 deletion tests/integration/utils/zookeeper.py
@@ -59,5 +59,5 @@ def configure_and_start_zk(config: ZKConfig, kafka_description: KafkaDescription
kafka_description,
)
)
proc = Popen(java_args, env=env)
proc = Popen(java_args, env=env) # pylint: disable=consider-using-with
return proc