chore: Enable ISC Ruff checks (#1499)
> This is a plugin for the Python code-checking tool Flake8 to encourage correct string literal concatenation.
>
> It looks for style problems like implicitly concatenated string literals on the same line (which can be introduced by the code-formatting tool Black), or unnecessary plus operators for explicit string literal concatenation.
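For illustration only, here is a minimal hypothetical sketch, not taken from this repository, of the two patterns these checks flag (roughly Ruff's ISC001 and ISC003) and the parenthesized form the changes below convert the affected call sites to:

```python
# Hypothetical snippets for illustration; nothing here is taken from the SDK.

# Implicit concatenation of two literals on one line (ISC001). This is easy to
# introduce by accident, for example when Black joins adjacent strings:
error = "Sorting error detected " "on this record."

# Explicit "+" between adjacent literals (ISC003). The operator is unnecessary
# because adjacent string literals are already concatenated at compile time:
error = "Sorting error detected " + "on this record."

# The style the changes below adopt: adjacent literals wrapped in parentheses,
# one fragment per line, with no "+" operator:
error = (
    "Sorting error detected "
    "on this record."
)
```

The diffs in this commit apply essentially that transformation to the SDK's own string-building sites.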
edgarrmondragon committed Mar 15, 2023
1 parent b1067e8 commit 86e3b37
Showing 10 changed files with 18 additions and 15 deletions.
pyproject.toml (1 addition, 0 deletions)
@@ -230,6 +230,7 @@ select = [
"ANN", # flake8-annotations
"COM", # flake8-commas
"T10", # flake8-debugger
+ "ISC", # flake8-implicit-str-concat
"ICN", # flake8-import-conventions
"PIE", # flake8-pie
"PT", # flake8-pytest-style
samples/sample_tap_gitlab/gitlab_graphql_streams.py (1 addition, 1 deletion)
@@ -55,4 +55,4 @@ class GraphQLProjectsStream(GitlabGraphQLStream):
@property
def query(self) -> str:
"""Return dynamic GraphQL query."""
- return f"project(fullPath: {self.config['project_id']}" " { name }"
+ return f"project(fullPath: {self.config['project_id']} {{ name }}"
singer_sdk/_singerlib/messages.py (1 addition, 1 deletion)
@@ -112,7 +112,7 @@ def __post_init__(self) -> None:
if self.time_extracted and not self.time_extracted.tzinfo:
raise ValueError(
"'time_extracted' must be either None "
- + "or an aware datetime (with a time zone)",
+ "or an aware datetime (with a time zone)",
)

if self.time_extracted:
singer_sdk/helpers/_state.py (1 addition, 1 deletion)
@@ -260,7 +260,7 @@ def log_sort_error(
partition_record_count: int,
) -> None:
"""Log a sort error."""
- msg = f"Sorting error detected in '{stream_name}'." f"on record #{record_count}. "
+ msg = f"Sorting error detected in '{stream_name}' on record #{record_count}. "
if partition_record_count != record_count:
msg += (
f"Record was partition record "
singer_sdk/helpers/capabilities.py (5 additions, 3 deletions)
@@ -23,9 +23,11 @@
Property(
"stream_maps",
ObjectType(),
- description="Config object for stream maps capability. "
- + "For more information check out "
- + "[Stream Maps](https://sdk.meltano.com/en/latest/stream_maps.html).",
+ description=(
+ "Config object for stream maps capability. "
+ "For more information check out "
+ "[Stream Maps](https://sdk.meltano.com/en/latest/stream_maps.html)."
+ ),
),
Property(
"stream_map_config",
singer_sdk/sinks/sql.py (1 addition, 1 deletion)
@@ -190,7 +190,7 @@ def _check_conformed_names_not_duplicated(
if duplicates:
raise ConformedNameClashException(
"Duplicate stream properties produced when "
- + f"conforming property names: {duplicates}",
+ f"conforming property names: {duplicates}",
)

def conform_schema(self, schema: dict) -> dict:
singer_sdk/streams/rest.py (2 additions, 2 deletions)
@@ -499,8 +499,8 @@ def get_new_paginator(self) -> BaseAPIPaginator:
if hasattr(self, "get_next_page_token"):
warn(
"`RESTStream.get_next_page_token` is deprecated and will not be used "
- + "in a future version of the Meltano Singer SDK. "
- + "Override `RESTStream.get_new_paginator` instead.",
+ "in a future version of the Meltano Singer SDK. "
+ "Override `RESTStream.get_new_paginator` instead.",
DeprecationWarning,
)
return LegacyStreamPaginator(self) # type: ignore
singer_sdk/tap_base.py (2 additions, 2 deletions)
@@ -413,8 +413,8 @@ def cli(cls) -> Callable:
default=CliTestOptionValue.Disabled,
help=(
"Use --test to sync a single record for each stream. "
- + "Use --test=schema to test schema output without syncing "
- + "records."
+ "Use --test=schema to test schema output without syncing "
+ "records."
),
)
@click.option(
singer_sdk/typing.py (2 additions, 2 deletions)
@@ -445,8 +445,8 @@ def type_dict(self) -> dict: # type: ignore # OK: @classproperty vs @property
if isinstance(wrapped, type) and not isinstance(wrapped.type_dict, Mapping):
raise ValueError(
f"Type dict for {wrapped} is not defined. "
- + "Try instantiating it with a nested type such as "
- + f"{wrapped.__name__}(StringType).",
+ "Try instantiating it with a nested type such as "
+ f"{wrapped.__name__}(StringType).",
)

return cast(dict, wrapped.type_dict)
tests/core/test_jsonschema_helpers.py (2 additions, 2 deletions)
@@ -457,7 +457,7 @@ def test_wrapped_type_dict():
ValueError,
match=re.escape(
"Type dict for <class 'singer_sdk.typing.ArrayType'> is not defined. "
- + "Try instantiating it with a nested type such as ArrayType(StringType).",
+ "Try instantiating it with a nested type such as ArrayType(StringType).",
),
):
Property("bad_array_prop", ArrayType).to_dict()
@@ -466,7 +466,7 @@ def test_wrapped_type_dict():
ValueError,
match=re.escape(
"Type dict for <class 'singer_sdk.typing.ObjectType'> is not defined. "
- + "Try instantiating it with a nested type such as ObjectType(StringType).",
+ "Try instantiating it with a nested type such as ObjectType(StringType).",
),
):
Property("bad_object_prop", ObjectType).to_dict()