Partially resolve builtin variable shadowing (A001)
Taragolis committed Apr 30, 2024
1 parent 8a33c33 commit b449b3d
Showing 23 changed files with 162 additions and 144 deletions.
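
The rule behind this commit, A001 from Ruff's flake8-builtins plugin, flags assignments that reuse the name of a Python builtin. Shadowing a builtin is legal, but it hides the builtin for the rest of the scope and tends to surface later as a confusing TypeError. A minimal illustration of the failure mode the renames below avoid (not code from this commit):

# Shadowing the builtin `all` works until the builtin is needed again:
all = [1, 2, 3]  # A001: `all` now names this list
print(len(all))  # fine
print(all(x > 0 for x in [1, 2]))  # TypeError: 'list' object is not callable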
4 changes: 2 additions & 2 deletions dev/airflow-license
@@ -76,8 +76,8 @@ if __name__ == "__main__":

for notice in notices:
notice = notice[0]
- license = parse_license_file(notice[1])
- print(f"{notice[1]:<30}|{notice[2][:50]:<50}||{notice[0]:<20}||{license:<10}")
+ license_type = parse_license_file(notice[1])
+ print(f"{notice[1]:<30}|{notice[2][:50]:<50}||{notice[0]:<20}||{license_type:<10}")

file_count = len(os.listdir("../licenses"))
print(f"Defined licenses: {len(notices)} Files found: {file_count}")
@@ -958,7 +958,7 @@ def down(preserve_volumes: bool, cleanup_mypy_cache: bool, project_name: str):
@option_verbose
@option_dry_run
@click.argument("exec_args", nargs=-1, type=click.UNPROCESSED)
- def exec(exec_args: tuple):
+ def exec_(exec_args: tuple):
perform_environment_checks()
container_running = find_airflow_container()
if container_running:
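
The trailing underscore in exec_ is the PEP 8 convention for a name that would otherwise collide with a builtin (here, exec()). The CLI-facing command name does not have to change with the function, since click accepts an explicit name. A minimal sketch of that pattern (illustrative; the command decorator and its arguments are assumptions, not the actual Breeze wiring):

import click

@click.command(name="exec")  # the CLI keeps the name `exec` ...
@click.argument("exec_args", nargs=-1, type=click.UNPROCESSED)
def exec_(exec_args: tuple):  # ... while the function no longer shadows builtin exec()
    click.echo(f"running with args: {exec_args!r}")

if __name__ == "__main__":
    exec_()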
@@ -2788,9 +2788,9 @@ def _copy_selected_sources_from_tmp_directory_to_clients_python():
f"[info]Copying selected sources: {GENERATED_CLIENT_DIRECTORIES_TO_COPY} from "
f"{PYTHON_CLIENT_TMP_DIR} to {PYTHON_CLIENT_DIR_PATH}[/]"
)
- for dir in GENERATED_CLIENT_DIRECTORIES_TO_COPY:
-     source_dir = PYTHON_CLIENT_TMP_DIR / dir
-     target_dir = PYTHON_CLIENT_DIR_PATH / dir
+ for directory in GENERATED_CLIENT_DIRECTORIES_TO_COPY:
+     source_dir = PYTHON_CLIENT_TMP_DIR / directory
+     target_dir = PYTHON_CLIENT_DIR_PATH / directory
get_console().print(f"[info] Copying generated sources from {source_dir} to {target_dir}[/]")
shutil.rmtree(target_dir, ignore_errors=True)
shutil.copytree(source_dir, target_dir)
@@ -2926,9 +2926,9 @@ def prepare_python_client(
get_console().print(
f"[info]Copying generated client from {PYTHON_CLIENT_DIR_PATH} to {python_client_repo}[/]"
)
- for dir in GENERATED_CLIENT_DIRECTORIES_TO_COPY:
-     source_dir = PYTHON_CLIENT_DIR_PATH / dir
-     target_dir = python_client_repo / dir
+ for directory in GENERATED_CLIENT_DIRECTORIES_TO_COPY:
+     source_dir = PYTHON_CLIENT_DIR_PATH / directory
+     target_dir = python_client_repo / directory
get_console().print(f"[info] Copying {source_dir} to {target_dir}[/]")
shutil.rmtree(target_dir, ignore_errors=True)
shutil.copytree(source_dir, target_dir)
4 changes: 2 additions & 2 deletions dev/breeze/src/airflow_breeze/params/doc_build_params.py
@@ -49,6 +49,6 @@ def args_doc_builder(self) -> list[str]:
for filter_from_short_doc in get_long_package_names(self.short_doc_packages):
doc_args.extend(["--package-filter", filter_from_short_doc])
if self.package_filter:
- for filter in self.package_filter:
-     doc_args.extend(["--package-filter", filter])
+ for filter_ in self.package_filter:
+     doc_args.extend(["--package-filter", filter_])
return doc_args
12 changes: 8 additions & 4 deletions dev/system_tests/update_issue_status.py
@@ -209,9 +209,11 @@ def update_issue_status(
if not_completed_closed_issues:
console.print("[yellow] Issues that are not completed and should be opened:[/]\n")
for issue in not_completed_closed_issues:
- all = per_issue_num_all[issue.id]
+ all_ = per_issue_num_all[issue.id]
done = per_issue_num_done[issue.id]
console.print(f" * [[yellow]{issue.title}[/]]({issue.html_url}): {done}/{all} : {done / all:.2%}")
console.print(
f" * [[yellow]{issue.title}[/]]({issue.html_url}): {done}/{all_} : {done / all_:.2%}"
)
console.print()
if completed_open_issues:
console.print("[yellow] Issues that are completed and should be closed:[/]\n")
@@ -221,9 +223,11 @@
if not_completed_opened_issues:
console.print("[yellow] Issues that are not completed and are still opened:[/]\n")
for issue in not_completed_opened_issues:
- all = per_issue_num_all[issue.id]
+ all_ = per_issue_num_all[issue.id]
done = per_issue_num_done[issue.id]
console.print(f" * [[yellow]{issue.title}[/]]({issue.html_url}): {done}/{all} : {done / all:.2%}")
console.print(
f" * [[yellow]{issue.title}[/]]({issue.html_url}): {done}/{all_} : {done / all_:.2%}"
)
console.print()
if completed_closed_issues:
console.print("[green] Issues that are completed and are already closed:[/]\n")
8 changes: 4 additions & 4 deletions hatch_build.py
@@ -915,16 +915,16 @@ def _process_all_built_in_extras(self, version: str) -> None:
:param version: "standard" or "editable" build.
"""
- for dict, _ in ALL_DYNAMIC_EXTRA_DICTS:
-     for extra, deps in dict.items():
+ for d, _ in ALL_DYNAMIC_EXTRA_DICTS:
+     for extra, deps in d.items():
self.all_devel_extras.add(extra)
self._add_devel_ci_dependencies(deps, python_exclusion="")
- if dict not in [DEPRECATED_EXTRAS, DEVEL_EXTRAS, DOC_EXTRAS]:
+ if d not in [DEPRECATED_EXTRAS, DEVEL_EXTRAS, DOC_EXTRAS]:
# do not add deprecated extras to "all" extras
self.all_non_devel_extras.add(extra)
if version == "standard":
# for wheel builds we skip devel and doc extras
- if dict not in [DEVEL_EXTRAS, DOC_EXTRAS]:
+ if d not in [DEVEL_EXTRAS, DOC_EXTRAS]:
self.optional_dependencies[extra] = deps
else:
# for editable builds we add all extras
Expand Down
10 changes: 10 additions & 0 deletions pyproject.toml
@@ -289,6 +289,7 @@ extend-select = [
"PGH004", # Use specific rule codes when using noqa
"PGH005", # Invalid unittest.mock.Mock methods/attributes/properties
"S101", # Checks use `assert` outside the test cases, test cases should be added into the exclusions
"A001", # Checks for variable (and function) assignments that use the same name as a builtin.
"B004", # Checks for use of hasattr(x, "__call__") and replaces it with callable(x)
"B006", # Checks for uses of mutable objects as function argument defaults.
"B007", # Checks for unused variables in the loop
@@ -392,6 +393,15 @@ combine-as-imports = true
# https://github.com/apache/airflow/issues/39252
"airflow/providers/amazon/aws/hooks/eks.py" = ["W605"]

+ # Airflow Core / Providers modules that still shadow Python builtin variables/functions/classes
+ "airflow/api_connexion/endpoints/task_instance_endpoint.py" = ["A001"]
+ "airflow/cli/cli_config.py" = ["A001"]
+ "airflow/configuration.py" = ["A001"]
+ "airflow/models/dag.py" = ["A001"]
+ "airflow/providers/amazon/aws/hooks/s3.py" = ["A001"]
+ "airflow/providers/databricks/hooks/databricks.py" = ["A001"]
+ "airflow/providers/google/cloud/operators/bigquery.py" = ["A001"]
+
[tool.ruff.lint.flake8-tidy-imports]
# Disallow all relative imports.
ban-relative-imports = "all"
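
The per-file ignores above grandfather the remaining shadows in these modules while A001 is enforced everywhere else. Ruff also honors line-level suppression, so a deliberate shadow can be exempted where it occurs rather than file-wide. A hypothetical one-liner, not taken from the ignored modules:

filter = {"dag_id": "example"}  # noqa: A001  (deliberate shadow of the `filter` builtin)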
4 changes: 2 additions & 2 deletions scripts/ci/pre_commit/check_init_in_tests.py
@@ -41,10 +41,10 @@

if __name__ == "__main__":
for dirname, sub_dirs, _ in os.walk(ROOT_DIR / "tests"):
- dir = Path(dirname)
+ directory = Path(dirname)
sub_dirs[:] = [subdir for subdir in sub_dirs if subdir not in {"__pycache__", "test_logs"}]
for sub_dir in sub_dirs:
- init_py_path = dir / sub_dir / "__init__.py"
+ init_py_path = directory / sub_dir / "__init__.py"
if not init_py_path.exists():
init_py_path.touch()
console.print(f"[yellow] Created {init_py_path}[/]")
@@ -36,8 +36,8 @@ def check_dir_init_file(provider_files: list[str]) -> None:
missing_init_dirs.append(path)

if missing_init_dirs:
- with open(os.path.join(ROOT_DIR, "scripts/ci/license-templates/LICENSE.txt")) as license:
-     license_txt = license.readlines()
+ with open(os.path.join(ROOT_DIR, "scripts/ci/license-templates/LICENSE.txt")) as fp:
+     license_txt = fp.readlines()
prefixed_licensed_txt = [f"# {line}" if line != "\n" else "#\n" for line in license_txt]

for missing_init_dir in missing_init_dirs:
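
One subtlety this hunk touches: a `with ... as` target is an ordinary assignment, so the shadow does not end when the block closes. A short illustration (not from the script; `license` is a site-injected builtin, which is why A001 flags it):

with open("LICENSE.txt") as license:  # noqa: A001 -- shadows the builtin
    license_txt = license.readlines()
print(license.closed)  # True: the target leaks out of the block, still shadowing `license`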
10 changes: 5 additions & 5 deletions scripts/ci/pre_commit/www_lint.py
@@ -27,8 +27,8 @@
)

if __name__ == "__main__":
dir = Path("airflow") / "www"
subprocess.check_call(["yarn", "--frozen-lockfile", "--non-interactive"], cwd=dir)
subprocess.check_call(["yarn", "run", "generate-api-types"], cwd=dir)
subprocess.check_call(["yarn", "run", "format"], cwd=dir)
subprocess.check_call(["yarn", "run", "lint:fix"], cwd=dir)
www_dir = Path("airflow") / "www"
subprocess.check_call(["yarn", "--frozen-lockfile", "--non-interactive"], cwd=www_dir)
subprocess.check_call(["yarn", "run", "generate-api-types"], cwd=www_dir)
subprocess.check_call(["yarn", "run", "format"], cwd=www_dir)
subprocess.check_call(["yarn", "run", "lint:fix"], cwd=www_dir)
18 changes: 9 additions & 9 deletions tests/callbacks/test_callback_requests.py
@@ -37,7 +37,7 @@

class TestCallbackRequest:
@pytest.mark.parametrize(
"input,request_class",
"callback_request, request_class",
[
(CallbackRequest(full_filepath="filepath", msg="task_failure"), CallbackRequest),
(
@@ -64,8 +64,8 @@ class TestCallbackRequest:
),
],
)
- def test_from_json(self, input, request_class):
-     if input is None:
+ def test_from_json(self, callback_request, request_class):
+     if callback_request is None:
ti = TaskInstance(
task=BashOperator(
task_id="test", bash_command="true", dag=DAG(dag_id="id"), start_date=datetime.now()
@@ -74,31 +74,31 @@ def test_from_json(self, input, request_class):
state=State.RUNNING,
)

- input = TaskCallbackRequest(
+ callback_request = TaskCallbackRequest(
full_filepath="filepath",
simple_task_instance=SimpleTaskInstance.from_ti(ti=ti),
processor_subdir="/test_dir",
is_failure_callback=True,
)
- json_str = input.to_json()
+ json_str = callback_request.to_json()
result = request_class.from_json(json_str=json_str)
- assert result == input
+ assert result == callback_request

def test_taskcallback_to_json_with_start_date_and_end_date(self, session, create_task_instance):
ti = create_task_instance()
ti.start_date = timezone.utcnow()
ti.end_date = timezone.utcnow()
session.merge(ti)
session.flush()
- input = TaskCallbackRequest(
+ callback_request = TaskCallbackRequest(
full_filepath="filepath",
simple_task_instance=SimpleTaskInstance.from_ti(ti),
processor_subdir="/test_dir",
is_failure_callback=True,
)
- json_str = input.to_json()
+ json_str = callback_request.to_json()
result = TaskCallbackRequest.from_json(json_str)
- assert input == result
+ assert callback_request == result

def test_simple_ti_roundtrip_exec_config_pod(self):
"""A callback request including a TI with an exec config with a V1Pod should safely roundtrip."""
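
A detail these test hunks rely on: pytest binds parametrize argnames to test-function parameters by name, so the string in @pytest.mark.parametrize and the signature must be renamed together. A standalone sketch of that coupling (illustrative, not the Airflow test):

import pytest

@pytest.mark.parametrize("callback_request, request_class", [("x", str), (1, int)])
def test_roundtrip(callback_request, request_class):
    # the parameter names must match the argnames string above, or pytest errors out
    assert isinstance(callback_request, request_class)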
32 changes: 16 additions & 16 deletions tests/providers/amazon/aws/operators/test_ec2.py
@@ -84,8 +84,8 @@ def test_create_multiple_instances(self):
instance_ids = create_instances.execute(None)
assert len(instance_ids) == 5

- for id in instance_ids:
-     assert ec2_hook.get_instance_state(instance_id=id) == "running"
+ for instance_id in instance_ids:
+     assert ec2_hook.get_instance_state(instance_id=instance_id) == "running"


class TestEC2TerminateInstanceOperator(BaseEc2TestClass):
@@ -130,15 +130,15 @@ def test_terminate_multiple_instances(self):
instance_ids = create_instances.execute(None)
assert len(instance_ids) == 5

- for id in instance_ids:
-     assert ec2_hook.get_instance_state(instance_id=id) == "running"
+ for instance_id in instance_ids:
+     assert ec2_hook.get_instance_state(instance_id=instance_id) == "running"

terminate_instance = EC2TerminateInstanceOperator(
task_id="test_terminate_instance", instance_ids=instance_ids
)
terminate_instance.execute(None)
- for id in instance_ids:
-     assert ec2_hook.get_instance_state(instance_id=id) == "terminated"
+ for instance_id in instance_ids:
+     assert ec2_hook.get_instance_state(instance_id=instance_id) == "terminated"


class TestEC2StartInstanceOperator(BaseEc2TestClass):
@@ -253,15 +253,15 @@ def test_hibernate_multiple_instances(self):
instance_ids = create_instances.execute(None)
assert len(instance_ids) == 5

- for id in instance_ids:
-     assert ec2_hook.get_instance_state(instance_id=id) == "running"
+ for instance_id in instance_ids:
+     assert ec2_hook.get_instance_state(instance_id=instance_id) == "running"

hibernate_instance = EC2HibernateInstanceOperator(
task_id="test_hibernate_instance", instance_ids=instance_ids
)
hibernate_instance.execute(None)
- for id in instance_ids:
-     assert ec2_hook.get_instance_state(instance_id=id) == "stopped"
+ for instance_id in instance_ids:
+     assert ec2_hook.get_instance_state(instance_id=instance_id) == "stopped"

@mock_aws
def test_cannot_hibernate_instance(self):
@@ -319,8 +319,8 @@ def test_cannot_hibernate_some_instances(self):
hibernate_test.execute(None)

# assert instance state is running
- for id in instance_ids:
-     assert ec2_hook.get_instance_state(instance_id=id) == "running"
+ for instance_id in instance_ids:
+     assert ec2_hook.get_instance_state(instance_id=instance_id) == "running"


class TestEC2RebootInstanceOperator(BaseEc2TestClass):
@@ -363,12 +363,12 @@ def test_reboot_multiple_instances(self):
instance_ids = create_instances.execute(None)
assert len(instance_ids) == 5

- for id in instance_ids:
-     assert ec2_hook.get_instance_state(instance_id=id) == "running"
+ for instance_id in instance_ids:
+     assert ec2_hook.get_instance_state(instance_id=instance_id) == "running"

terminate_instance = EC2RebootInstanceOperator(
task_id="test_reboot_instance", instance_ids=instance_ids
)
terminate_instance.execute(None)
- for id in instance_ids:
-     assert ec2_hook.get_instance_state(instance_id=id) == "running"
+ for instance_id in instance_ids:
+     assert ec2_hook.get_instance_state(instance_id=instance_id) == "running"
4 changes: 2 additions & 2 deletions tests/providers/amazon/aws/operators/test_ecs.py
@@ -309,8 +309,8 @@ def test_execute_without_failures(
assert self.ecs.arn == f"arn:aws:ecs:us-east-1:012345678910:task/{TASK_ID}"

def test_task_id_parsing(self):
- id = EcsRunTaskOperator._get_ecs_task_id(f"arn:aws:ecs:us-east-1:012345678910:task/{TASK_ID}")
- assert id == TASK_ID
+ task_id = EcsRunTaskOperator._get_ecs_task_id(f"arn:aws:ecs:us-east-1:012345678910:task/{TASK_ID}")
+ assert task_id == TASK_ID

@mock.patch.object(EcsBaseOperator, "client")
def test_execute_with_failures(self, client_mock):
