diff --git a/.dockerignore b/.dockerignore index 6e42e741fb40e..d10cfbcaae575 100644 --- a/.dockerignore +++ b/.dockerignore @@ -51,8 +51,6 @@ !.rat-excludes !.flake8 !.dockerignore -!pylintrc -!pylintrc-tests !pytest.ini !CHANGELOG.txt !LICENSE diff --git a/.github/boring-cyborg.yml b/.github/boring-cyborg.yml index 7d97c2f6eafce..d5f76329512a8 100644 --- a/.github/boring-cyborg.yml +++ b/.github/boring-cyborg.yml @@ -203,7 +203,7 @@ firstPRWelcomeComment: > Here are some useful points: - - Pay attention to the quality of your code (flake8, pylint and type annotations). Our [pre-commits]( + - Pay attention to the quality of your code (flake8, mypy and type annotations). Our [pre-commits]( https://github.com/apache/airflow/blob/main/STATIC_CODE_CHECKS.rst#prerequisites-for-pre-commit-hooks) will help you with that. diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a3144f4e19325..569dc17fc31fa 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -308,7 +308,7 @@ jobs: needs: [build-info, ci-images] env: RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }} - SKIP: "pylint,identity" + SKIP: "identity" MOUNT_SELECTED_LOCAL_SOURCES: "true" PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.defaultPythonVersion}} GITHUB_REGISTRY: ${{ needs.ci-images.outputs.githubRegistry }} @@ -338,20 +338,20 @@ jobs: ${{ hashFiles('setup.py', 'setup.cfg') }}" restore-keys: "\ pre-commit-local-installation-${{steps.host-python-version.outputs.host-python-version}}-" - - name: "Cache pre-commit envs: no-pylint" + - name: "Cache pre-commit envs" uses: actions/cache@v2 with: path: ~/.cache/pre-commit - key: "pre-commit-no-pylint-${{steps.host-python-version.outputs.host-python-version}}-\ + key: "pre-commit-${{steps.host-python-version.outputs.host-python-version}}-\ ${{ hashFiles('.pre-commit-config.yaml') }}" - restore-keys: pre-commit-no-pylint-${{steps.host-python-version.outputs.host-python-version}} + restore-keys: pre-commit-${{steps.host-python-version.outputs.host-python-version}} - name: "Cache eslint" uses: actions/cache@v2 with: path: 'airflow/ui/node_modules' key: ${{ runner.os }}-ui-node-modules-${{ hashFiles('airflow/ui/**/yarn.lock') }} - - name: "Static checks: except pylint" + - name: "Static checks" run: ./scripts/ci/static_checks/run_static_checks.sh env: VERBOSE: false @@ -366,7 +366,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" needs: [build-info] env: RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }} - SKIP: "build,mypy,flake8,pylint,bats-in-container-tests,identity" + SKIP: "build,mypy,flake8,bats-in-container-tests,identity" MOUNT_SELECTED_LOCAL_SOURCES: "true" PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.defaultPythonVersion}} if: needs.build-info.outputs.basic-checks-only == 'true' @@ -398,68 +398,17 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" ${{ hashFiles('setup.py', 'setup.cfg') }}" restore-keys: "\ pre-commit-local-installation-${{steps.host-python-version.outputs.host-python-version}}-" - - name: "Cache pre-commit envs: no-pylint" + - name: "Cache pre-commit envs" uses: actions/cache@v2 with: path: ~/.cache/pre-commit - key: "pre-commit-no-pylint-${{steps.host-python-version.outputs.host-python-version}}-\ + key: "pre-commit-basic-${{steps.host-python-version.outputs.host-python-version}}-\ ${{ hashFiles('.pre-commit-config.yaml') }}" - restore-keys: pre-commit-no-pylint-${{steps.host-python-version.outputs.host-python-version}} + restore-keys: 
pre-commit-basic-${{steps.host-python-version.outputs.host-python-version}} - name: "Static checks: basic checks only" run: ./scripts/ci/static_checks/run_basic_static_checks.sh "${{ github.sha }}" env: VERBOSE: false - - static-checks-pylint: - timeout-minutes: 60 - name: "Pylint" - runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }} - needs: [build-info, ci-images] - if: needs.build-info.outputs.basic-checks-only == 'false' - env: - RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }} - # We want to make sure we have latest sources as only in_container scripts are added - # to the image but we want to static-check all of them - MOUNT_SELECTED_LOCAL_SOURCES: "true" - PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.defaultPythonVersion}} - GITHUB_REGISTRY: ${{ needs.ci-images.outputs.githubRegistry }} - steps: - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v2 - with: - persist-credentials: false - - name: "Setup python" - uses: actions/setup-python@v2 - with: - python-version: ${{needs.build-info.outputs.defaultPythonVersion}} - - name: "Free space" - run: ./scripts/ci/tools/ci_free_space_on_ci.sh - - name: "Prepare CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}" - run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh - - name: "Get Python version" - run: "echo \"::set-output name=host-python-version::$(python -c - 'import platform; print(platform.python_version())')\"" - id: host-python-version - - name: "Cache pre-commit local-installation" - uses: actions/cache@v2 - with: - path: ~/.local - key: "pre-commit-local-installation-${{steps.host-python-version.outputs.host-python-version}}-\ -${{ hashFiles('setup.py', 'setup.cfg') }}" - restore-keys: "\ -pre-commit-local-installation-${{steps.host-python-version.outputs.host-python-version}}-" - - name: "Cache pre-commit envs - pylint" - uses: actions/cache@v2 - with: - path: ~/.cache/pre-commit - key: "pre-commit-pylint-${{steps.host-python-version.outputs.host-python-version}}-\ -${{ hashFiles('.pre-commit-config.yaml') }}" - restore-keys: pre-commit-pylint-${{steps.host-python-version.outputs.host-python-version}} - - name: "Static checks: pylint" - run: ./scripts/ci/static_checks/run_static_checks.sh pylint - env: - VERBOSE: false - docs: timeout-minutes: 45 name: "Build docs" @@ -1042,7 +991,6 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" needs: - build-info - static-checks - - static-checks-pylint - tests-sqlite - tests-postgres - tests-mysql @@ -1105,7 +1053,6 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" needs: - build-info - static-checks - - static-checks-pylint - tests-sqlite - tests-postgres - tests-mysql @@ -1152,7 +1099,6 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" - ci-images - prod-images - static-checks - - static-checks-pylint - tests-sqlite - tests-mysql - tests-postgres @@ -1221,7 +1167,6 @@ ${{ hashFiles('.pre-commit-config.yaml') }}" - docs - build-info - static-checks - - static-checks-pylint - tests-sqlite - tests-postgres - tests-mysql diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f27aeb03b4dcd..c10167e330118 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -218,7 +218,7 @@ repos: name: Run pydocstyle args: - --convention=pep257 - - --add-ignore=D100,D102,D104,D105,D107,D205,D400,D401 + - --add-ignore=D100,D102,D103,D104,D105,D107,D205,D400,D401 exclude: | (?x) ^tests/.*\.py$| @@ -229,6 +229,14 @@ repos: .*example_dags/.*| ^chart/.*\.py$| ^airflow/_vendor/ + 
additional_dependencies: ['toml'] + - repo: https://github.com/asottile/yesqa + rev: v1.2.3 + hooks: + - id: yesqa + exclude: | + (?x) + ^airflow/_vendor/ - repo: local hooks: - id: lint-openapi @@ -339,14 +347,7 @@ repos: ^docs/apache-airflow-providers-apache-cassandra/connections/cassandra\.rst$| ^docs/apache-airflow-providers-apache-hive/commits\.rst$| git| - ^pylintrc | ^CHANGELOG\.txt$ - - id: consistent-pylint - language: pygrep - name: Check for inconsistent pylint disable/enable without space - entry: "pylint:disable|pylint:enable" - pass_filenames: true - files: \.py$ - id: base-operator language: pygrep name: Check BaseOperator[Link] core imports @@ -609,11 +610,12 @@ repos: additional_dependencies: ['flynt==0.63'] files: \.py$ exclude: ^airflow/_vendor/ - - id: bats-in-container-tests - name: Run in container bats tests - language: system - entry: ./scripts/ci/pre_commit/pre_commit_in_container_bats_test.sh - files: ^tests/bats/in_container/.*\.bats$|^scripts/in_container/.*sh + - id: ui-lint + name: ESLint against airflow/ui + language: node + 'types_or': [javascript, tsx, ts] + files: ^airflow/ui/ + entry: ./scripts/ci/static_checks/eslint.sh pass_filenames: false ## ADD MOST PRE-COMMITS ABOVE THAT LINE # The below pre-commits are those requiring CI image to be built @@ -623,6 +625,12 @@ repos: language: system always_run: true pass_filenames: false + - id: bats-in-container-tests + name: Run in container bats tests + language: system + entry: ./scripts/ci/pre_commit/pre_commit_in_container_bats_test.sh + files: ^tests/bats/in_container/.*\.bats$|^scripts/in_container/.*sh + pass_filenames: false - id: mypy name: Run mypy language: system @@ -642,28 +650,6 @@ repos: files: ^docs/.*\.py$ exclude: rtd-deprecation require_serial: false - - id: pylint - name: Run pylint for main code - language: system - entry: ./scripts/ci/pre_commit/pre_commit_pylint.sh - files: \.py$ - exclude: ^scripts/.*\.py$|^dev|^provider_packages|^chart|^tests|^kubernetes_tests|^airflow/_vendor/ - pass_filenames: true - require_serial: true - - id: pylint - name: Run pylint for tests - language: system - entry: env PYLINTRC=pylintrc-tests ./scripts/ci/pre_commit/pre_commit_pylint.sh - files: ^tests/.*\.py$ - pass_filenames: true - require_serial: true - - id: pylint - name: Run pylint for helm chart tests - language: system - entry: env PYLINTRC=pylintrc-tests ./scripts/ci/pre_commit/pre_commit_pylint.sh - files: ^chart/.*\.py$ - pass_filenames: true - require_serial: true - id: flake8 name: Run flake8 language: system diff --git a/.rat-excludes b/.rat-excludes index 145bdf67310a8..70bd984891e9c 100644 --- a/.rat-excludes +++ b/.rat-excludes @@ -83,9 +83,6 @@ PROVIDER_CHANGES*.md manifests/* redirects.txt -# Temporary list of files to make compatible with Pylint -pylint_todo.txt - # Locally mounted files .*egg-info/* .bash_history diff --git a/BREEZE.rst b/BREEZE.rst index 549adddf1538e..2082744a53465 100644 --- a/BREEZE.rst +++ b/BREEZE.rst @@ -2239,22 +2239,22 @@ This is the current syntax for `./breeze <./breeze>`_: Run selected static checks for currently changed files. You should specify static check that you would like to run or 'all' to run all checks. 
One of: - all all-but-pylint airflow-config-yaml airflow-providers-available - airflow-provider-yaml-files-ok base-operator bats-tests bats-in-container-tests - black build build-providers-dependencies check-apache-license check-builtin-literals + all airflow-config-yaml airflow-providers-available airflow-provider-yaml-files-ok + base-operator bats-tests bats-in-container-tests black build + build-providers-dependencies check-apache-license check-builtin-literals check-executables-have-shebangs check-hooks-apply check-integrations - check-merge-conflict check-xml consistent-pylint daysago-import-check - debug-statements detect-private-key doctoc dont-use-safe-filter end-of-file-fixer - fix-encoding-pragma flake8 flynt forbid-tabs helm-lint identity - incorrect-use-of-LoggingMixin insert-license isort json-schema language-matters - lint-dockerfile lint-openapi markdownlint mermaid mixed-line-ending mypy mypy-helm - no-providers-in-core-examples no-relative-imports pre-commit-descriptions - pre-commit-hook-names provide-create-sessions providers-changelogs - providers-init-file provider-yamls pydevd pydocstyle pylint pylint-tests - python-no-log-warn pyupgrade restrict-start_date rst-backticks setup-order - setup-extra-packages shellcheck sort-in-the-wild sort-spelling-wordlist stylelint - trailing-whitespace ui-lint update-breeze-file update-extras update-local-yml-file - update-setup-cfg-file verify-db-migrations-documented version-sync yamllint + check-merge-conflict check-xml daysago-import-check debug-statements + detect-private-key doctoc dont-use-safe-filter end-of-file-fixer fix-encoding-pragma + flake8 flynt forbid-tabs helm-lint identity incorrect-use-of-LoggingMixin + insert-license isort json-schema language-matters lint-dockerfile lint-openapi + markdownlint mermaid mixed-line-ending mypy mypy-helm no-providers-in-core-examples + no-relative-imports pre-commit-descriptions pre-commit-hook-names + provide-create-sessions providers-changelogs providers-init-file provider-yamls + pydevd pydocstyle python-no-log-warn pyupgrade restrict-start_date rst-backticks + setup-order setup-extra-packages shellcheck sort-in-the-wild sort-spelling-wordlist + stylelint trailing-whitespace ui-lint update-breeze-file update-extras + update-local-yml-file update-setup-cfg-file verify-db-migrations-documented + version-sync yamllint yesqa You can pass extra arguments including options to the pre-commit framework as passed after --. For example: diff --git a/CI.rst b/CI.rst index 260d570678553..1ab37d157ba58 100644 --- a/CI.rst +++ b/CI.rst @@ -669,9 +669,7 @@ This workflow is a regular workflow that performs all checks of Airflow code. 
+---------------------------+----------------------------------------------+-------+-------+------+ | CI Images | Waits for CI Images (3) | Yes | Yes | Yes | +---------------------------+----------------------------------------------+-------+-------+------+ -| Static checks | Performs static checks without pylint | Yes | Yes | Yes | -+---------------------------+----------------------------------------------+-------+-------+------+ -| Static checks: pylint | Performs pylint static checks | Yes | Yes | Yes | +| Static checks | Performs static checks | Yes | Yes | Yes | +---------------------------+----------------------------------------------+-------+-------+------+ | Build docs | Builds documentation | Yes | Yes | Yes | +---------------------------+----------------------------------------------+-------+-------+------+ diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 21cd85e505461..3cb96ee76c5f8 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -925,7 +925,7 @@ as described in the static code checks documentation. Coding style and best practices =============================== -Most of our coding style rules are enforced programmatically by flake8 and pylint (which are run automatically +Most of our coding style rules are enforced programmatically by flake8 and mypy (which are run automatically on every pull request), but there are some rules that are not yet automated and are more Airflow specific or semantic than style diff --git a/CONTRIBUTORS_QUICK_START.rst b/CONTRIBUTORS_QUICK_START.rst index d487b2d3eac13..40fd98e47b64b 100644 --- a/CONTRIBUTORS_QUICK_START.rst +++ b/CONTRIBUTORS_QUICK_START.rst @@ -614,8 +614,7 @@ All Tests are inside ./tests directory. entrypoint_exec.sh* run_install_and_test_provider_packages.sh* _in_container_script_init.sh* run_mypy.sh* prod/ run_prepare_provider_packages.sh* - refresh_pylint_todo.sh* run_prepare_provider_documentation.sh* - run_ci_tests.sh* run_pylint.sh* + run_ci_tests.sh* run_prepare_provider_documentation.sh* run_clear_tmp.sh* run_system_tests.sh* run_docs_build.sh* run_tmux_welcome.sh* run_extract_tests.sh* stop_tmux_airflow.sh* @@ -812,8 +811,7 @@ To avoid burden on CI infrastructure and to save time, Pre-commit hooks can be r entrypoint_exec.sh* run_install_and_test_provider_packages.sh* _in_container_script_init.sh* run_mypy.sh* prod/ run_prepare_provider_packages.sh* - refresh_pylint_todo.sh* run_prepare_provider_documentation.sh* - run_ci_tests.sh* run_pylint.sh* + run_ci_tests.sh* run_prepare_provider_documentation.sh* run_clear_tmp.sh* run_system_tests.sh* run_docs_build.sh* run_tmux_welcome.sh* run_extract_tests.sh* stop_tmux_airflow.sh* @@ -857,14 +855,6 @@ To avoid burden on CI infrastructure and to save time, Pre-commit hooks can be r Pre-commit Hooks - - |Pylint Static Code Checks| - - .. |Pylint Static Code Checks| raw:: html - - Pylint Static Code Checks - - - |Running Static Code Checks via Breeze| .. |Running Static Code Checks via Breeze| raw:: html diff --git a/PULL_REQUEST_WORKFLOW.rst b/PULL_REQUEST_WORKFLOW.rst index 3e3cce6b3a10e..0f48967842fd6 100644 --- a/PULL_REQUEST_WORKFLOW.rst +++ b/PULL_REQUEST_WORKFLOW.rst @@ -109,7 +109,7 @@ We have the following test types (separated by packages in which they are): We also have several special kinds of tests that are not separated by packages but they are marked with pytest markers. They can be found in any of those packages and they can be selected by the appropriate -pylint custom command line options. 
See `TESTING.rst `_ for details but those are: +pytest custom command line options. See `TESTING.rst `_ for details but those are: * Integration - tests that require external integration images running in docker-compose * Quarantined - tests that are flaky and need to be fixed @@ -175,11 +175,11 @@ The logic implemented for the changes works as follows: Quarantined tests are described in `TESTING.rst `_ 11) There is a special case of static checks. In case the above logic determines that the CI image - needs to be build, we run long and more comprehensive version of static checks - including Pylint, + needs to be build, we run long and more comprehensive version of static checks - including Mypy, Flake8. And those tests are run on all files, no matter how many files changed. In case the image is not built, we run only simpler set of changes - the longer static checks that require CI image are skipped, and we only run the tests on the files that changed in the incoming - commit - unlike pylint/flake8/mypy, those static checks are per-file based and they should not miss any + commit - unlike flake8/mypy, those static checks are per-file based and they should not miss any important change. Similarly to selective tests we also run selective security scans. In Pull requests, diff --git a/STATIC_CODE_CHECKS.rst b/STATIC_CODE_CHECKS.rst index 2c743d4c386c9..5ac8f708f5937 100644 --- a/STATIC_CODE_CHECKS.rst +++ b/STATIC_CODE_CHECKS.rst @@ -60,7 +60,11 @@ require Breeze Docker images to be installed locally: ----------------------------------- ---------------------------------------------------------------- ------------ ``black`` Runs Black (the uncompromising Python code formatter) ----------------------------------- ---------------------------------------------------------------- ------------ -``build`` Builds image for mypy, pylint, flake8 * +``blacken-docs`` Run black on python code blocks in documentation files +----------------------------------- ---------------------------------------------------------------- ------------ +``boring-cyborg`` Checks for Boring Cyborg configuration consistency +----------------------------------- ---------------------------------------------------------------- ------------ +``build`` Builds image for mypy, flake8 * ----------------------------------- ---------------------------------------------------------------- ------------ ``build-providers-dependencies`` Regenerates the JSON file with cross-provider dependencies ----------------------------------- ---------------------------------------------------------------- ------------ @@ -78,8 +82,6 @@ require Breeze Docker images to be installed locally: ----------------------------------- ---------------------------------------------------------------- ------------ ``check-xml`` Checks XML files with xmllint ----------------------------------- ---------------------------------------------------------------- ------------ -``consistent-pylint`` Consistent usage of pylint enable/disable with space ------------------------------------ ---------------------------------------------------------------- ------------ ``daysago-import-check`` Checks if daysago is properly imported ----------------------------------- ---------------------------------------------------------------- ------------ ``debug-statements`` Detects accidentally committed debug statements @@ -150,8 +152,6 @@ require Breeze Docker images to be installed locally: ----------------------------------- 
---------------------------------------------------------------- ------------ ``pydocstyle`` Runs pydocstyle ----------------------------------- ---------------------------------------------------------------- ------------ -``pylint`` Runs pylint check * ------------------------------------ ---------------------------------------------------------------- ------------ ``python-no-log-warn`` Checks if there are no deprecate log warn ----------------------------------- ---------------------------------------------------------------- ------------ ``restrict-start_date`` 'start_date' should not be in default_args in example_dags @@ -188,6 +188,8 @@ require Breeze Docker images to be installed locally: ----------------------------------- ---------------------------------------------------------------- ------------ ``www-lint`` Static checks of js in airflow/www/static/js/ folder ----------------------------------- ---------------------------------------------------------------- ------------ +``yesqa`` Removes unnecessary noqa statements +----------------------------------- ---------------------------------------------------------------- ------------ ``yamllint`` Checks YAML files with yamllint =================================== ================================================================ ============ @@ -288,7 +290,7 @@ code. But you can run pre-commit hooks manually as needed. .. code-block:: bash - SKIP=pylint,mypy pre-commit run --all-files + SKIP=mypy,flake8,build pre-commit run --all-files You can always skip running the tests by providing ``--no-verify`` flag to the @@ -296,67 +298,6 @@ You can always skip running the tests by providing ``--no-verify`` flag to the To check other usage types of the pre-commit framework, see `Pre-commit website `__. -Pylint Static Code Checks -------------------------- - -We are in the process of fixing the code flagged with pylint checks for the whole Airflow project. -This is a huge task so we implemented an incremental approach for the process. -Currently most of the code is excluded from pylint checks via scripts/ci/pylint_todo.txt. -We have an open JIRA issue AIRFLOW-4364 which has a number of sub-tasks for each of -the modules that should be made compatible. Fixing problems identified with pylint is one of -straightforward and easy tasks to do (but time-consuming), so if you are a first-time -contributor to Airflow, you can choose one of the sub-tasks as your first issue to fix. - -To fix a pylint issue, do the following: - -1. Remove module/modules from the - `scripts/ci/static_checks/pylint_todo.txt `__. - -2. Run ``__. - -3. Fix all the issues reported by pylint. - -4. Re-run ``__. - -5. If you see "success", submit a PR following - `Pull Request guidelines <#pull-request-guidelines>`__. - - -These are guidelines for fixing errors reported by pylint: - -- Fix the errors rather than disable pylint checks. Often you can easily - refactor the code (IntelliJ/PyCharm might be helpful when extracting methods - in complex code or moving methods around). - -- If disabling a particular problem, make sure to disable only that error by - using the symbolic name of the error as reported by pylint. - -.. code-block:: python - - import airflow.* # pylint: disable=wildcard-import - - -- If there is a single line where you need to disable a particular error, - consider adding a comment to the line that causes the problem. For example: - -.. 
code-block:: python - - def MakeSummary(pcoll, metric_fn, metric_keys): # pylint: disable=invalid-name - - -- For multiple lines/block of code, to disable an error, you can surround the - block with ``pylint:disable/pylint:enable`` comment lines. For example: - -.. code-block:: python - - # pylint: disable=too-few-public-methods - class LoginForm(Form): - """Form for the user""" - username = StringField('Username', [InputRequired()]) - password = PasswordField('Password', [InputRequired()]) - # pylint: enable=too-few-public-methods - - Running Static Code Checks via Breeze ------------------------------------- @@ -364,14 +305,8 @@ The static code checks can be launched using the Breeze environment. You run the static code checks via ``./breeze static-check`` or commands. -Note that it may take a lot of time to run checks for all files with pylint on macOS due to a slow -filesystem for macOS Docker. As a workaround, you can add their arguments after ``--`` as extra arguments. -For example ``--files`` flag. By default those checks are run only on the files you've changed in your -commit, but you can also add ``-- --all-files`` flag to run check on all files. - You can see the list of available static checks either via ``--help`` flag or by using the autocomplete -option. Note that the ``all`` static check runs all configured static checks. Also since pylint tests take -a lot of time, you can run a special ``all-but-pylint`` check that skips pylint checks. +option. Note that the ``all`` static check runs all configured static checks. Run the ``mypy`` check for the currently staged changes: @@ -409,30 +344,11 @@ Run all tests for all files: ./breeze static-check all -- --all-files -Run all tests but pylint for all files: - -.. code-block:: bash - - ./breeze static-check all-but-pylint --all-files - -Run pylint checks for all changed files: - -.. code-block:: bash - - ./breeze static-check pylint - -Run pylint checks for selected files: - -.. code-block:: bash - - ./breeze static-check pylint -- --files airflow/configuration.py - - -Run pylint checks for all files: +Run all tests for last commit : .. code-block:: bash - ./breeze static-check pylint -- --all-files + ./breeze static-check all -- --ref-from HEAD^ --ref-to HEAD The ``license`` check is run via a separate script and a separate Docker image containing the @@ -454,7 +370,6 @@ this, run the following scripts: * ``_ - runs Flake8 source code style enforcement tool. * ``_ - runs lint checker for the dockerfiles. * ``_ - runs a check for Mypy type annotation consistency. -* ``_ - runs Pylint static code checker. The scripts may ask you to rebuild the images, if needed. @@ -471,7 +386,6 @@ If you are already in the Breeze Docker environment (by running the ``./breeze`` you can also run the same static checks via run_scripts: * Mypy: ``./scripts/in_container/run_mypy.sh airflow tests`` -* Pylint: ``./scripts/in_container/run_pylint.sh`` * Flake8: ``./scripts/in_container/run_flake8.sh`` * License check: ``./scripts/in_container/run_check_licence.sh`` * Documentation: ``./scripts/in_container/run_docs_build.sh`` @@ -486,20 +400,20 @@ In the Docker container: .. code-block:: - ./scripts/in_container/run_pylint.sh ./airflow/example_dags/ + ./scripts/in_container/run_mypy.sh ./airflow/example_dags/ or .. code-block:: - ./scripts/in_container/run_pylint.sh ./airflow/example_dags/test_utils.py + ./scripts/in_container/run_mypy.sh ./airflow/example_dags/test_utils.py On the host: .. 
code-block:: - ./scripts/ci/static_checks/pylint.sh ./airflow/example_dags/ + ./scripts/ci/static_checks/mypy.sh ./airflow/example_dags/ .. code-block:: - ./scripts/ci/static_checks/pylint.sh ./airflow/example_dags/test_utils.py + ./scripts/ci/static_checks/mypy.sh ./airflow/example_dags/test_utils.py diff --git a/airflow/__init__.py b/airflow/__init__.py index 6d04886252a50..a448491057467 100644 --- a/airflow/__init__.py +++ b/airflow/__init__.py @@ -27,7 +27,7 @@ """ # flake8: noqa: F401 -# pylint: disable=wrong-import-position + import sys from typing import Callable, Optional @@ -56,11 +56,11 @@ def __getattr__(name): # PEP-562: Lazy loaded attributes on python modules if name == "DAG": - from airflow.models.dag import DAG # pylint: disable=redefined-outer-name + from airflow.models.dag import DAG return DAG if name == "AirflowException": - from airflow.exceptions import AirflowException # pylint: disable=redefined-outer-name + from airflow.exceptions import AirflowException return AirflowException raise AttributeError(f"module {__name__} has no attribute {name}") @@ -77,8 +77,8 @@ def __getattr__(name): providers_manager.ProvidersManager().initialize_providers_manager() -# This is never executed, but tricks static analyzers (PyDev, PyCharm, -# pylint, etc.) into knowing the types of these symbols, and what +# This is never executed, but tricks static analyzers (PyDev, PyCharm,) +# into knowing the types of these symbols, and what # they contain. STATICA_HACK = True globals()['kcah_acitats'[::-1].upper()] = False diff --git a/airflow/api/auth/backend/basic_auth.py b/airflow/api/auth/backend/basic_auth.py index 623beaf90b65e..071e7fbf7b6f4 100644 --- a/airflow/api/auth/backend/basic_auth.py +++ b/airflow/api/auth/backend/basic_auth.py @@ -30,7 +30,7 @@ def init_app(_): """Initializes authentication backend""" -T = TypeVar("T", bound=Callable) # pylint: disable=invalid-name +T = TypeVar("T", bound=Callable) def auth_current_user() -> Optional[User]: diff --git a/airflow/api/auth/backend/default.py b/airflow/api/auth/backend/default.py index d699c912dceaa..6b0a1a6c67907 100644 --- a/airflow/api/auth/backend/default.py +++ b/airflow/api/auth/backend/default.py @@ -26,7 +26,7 @@ def init_app(_): """Initializes authentication backend""" -T = TypeVar("T", bound=Callable) # pylint: disable=invalid-name +T = TypeVar("T", bound=Callable) def requires_authentication(function: T): diff --git a/airflow/api/auth/backend/deny_all.py b/airflow/api/auth/backend/deny_all.py index 9ef05d0b8193e..614e263684ad8 100644 --- a/airflow/api/auth/backend/deny_all.py +++ b/airflow/api/auth/backend/deny_all.py @@ -28,14 +28,14 @@ def init_app(_): """Initializes authentication""" -T = TypeVar("T", bound=Callable) # pylint: disable=invalid-name +T = TypeVar("T", bound=Callable) def requires_authentication(function: T): """Decorator for functions that require authentication""" @wraps(function) - def decorated(*args, **kwargs): # pylint: disable=unused-argument + def decorated(*args, **kwargs): return Response("Forbidden", 403) return cast(T, decorated) diff --git a/airflow/api/auth/backend/kerberos_auth.py b/airflow/api/auth/backend/kerberos_auth.py index e57d477cbbf48..fb76e8a1aa0fa 100644 --- a/airflow/api/auth/backend/kerberos_auth.py +++ b/airflow/api/auth/backend/kerberos_auth.py @@ -54,11 +54,11 @@ log = logging.getLogger(__name__) -# pylint: disable=c-extension-no-member + CLIENT_AUTH: Optional[Union[Tuple[str, str], Any]] = HTTPKerberosAuth(service='airflow') -class KerberosService: # pylint: 
disable=too-few-public-methods +class KerberosService: """Class to keep information about the Kerberos Service initialized""" def __init__(self): @@ -126,7 +126,7 @@ def _gssapi_authenticate(token): kerberos.authGSSServerClean(state) -T = TypeVar("T", bound=Callable) # pylint: disable=invalid-name +T = TypeVar("T", bound=Callable) def requires_authentication(function: T): diff --git a/airflow/api/client/json_client.py b/airflow/api/client/json_client.py index 1ffe7fd88cd5b..2d5fd0230466e 100644 --- a/airflow/api/client/json_client.py +++ b/airflow/api/client/json_client.py @@ -31,12 +31,12 @@ def _request(self, url, method='GET', json=None): } if json is not None: params['json'] = json - resp = getattr(self._session, method.lower())(**params) # pylint: disable=not-callable + resp = getattr(self._session, method.lower())(**params) if not resp.ok: # It is justified here because there might be many resp types. try: data = resp.json() - except Exception: # noqa pylint: disable=broad-except + except Exception: data = {} raise OSError(data.get('error', 'Server error')) diff --git a/airflow/api/common/experimental/delete_dag.py b/airflow/api/common/experimental/delete_dag.py index d27c21f18b2fa..4462070a9b409 100644 --- a/airflow/api/common/experimental/delete_dag.py +++ b/airflow/api/common/experimental/delete_dag.py @@ -51,7 +51,7 @@ def delete_dag(dag_id: str, keep_records_in_log: bool = True, session=None) -> i count = 0 - for model in models.base.Base._decl_class_registry.values(): # noqa pylint: disable=protected-access + for model in models.base.Base._decl_class_registry.values(): if hasattr(model, "dag_id"): if keep_records_in_log and model.__name__ == 'Log': continue diff --git a/airflow/api/common/experimental/mark_tasks.py b/airflow/api/common/experimental/mark_tasks.py index 7612270b921f5..6adfd11106f28 100644 --- a/airflow/api/common/experimental/mark_tasks.py +++ b/airflow/api/common/experimental/mark_tasks.py @@ -70,7 +70,7 @@ def set_state( state: str = State.SUCCESS, commit: bool = False, session=None, -): # pylint: disable=too-many-arguments,too-many-locals +): """ Set the state of a task instance and if needed its relatives. 
Can set state for future tasks (calculated from execution_date) and retroactively @@ -134,14 +134,13 @@ def set_state( return tis_altered -# Flake and pylint disagree about correct indents here -def all_subdag_tasks_query(sub_dag_run_ids, session, state, confirmed_dates): # noqa: E123 +def all_subdag_tasks_query(sub_dag_run_ids, session, state, confirmed_dates): """Get *all* tasks of the sub dags""" qry_sub_dag = ( session.query(TaskInstance) .filter(TaskInstance.dag_id.in_(sub_dag_run_ids), TaskInstance.execution_date.in_(confirmed_dates)) .filter(or_(TaskInstance.state.is_(None), TaskInstance.state != state)) - ) # noqa: E123 + ) return qry_sub_dag @@ -152,7 +151,7 @@ def get_all_dag_task_query(dag, session, state, task_ids, confirmed_dates): .filter( TaskInstance.dag_id == dag.dag_id, TaskInstance.execution_date.in_(confirmed_dates), - TaskInstance.task_id.in_(task_ids), # noqa: E123 + TaskInstance.task_id.in_(task_ids), ) .filter(or_(TaskInstance.state.is_(None), TaskInstance.state != state)) ) diff --git a/airflow/api/common/experimental/trigger_dag.py b/airflow/api/common/experimental/trigger_dag.py index 519079e0d30b4..f82f88feeac7b 100644 --- a/airflow/api/common/experimental/trigger_dag.py +++ b/airflow/api/common/experimental/trigger_dag.py @@ -34,7 +34,7 @@ def _trigger_dag( conf: Optional[Union[dict, str]] = None, execution_date: Optional[datetime] = None, replace_microseconds: bool = True, -) -> List[DagRun]: # pylint: disable=too-many-arguments +) -> List[DagRun]: """Triggers DAG run. :param dag_id: DAG ID diff --git a/airflow/api_connexion/endpoints/dag_run_endpoint.py b/airflow/api_connexion/endpoints/dag_run_endpoint.py index 2f65f06a827d8..27d13b6f5af3c 100644 --- a/airflow/api_connexion/endpoints/dag_run_endpoint.py +++ b/airflow/api_connexion/endpoints/dag_run_endpoint.py @@ -96,7 +96,7 @@ def get_dag_runs( offset=None, limit=None, order_by='id', -): # pylint: disable=too-many-arguments +): """Get all DAG Runs.""" query = session.query(DagRun) @@ -134,7 +134,7 @@ def _fetch_dag_runs( limit, offset, order_by, -): # pylint: disable=too-many-arguments +): query = _apply_date_filters_to_query( query, end_date_gte, diff --git a/airflow/api_connexion/endpoints/health_endpoint.py b/airflow/api_connexion/endpoints/health_endpoint.py index ea0d62d57a594..ce7666ba320af 100644 --- a/airflow/api_connexion/endpoints/health_endpoint.py +++ b/airflow/api_connexion/endpoints/health_endpoint.py @@ -33,7 +33,7 @@ def get_health(): latest_scheduler_heartbeat = scheduler_job.latest_heartbeat.isoformat() if scheduler_job.is_alive(): scheduler_status = HEALTHY - except Exception: # pylint: disable=broad-except + except Exception: metadatabase_status = UNHEALTHY payload = { diff --git a/airflow/api_connexion/endpoints/task_instance_endpoint.py b/airflow/api_connexion/endpoints/task_instance_endpoint.py index 418bded44dcb0..51c48bd7c79b4 100644 --- a/airflow/api_connexion/endpoints/task_instance_endpoint.py +++ b/airflow/api_connexion/endpoints/task_instance_endpoint.py @@ -126,7 +126,7 @@ def get_task_instances( queue: Optional[List[str]] = None, offset: Optional[int] = None, session=None, -): # pylint: disable=too-many-arguments +): """Get list of task instances.""" base_query = session.query(TI) diff --git a/airflow/api_connexion/parameters.py b/airflow/api_connexion/parameters.py index 8e06301676900..649a0f4dee0bb 100644 --- a/airflow/api_connexion/parameters.py +++ b/airflow/api_connexion/parameters.py @@ -64,7 +64,7 @@ def check_limit(value: int): return value -T = TypeVar("T", 
bound=Callable) # pylint: disable=invalid-name +T = TypeVar("T", bound=Callable) def format_parameters(params_formatters: Dict[str, Callable[..., bool]]) -> Callable[[T], T]: diff --git a/airflow/api_connexion/schemas/common_schema.py b/airflow/api_connexion/schemas/common_schema.py index f8d6e48296d83..22b18121ca292 100644 --- a/airflow/api_connexion/schemas/common_schema.py +++ b/airflow/api_connexion/schemas/common_schema.py @@ -155,12 +155,12 @@ class ClassReferenceSchema(Schema): def _get_module(self, obj): if isinstance(obj, SerializedBaseOperator): - return obj._task_module # pylint: disable=protected-access + return obj._task_module return inspect.getmodule(obj).__name__ def _get_class_name(self, obj): if isinstance(obj, SerializedBaseOperator): - return obj._task_type # pylint: disable=protected-access + return obj._task_type if isinstance(obj, type): return obj.__name__ return type(obj).__name__ diff --git a/airflow/api_connexion/schemas/connection_schema.py b/airflow/api_connexion/schemas/connection_schema.py index 44e3224c812ed..c3b03888a641c 100644 --- a/airflow/api_connexion/schemas/connection_schema.py +++ b/airflow/api_connexion/schemas/connection_schema.py @@ -39,7 +39,7 @@ class Meta: port = auto_field() -class ConnectionSchema(ConnectionCollectionItemSchema): # pylint: disable=too-many-ancestors +class ConnectionSchema(ConnectionCollectionItemSchema): """Connection schema""" password = auto_field(load_only=True) diff --git a/airflow/api_connexion/schemas/error_schema.py b/airflow/api_connexion/schemas/error_schema.py index 5ae228f3c0c9b..c9462b5f967a8 100644 --- a/airflow/api_connexion/schemas/error_schema.py +++ b/airflow/api_connexion/schemas/error_schema.py @@ -19,7 +19,7 @@ from marshmallow import Schema, fields from marshmallow_sqlalchemy import SQLAlchemySchema, auto_field -from airflow.models.errors import ImportError # pylint: disable=redefined-builtin +from airflow.models.errors import ImportError class ImportErrorSchema(SQLAlchemySchema): diff --git a/airflow/api_connexion/security.py b/airflow/api_connexion/security.py index 4faa9ed8f3457..df718568ca98e 100644 --- a/airflow/api_connexion/security.py +++ b/airflow/api_connexion/security.py @@ -22,7 +22,7 @@ from airflow.api_connexion.exceptions import PermissionDenied, Unauthenticated -T = TypeVar("T", bound=Callable) # pylint: disable=invalid-name +T = TypeVar("T", bound=Callable) def check_authentication() -> None: diff --git a/airflow/cli/cli_parser.py b/airflow/cli/cli_parser.py index e1a5826ed2f01..1c16c9ff36ca1 100644 --- a/airflow/cli/cli_parser.py +++ b/airflow/cli/cli_parser.py @@ -63,7 +63,7 @@ def _check_value(self, action, value): raise ArgumentError(action, message) if value == 'kubernetes': try: - import kubernetes.client # noqa: F401 pylint: disable=unused-import + import kubernetes.client # noqa: F401 except ImportError: message = ( 'The kubernetes subcommand requires that you pip install the kubernetes python client.' 
@@ -89,7 +89,6 @@ def error(self, message): class Arg: """Class to keep information about command line argument""" - # pylint: disable=redefined-builtin,unused-argument,too-many-arguments def __init__( self, flags=_UNSET, @@ -113,8 +112,6 @@ def __init__( self.kwargs[k] = v - # pylint: enable=redefined-builtin,unused-argument,too-many-arguments - def add_to_parser(self, parser: argparse.ArgumentParser): """Add this argument to an ArgumentParser""" parser.add_argument(*self.flags, **self.kwargs) @@ -1641,7 +1638,7 @@ class AirflowHelpFormatter(argparse.HelpFormatter): """ def _format_action(self, action: Action): - if isinstance(action, argparse._SubParsersAction): # pylint: disable=protected-access + if isinstance(action, argparse._SubParsersAction): parts = [] action_header = self._format_action_invocation(action) @@ -1649,7 +1646,7 @@ def _format_action(self, action: Action): parts.append(action_header) self._indent() - subactions = action._get_subactions() # pylint: disable=protected-access + subactions = action._get_subactions() action_subcommands, group_subcommands = partition( lambda d: isinstance(ALL_COMMANDS_DICT[d.dest], GroupCommand), subactions ) @@ -1703,9 +1700,7 @@ def get_long_option(arg: Arg): yield from sorted(optional, key=lambda x: get_long_option(x).lower()) -def _add_command( - subparsers: argparse._SubParsersAction, sub: CLICommand # pylint: disable=protected-access -) -> None: +def _add_command(subparsers: argparse._SubParsersAction, sub: CLICommand) -> None: sub_proc = subparsers.add_parser( sub.name, help=sub.help, description=sub.description or sub.help, epilog=sub.epilog ) diff --git a/airflow/cli/commands/celery_command.py b/airflow/cli/commands/celery_command.py index e01f17774d32b..ba3c45e58a111 100644 --- a/airflow/cli/commands/celery_command.py +++ b/airflow/cli/commands/celery_command.py @@ -180,7 +180,7 @@ def worker(args): @cli_utils.action_logging -def stop_worker(args): # pylint: disable=unused-argument +def stop_worker(args): """Sends SIGTERM to Celery worker""" # Read PID from file pid_file_path, _, _, _ = setup_locations(process=WORKER_PROCESS_NAME) diff --git a/airflow/cli/commands/info_command.py b/airflow/cli/commands/info_command.py index 9eb3e4b42c8dd..590a96ce4911b 100644 --- a/airflow/cli/commands/info_command.py +++ b/airflow/cli/commands/info_command.py @@ -214,7 +214,7 @@ def get_fullname(o): try: handler_names = [get_fullname(handler) for handler in logging.getLogger('airflow.task').handlers] return ", ".join(handler_names) - except Exception: # noqa pylint: disable=broad-except + except Exception: return "NOT AVAILABLE" @property diff --git a/airflow/cli/commands/kubernetes_command.py b/airflow/cli/commands/kubernetes_command.py index 3aad61622c2ec..3c3c8e68cfca3 100644 --- a/airflow/cli/commands/kubernetes_command.py +++ b/airflow/cli/commands/kubernetes_command.py @@ -106,7 +106,7 @@ def cleanup_pods(args): ] ), } - while True: # pylint: disable=too-many-nested-blocks + while True: pod_list = kube_client.list_namespaced_pod(**list_kwargs) for pod in pod_list.items: pod_name = pod.metadata.name @@ -130,7 +130,7 @@ def cleanup_pods(args): print(f"Can't remove POD: {e}", file=sys.stderr) continue print(f'No action taken on pod {pod_name}') - continue_token = pod_list.metadata._continue # pylint: disable=protected-access + continue_token = pod_list.metadata._continue if not continue_token: break list_kwargs["_continue"] = continue_token diff --git a/airflow/cli/commands/plugins_command.py b/airflow/cli/commands/plugins_command.py index 
09a78dffdb94b..2d59e901a187a 100644 --- a/airflow/cli/commands/plugins_command.py +++ b/airflow/cli/commands/plugins_command.py @@ -45,7 +45,7 @@ def dump_plugins(args): return # Remove empty info - if args.output == "table": # pylint: disable=too-many-nested-blocks + if args.output == "table": # We can do plugins_info[0] as the element it will exist as there's # at least one plugin at this point for col in list(plugins_info[0]): diff --git a/airflow/cli/commands/pool_command.py b/airflow/cli/commands/pool_command.py index b116e25f3c5d7..3ea9e652906d1 100644 --- a/airflow/cli/commands/pool_command.py +++ b/airflow/cli/commands/pool_command.py @@ -103,7 +103,7 @@ def pool_import_helper(filepath): with open(filepath) as poolfile: data = poolfile.read() - try: # pylint: disable=too-many-nested-blocks + try: pools_json = json.loads(data) except JSONDecodeError as e: raise SystemExit("Invalid json file: " + str(e)) diff --git a/airflow/cli/commands/role_command.py b/airflow/cli/commands/role_command.py index 0d6ab3399cf01..454a1b2e1dcb4 100644 --- a/airflow/cli/commands/role_command.py +++ b/airflow/cli/commands/role_command.py @@ -27,7 +27,7 @@ @suppress_logs_and_warning def roles_list(args): """Lists all existing roles""" - appbuilder = cached_app().appbuilder # pylint: disable=no-member + appbuilder = cached_app().appbuilder roles = appbuilder.sm.get_all_roles() AirflowConsole().print_as( data=sorted(r.name for r in roles), output=args.output, mapper=lambda x: {"name": x} @@ -38,7 +38,7 @@ def roles_list(args): @suppress_logs_and_warning def roles_create(args): """Creates new empty role in DB""" - appbuilder = cached_app().appbuilder # pylint: disable=no-member + appbuilder = cached_app().appbuilder for role_name in args.role: appbuilder.sm.add_role(role_name) print(f"Added {len(args.role)} role(s)") diff --git a/airflow/cli/commands/sync_perm_command.py b/airflow/cli/commands/sync_perm_command.py index d957fcbfaaeac..eba8c2f56b81c 100644 --- a/airflow/cli/commands/sync_perm_command.py +++ b/airflow/cli/commands/sync_perm_command.py @@ -23,8 +23,8 @@ @cli_utils.action_logging def sync_perm(args): """Updates permissions for existing roles and DAGs""" - appbuilder = cached_app().appbuilder # pylint: disable=no-member - print('Updating permission, view-menu for all existing roles') + appbuilder = cached_app().appbuilder + print('Updating actions and resources for all existing roles') # Add missing permissions for all the Base Views _before_ syncing/creating roles appbuilder.add_permissions(update_perms=True) appbuilder.sm.sync_roles() diff --git a/airflow/cli/commands/task_command.py b/airflow/cli/commands/task_command.py index c0cfb03f66258..0302d4d773d68 100644 --- a/airflow/cli/commands/task_command.py +++ b/airflow/cli/commands/task_command.py @@ -146,7 +146,7 @@ def _run_raw_task(args, ti: TaskInstance) -> None: ", ".join(f"--{o}" for o in unsupported_options), ) ) - ti._run_raw_task( # pylint: disable=protected-access + ti._run_raw_task( mark_success=args.mark_success, job_id=args.job_id, pool=args.pool, @@ -391,7 +391,7 @@ def task_test(args, dag=None): ti.dry_run() else: ti.run(ignore_task_deps=True, ignore_ti_state=True, test_mode=True) - except Exception: # pylint: disable=broad-except + except Exception: if args.post_mortem: debugger = _guess_debugger() debugger.post_mortem() diff --git a/airflow/cli/commands/user_command.py b/airflow/cli/commands/user_command.py index ffaf337d01584..d475e4b9e86dc 100644 --- a/airflow/cli/commands/user_command.py +++ 
b/airflow/cli/commands/user_command.py @@ -32,7 +32,7 @@ @suppress_logs_and_warning def users_list(args): """Lists users at the command line""" - appbuilder = cached_app().appbuilder # pylint: disable=no-member + appbuilder = cached_app().appbuilder users = appbuilder.sm.get_all_users() fields = ['id', 'username', 'email', 'first_name', 'last_name', 'roles'] @@ -44,7 +44,7 @@ def users_list(args): @cli_utils.action_logging def users_create(args): """Creates new user in the DB""" - appbuilder = cached_app().appbuilder # pylint: disable=no-member + appbuilder = cached_app().appbuilder role = appbuilder.sm.find_role(args.role) if not role: valid_roles = appbuilder.sm.get_all_roles() @@ -73,7 +73,7 @@ def users_create(args): @cli_utils.action_logging def users_delete(args): """Deletes user from DB""" - appbuilder = cached_app().appbuilder # pylint: disable=no-member + appbuilder = cached_app().appbuilder try: user = next(u for u in appbuilder.sm.get_all_users() if u.username == args.username) @@ -95,7 +95,7 @@ def users_manage_role(args, remove=False): if args.username and args.email: raise SystemExit('Conflicting args: must supply either --username or --email, but not both') - appbuilder = cached_app().appbuilder # pylint: disable=no-member + appbuilder = cached_app().appbuilder user = appbuilder.sm.find_user(username=args.username) or appbuilder.sm.find_user(email=args.email) if not user: raise SystemExit(f'User "{args.username or args.email}" does not exist') @@ -123,7 +123,7 @@ def users_manage_role(args, remove=False): def users_export(args): """Exports all users to the json file""" - appbuilder = cached_app().appbuilder # pylint: disable=no-member + appbuilder = cached_app().appbuilder users = appbuilder.sm.get_all_users() fields = ['id', 'username', 'email', 'first_name', 'last_name', 'roles'] @@ -154,7 +154,7 @@ def users_import(args): if not os.path.exists(json_file): raise SystemExit(f"File '{json_file}' does not exist") - users_list = None # pylint: disable=redefined-outer-name + users_list = None try: with open(json_file) as file: users_list = json.loads(file.read()) @@ -169,8 +169,8 @@ def users_import(args): print("Updated the following users:\n\t{}".format("\n\t".join(users_updated))) -def _import_users(users_list): # pylint: disable=redefined-outer-name - appbuilder = cached_app().appbuilder # pylint: disable=no-member +def _import_users(users_list): + appbuilder = cached_app().appbuilder users_created = [] users_updated = [] diff --git a/airflow/cli/commands/variable_command.py b/airflow/cli/commands/variable_command.py index 07563e1570587..cacfbdf7aa8b7 100644 --- a/airflow/cli/commands/variable_command.py +++ b/airflow/cli/commands/variable_command.py @@ -91,7 +91,7 @@ def _import_helper(filepath): for k, v in var_json.items(): try: Variable.set(k, v, serialize_json=not isinstance(v, str)) - except Exception as e: # pylint: disable=broad-except + except Exception as e: print(f'Variable import failed: {repr(e)}') fail_count += 1 else: @@ -111,7 +111,7 @@ def _variable_export_helper(filepath): for var in qry: try: val = data.decode(var.val) - except Exception: # pylint: disable=broad-except + except Exception: val = var.val var_dict[var.key] = val diff --git a/airflow/cli/commands/webserver_command.py b/airflow/cli/commands/webserver_command.py index e786eb3656875..db6fbbb30a499 100644 --- a/airflow/cli/commands/webserver_command.py +++ b/airflow/cli/commands/webserver_command.py @@ -133,7 +133,7 @@ def _get_num_ready_workers_running(self) -> int: def 
ready_prefix_on_cmdline(proc): try: cmdline = proc.cmdline() - if len(cmdline) > 0: # pylint: disable=len-as-condition + if len(cmdline) > 0: return settings.GUNICORN_WORKER_READY_PREFIX in cmdline[0] except psutil.NoSuchProcess: pass @@ -201,7 +201,7 @@ def _reload_gunicorn(self) -> None: def start(self) -> NoReturn: """Starts monitoring the webserver.""" - try: # pylint: disable=too-many-nested-blocks + try: self._wait_until_true( lambda: self.num_workers_expected == self._get_num_workers_running(), timeout=self.master_timeout, @@ -427,7 +427,7 @@ def webserver(args): gunicorn_master_proc = None - def kill_proc(signum, _): # pylint: disable=unused-argument + def kill_proc(signum, _): log.info("Received signal: %s. Closing gunicorn.", signum) gunicorn_master_proc.terminate() with suppress(TimeoutError): diff --git a/airflow/cli/simple_table.py b/airflow/cli/simple_table.py index d17f948d2907f..30f29e3249733 100644 --- a/airflow/cli/simple_table.py +++ b/airflow/cli/simple_table.py @@ -73,7 +73,6 @@ def print_as_plain_table(self, data: List[Dict]): output = tabulate(rows, tablefmt="plain", headers=data[0].keys()) print(output) - # pylint: disable=too-many-return-statements def _normalize_data(self, value: Any, output: str) -> Optional[Union[list, str, dict]]: if isinstance(value, (tuple, list)): if output == "table": @@ -125,7 +124,7 @@ def __init__(self, *args, **kwargs): self.title_justify = kwargs.get("title_justify", "left") self.caption = kwargs.get("caption", " ") - def add_column(self, *args, **kwargs) -> None: # pylint: disable=signature-differs + def add_column(self, *args, **kwargs) -> None: """Add a column to the table. We use different default""" kwargs["overflow"] = kwargs.get("overflow") # to avoid truncating super().add_column(*args, **kwargs) diff --git a/airflow/compat/functools.py b/airflow/compat/functools.py index 10b4085d2db0b..e3dea0a660bbc 100644 --- a/airflow/compat/functools.py +++ b/airflow/compat/functools.py @@ -18,12 +18,12 @@ import sys if sys.version_info >= (3, 8): - from functools import cached_property # pylint: disable=no-name-in-module + from functools import cached_property else: from cached_property import cached_property if sys.version_info >= (3, 9): - from functools import cache # pylint: disable=no-name-in-module + from functools import cache else: from functools import lru_cache diff --git a/airflow/configuration.py b/airflow/configuration.py index ed38efdb106e3..8ea38fe5185f5 100644 --- a/airflow/configuration.py +++ b/airflow/configuration.py @@ -102,7 +102,7 @@ def default_config_yaml() -> List[dict]: return yaml.safe_load(config_file) -class AirflowConfigParser(ConfigParser): # pylint: disable=too-many-ancestors +class AirflowConfigParser(ConfigParser): """Custom Airflow Configparser supporting defaults and deprecated options""" # These configuration elements can be fetched as the stdout of commands @@ -265,7 +265,7 @@ def _validate_config_dependencies(self): + f"{list_mode}. Possible values are {', '.join(file_parser_modes)}." ) - def _using_old_value(self, old, current_value): # noqa + def _using_old_value(self, old, current_value): return old.search(current_value) is not None def _update_env_var(self, section, name, new_value): @@ -450,7 +450,7 @@ def getfloat(self, section, key, **kwargs): f'Current value: "{val}".' ) - def getimport(self, section, key, **kwargs): # noqa + def getimport(self, section, key, **kwargs): """ Reads options, imports the full qualified name, and returns the object. 
@@ -779,7 +779,7 @@ def parameterized_config(template): :param template: a config content templated with {{variables}} """ all_vars = {k: v for d in [globals(), locals()] for k, v in d.items()} - return template.format(**all_vars) # noqa + return template.format(**all_vars) def get_airflow_test_config(airflow_home): @@ -876,7 +876,7 @@ def initialize_config(): # Historical convenience functions to access config entries -def load_test_config(): # noqa: D103 +def load_test_config(): """Historical load_test_config""" warnings.warn( "Accessing configuration method 'load_test_config' directly from the configuration module is " @@ -888,7 +888,7 @@ def load_test_config(): # noqa: D103 conf.load_test_config() -def get(*args, **kwargs): # noqa: D103 +def get(*args, **kwargs): """Historical get""" warnings.warn( "Accessing configuration method 'get' directly from the configuration module is " @@ -900,7 +900,7 @@ def get(*args, **kwargs): # noqa: D103 return conf.get(*args, **kwargs) -def getboolean(*args, **kwargs): # noqa: D103 +def getboolean(*args, **kwargs): """Historical getboolean""" warnings.warn( "Accessing configuration method 'getboolean' directly from the configuration module is " @@ -912,7 +912,7 @@ def getboolean(*args, **kwargs): # noqa: D103 return conf.getboolean(*args, **kwargs) -def getfloat(*args, **kwargs): # noqa: D103 +def getfloat(*args, **kwargs): """Historical getfloat""" warnings.warn( "Accessing configuration method 'getfloat' directly from the configuration module is " @@ -924,7 +924,7 @@ def getfloat(*args, **kwargs): # noqa: D103 return conf.getfloat(*args, **kwargs) -def getint(*args, **kwargs): # noqa: D103 +def getint(*args, **kwargs): """Historical getint""" warnings.warn( "Accessing configuration method 'getint' directly from the configuration module is " @@ -936,7 +936,7 @@ def getint(*args, **kwargs): # noqa: D103 return conf.getint(*args, **kwargs) -def getsection(*args, **kwargs): # noqa: D103 +def getsection(*args, **kwargs): """Historical getsection""" warnings.warn( "Accessing configuration method 'getsection' directly from the configuration module is " @@ -948,7 +948,7 @@ def getsection(*args, **kwargs): # noqa: D103 return conf.getsection(*args, **kwargs) -def has_option(*args, **kwargs): # noqa: D103 +def has_option(*args, **kwargs): """Historical has_option""" warnings.warn( "Accessing configuration method 'has_option' directly from the configuration module is " @@ -960,7 +960,7 @@ def has_option(*args, **kwargs): # noqa: D103 return conf.has_option(*args, **kwargs) -def remove_option(*args, **kwargs): # noqa: D103 +def remove_option(*args, **kwargs): """Historical remove_option""" warnings.warn( "Accessing configuration method 'remove_option' directly from the configuration module is " @@ -972,7 +972,7 @@ def remove_option(*args, **kwargs): # noqa: D103 return conf.remove_option(*args, **kwargs) -def as_dict(*args, **kwargs): # noqa: D103 +def as_dict(*args, **kwargs): """Historical as_dict""" warnings.warn( "Accessing configuration method 'as_dict' directly from the configuration module is " @@ -984,7 +984,7 @@ def as_dict(*args, **kwargs): # noqa: D103 return conf.as_dict(*args, **kwargs) -def set(*args, **kwargs): # noqa pylint: disable=redefined-builtin +def set(*args, **kwargs): """Historical set""" warnings.warn( "Accessing configuration method 'set' directly from the configuration module is " diff --git a/airflow/contrib/hooks/aws_athena_hook.py b/airflow/contrib/hooks/aws_athena_hook.py index a00fadba15b0c..db1ecdfdbf3c5 100644 --- 
a/airflow/contrib/hooks/aws_athena_hook.py +++ b/airflow/contrib/hooks/aws_athena_hook.py @@ -20,7 +20,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.hooks.athena import AWSAthenaHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/aws_datasync_hook.py b/airflow/contrib/hooks/aws_datasync_hook.py index 68d74d8e5176e..0d485475b0310 100644 --- a/airflow/contrib/hooks/aws_datasync_hook.py +++ b/airflow/contrib/hooks/aws_datasync_hook.py @@ -20,7 +20,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.hooks.datasync import AWSDataSyncHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/aws_dynamodb_hook.py b/airflow/contrib/hooks/aws_dynamodb_hook.py index 92153b9909b59..dedb80073e3e5 100644 --- a/airflow/contrib/hooks/aws_dynamodb_hook.py +++ b/airflow/contrib/hooks/aws_dynamodb_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.hooks.dynamodb import AwsDynamoDBHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/aws_firehose_hook.py b/airflow/contrib/hooks/aws_firehose_hook.py index 4e98e2813d07e..c6d39cd795b79 100644 --- a/airflow/contrib/hooks/aws_firehose_hook.py +++ b/airflow/contrib/hooks/aws_firehose_hook.py @@ -20,7 +20,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.hooks.kinesis import AwsFirehoseHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/aws_glue_catalog_hook.py b/airflow/contrib/hooks/aws_glue_catalog_hook.py index 372648881d8f2..703ba47b81bf3 100644 --- a/airflow/contrib/hooks/aws_glue_catalog_hook.py +++ b/airflow/contrib/hooks/aws_glue_catalog_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.hooks.glue_catalog import AwsGlueCatalogHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/aws_hook.py b/airflow/contrib/hooks/aws_hook.py index 1677982abfce1..c40e32c0305f1 100644 --- a/airflow/contrib/hooks/aws_hook.py +++ b/airflow/contrib/hooks/aws_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook, _parse_s3_config, boto3 # noqa warnings.warn( diff --git a/airflow/contrib/hooks/aws_lambda_hook.py b/airflow/contrib/hooks/aws_lambda_hook.py index b1987f1e81903..379aaf5486655 100644 --- a/airflow/contrib/hooks/aws_lambda_hook.py +++ b/airflow/contrib/hooks/aws_lambda_hook.py @@ -23,7 +23,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.hooks.lambda_function import AwsLambdaHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/aws_logs_hook.py b/airflow/contrib/hooks/aws_logs_hook.py index 88c03c8da6f6a..9b9c449f8415e 100644 --- a/airflow/contrib/hooks/aws_logs_hook.py +++ b/airflow/contrib/hooks/aws_logs_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.hooks.logs import AwsLogsHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/aws_sns_hook.py b/airflow/contrib/hooks/aws_sns_hook.py index 47e641074af4d..b1318f52add2d 100644 --- a/airflow/contrib/hooks/aws_sns_hook.py +++ b/airflow/contrib/hooks/aws_sns_hook.py @@ -20,7 +20,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.hooks.sns import AwsSnsHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/aws_sqs_hook.py b/airflow/contrib/hooks/aws_sqs_hook.py index c504faa4d54a8..66cc1fa46e5da 100644 --- 
a/airflow/contrib/hooks/aws_sqs_hook.py +++ b/airflow/contrib/hooks/aws_sqs_hook.py @@ -20,7 +20,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.hooks.sqs import SQSHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/azure_container_instance_hook.py b/airflow/contrib/hooks/azure_container_instance_hook.py index 9ee03c770594f..5b40f9c9edaa1 100644 --- a/airflow/contrib/hooks/azure_container_instance_hook.py +++ b/airflow/contrib/hooks/azure_container_instance_hook.py @@ -22,7 +22,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.microsoft.azure.hooks.azure_container_instance import ( # noqa AzureContainerInstanceHook, ) diff --git a/airflow/contrib/hooks/azure_container_registry_hook.py b/airflow/contrib/hooks/azure_container_registry_hook.py index fe9cfdc488e70..840cf89cf943c 100644 --- a/airflow/contrib/hooks/azure_container_registry_hook.py +++ b/airflow/contrib/hooks/azure_container_registry_hook.py @@ -22,7 +22,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.microsoft.azure.hooks.azure_container_registry import ( # noqa AzureContainerRegistryHook, ) diff --git a/airflow/contrib/hooks/azure_container_volume_hook.py b/airflow/contrib/hooks/azure_container_volume_hook.py index b00c18a72223c..4b325ad266770 100644 --- a/airflow/contrib/hooks/azure_container_volume_hook.py +++ b/airflow/contrib/hooks/azure_container_volume_hook.py @@ -22,7 +22,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.microsoft.azure.hooks.azure_container_volume import AzureContainerVolumeHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/azure_cosmos_hook.py b/airflow/contrib/hooks/azure_cosmos_hook.py index d449507a53257..26abe6194a641 100644 --- a/airflow/contrib/hooks/azure_cosmos_hook.py +++ b/airflow/contrib/hooks/azure_cosmos_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.microsoft.azure.hooks.azure_cosmos import AzureCosmosDBHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/azure_data_lake_hook.py b/airflow/contrib/hooks/azure_data_lake_hook.py index ff63e56193fed..a89961d73c6f5 100644 --- a/airflow/contrib/hooks/azure_data_lake_hook.py +++ b/airflow/contrib/hooks/azure_data_lake_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.microsoft.azure.hooks.azure_data_lake import AzureDataLakeHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/azure_fileshare_hook.py b/airflow/contrib/hooks/azure_fileshare_hook.py index a69e57b4e0395..2c49d41b825fa 100644 --- a/airflow/contrib/hooks/azure_fileshare_hook.py +++ b/airflow/contrib/hooks/azure_fileshare_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.microsoft.azure.hooks.azure_fileshare import AzureFileShareHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/bigquery_hook.py b/airflow/contrib/hooks/bigquery_hook.py index 64e8672990ebd..8a90ef80ec2bf 100644 --- a/airflow/contrib/hooks/bigquery_hook.py +++ b/airflow/contrib/hooks/bigquery_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.google.cloud.hooks.bigquery import ( # noqa BigQueryBaseCursor, BigQueryConnection, diff --git a/airflow/contrib/hooks/cassandra_hook.py b/airflow/contrib/hooks/cassandra_hook.py index 38efbcef4ab19..ea4c748c2bfc5 100644 --- a/airflow/contrib/hooks/cassandra_hook.py +++ b/airflow/contrib/hooks/cassandra_hook.py @@ -19,7 
+19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.apache.cassandra.hooks.cassandra import CassandraHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/cloudant_hook.py b/airflow/contrib/hooks/cloudant_hook.py index 9a68195b817f0..ab7a1fa398fa7 100644 --- a/airflow/contrib/hooks/cloudant_hook.py +++ b/airflow/contrib/hooks/cloudant_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.cloudant.hooks.cloudant import CloudantHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/databricks_hook.py b/airflow/contrib/hooks/databricks_hook.py index 7d12d84c599e6..ce77751283645 100644 --- a/airflow/contrib/hooks/databricks_hook.py +++ b/airflow/contrib/hooks/databricks_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.databricks.hooks.databricks import ( # noqa CANCEL_RUN_ENDPOINT, GET_RUN_ENDPOINT, diff --git a/airflow/contrib/hooks/datadog_hook.py b/airflow/contrib/hooks/datadog_hook.py index 728dc927da881..be275e9adf81d 100644 --- a/airflow/contrib/hooks/datadog_hook.py +++ b/airflow/contrib/hooks/datadog_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.datadog.hooks.datadog import DatadogHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/datastore_hook.py b/airflow/contrib/hooks/datastore_hook.py index 31300acc8f03d..9898e2fb16814 100644 --- a/airflow/contrib/hooks/datastore_hook.py +++ b/airflow/contrib/hooks/datastore_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.google.cloud.hooks.datastore import DatastoreHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/dingding_hook.py b/airflow/contrib/hooks/dingding_hook.py index cde1b09e6e541..deff0414baf94 100644 --- a/airflow/contrib/hooks/dingding_hook.py +++ b/airflow/contrib/hooks/dingding_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.dingding.hooks.dingding import DingdingHook, requests # noqa warnings.warn( diff --git a/airflow/contrib/hooks/discord_webhook_hook.py b/airflow/contrib/hooks/discord_webhook_hook.py index a9aca476cd1f6..a907d2115a724 100644 --- a/airflow/contrib/hooks/discord_webhook_hook.py +++ b/airflow/contrib/hooks/discord_webhook_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.discord.hooks.discord_webhook import DiscordWebhookHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/emr_hook.py b/airflow/contrib/hooks/emr_hook.py index 3278cd53c0f2d..1a15ee3edcd17 100644 --- a/airflow/contrib/hooks/emr_hook.py +++ b/airflow/contrib/hooks/emr_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.hooks.emr import EmrHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/fs_hook.py b/airflow/contrib/hooks/fs_hook.py index 35178fd4c1082..bc247c1948074 100644 --- a/airflow/contrib/hooks/fs_hook.py +++ b/airflow/contrib/hooks/fs_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.hooks.filesystem import FSHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/ftp_hook.py b/airflow/contrib/hooks/ftp_hook.py index 1ff3980959c33..8d2e9cb06c843 100644 --- a/airflow/contrib/hooks/ftp_hook.py +++ b/airflow/contrib/hooks/ftp_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.ftp.hooks.ftp import FTPHook, FTPSHook # noqa 
warnings.warn( diff --git a/airflow/contrib/hooks/gcp_bigtable_hook.py b/airflow/contrib/hooks/gcp_bigtable_hook.py index 23ba27590d061..47ccd2414a839 100644 --- a/airflow/contrib/hooks/gcp_bigtable_hook.py +++ b/airflow/contrib/hooks/gcp_bigtable_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.google.cloud.hooks.bigtable import BigtableHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/gcp_cloud_build_hook.py b/airflow/contrib/hooks/gcp_cloud_build_hook.py index 11e89027c40cc..691ae728a46d1 100644 --- a/airflow/contrib/hooks/gcp_cloud_build_hook.py +++ b/airflow/contrib/hooks/gcp_cloud_build_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.google.cloud.hooks.cloud_build import CloudBuildHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/gcp_dlp_hook.py b/airflow/contrib/hooks/gcp_dlp_hook.py index d35009f478425..77a9da6b1a8a8 100644 --- a/airflow/contrib/hooks/gcp_dlp_hook.py +++ b/airflow/contrib/hooks/gcp_dlp_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.google.cloud.hooks.dlp import CloudDLPHook, DlpJob # noqa warnings.warn( diff --git a/airflow/contrib/hooks/gcp_mlengine_hook.py b/airflow/contrib/hooks/gcp_mlengine_hook.py index f154786355055..57978e008b375 100644 --- a/airflow/contrib/hooks/gcp_mlengine_hook.py +++ b/airflow/contrib/hooks/gcp_mlengine_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.google.cloud.hooks.mlengine import MLEngineHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/gcp_natural_language_hook.py b/airflow/contrib/hooks/gcp_natural_language_hook.py index 08035ed519b9a..86ee9f8675d57 100644 --- a/airflow/contrib/hooks/gcp_natural_language_hook.py +++ b/airflow/contrib/hooks/gcp_natural_language_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.google.cloud.hooks.natural_language import CloudNaturalLanguageHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/gcp_pubsub_hook.py b/airflow/contrib/hooks/gcp_pubsub_hook.py index 29c979f6228cc..677a0f03fa506 100644 --- a/airflow/contrib/hooks/gcp_pubsub_hook.py +++ b/airflow/contrib/hooks/gcp_pubsub_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.google.cloud.hooks.pubsub import PubSubException, PubSubHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/gcp_tasks_hook.py b/airflow/contrib/hooks/gcp_tasks_hook.py index ecb7980f4df72..1753b2a1842d4 100644 --- a/airflow/contrib/hooks/gcp_tasks_hook.py +++ b/airflow/contrib/hooks/gcp_tasks_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.google.cloud.hooks.tasks import CloudTasksHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/gcp_translate_hook.py b/airflow/contrib/hooks/gcp_translate_hook.py index db9d54c697440..1b0cec8b5ec9c 100644 --- a/airflow/contrib/hooks/gcp_translate_hook.py +++ b/airflow/contrib/hooks/gcp_translate_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.google.cloud.hooks.translate import CloudTranslateHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/gcp_video_intelligence_hook.py b/airflow/contrib/hooks/gcp_video_intelligence_hook.py index 631268330d6e4..a71ef4649e909 100644 --- a/airflow/contrib/hooks/gcp_video_intelligence_hook.py +++ 
b/airflow/contrib/hooks/gcp_video_intelligence_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.google.cloud.hooks.video_intelligence import CloudVideoIntelligenceHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/gcp_vision_hook.py b/airflow/contrib/hooks/gcp_vision_hook.py index 1690059b1f702..52f47f42bfdf5 100644 --- a/airflow/contrib/hooks/gcp_vision_hook.py +++ b/airflow/contrib/hooks/gcp_vision_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.google.cloud.hooks.vision import CloudVisionHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/gdrive_hook.py b/airflow/contrib/hooks/gdrive_hook.py index f5b687a3f27ad..dad8459c58394 100644 --- a/airflow/contrib/hooks/gdrive_hook.py +++ b/airflow/contrib/hooks/gdrive_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.google.suite.hooks.drive import GoogleDriveHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/grpc_hook.py b/airflow/contrib/hooks/grpc_hook.py index 41c02823e91ff..f7aa6e2216fa9 100644 --- a/airflow/contrib/hooks/grpc_hook.py +++ b/airflow/contrib/hooks/grpc_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.grpc.hooks.grpc import GrpcHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/imap_hook.py b/airflow/contrib/hooks/imap_hook.py index 0332803561b0f..57703966ab3c9 100644 --- a/airflow/contrib/hooks/imap_hook.py +++ b/airflow/contrib/hooks/imap_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.imap.hooks.imap import ImapHook, Mail, MailPart # noqa warnings.warn( diff --git a/airflow/contrib/hooks/jenkins_hook.py b/airflow/contrib/hooks/jenkins_hook.py index b00fdcc79f10a..178474ea77991 100644 --- a/airflow/contrib/hooks/jenkins_hook.py +++ b/airflow/contrib/hooks/jenkins_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.jenkins.hooks.jenkins import JenkinsHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/jira_hook.py b/airflow/contrib/hooks/jira_hook.py index 69113eb514aa8..8f9d4670154a2 100644 --- a/airflow/contrib/hooks/jira_hook.py +++ b/airflow/contrib/hooks/jira_hook.py @@ -20,7 +20,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.jira.hooks.jira import JiraHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/mongo_hook.py b/airflow/contrib/hooks/mongo_hook.py index 6f30b7f85d523..63f6eea36ba51 100644 --- a/airflow/contrib/hooks/mongo_hook.py +++ b/airflow/contrib/hooks/mongo_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.mongo.hooks.mongo import MongoHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/openfaas_hook.py b/airflow/contrib/hooks/openfaas_hook.py index 03f2458a574b1..a0e71ffe5e564 100644 --- a/airflow/contrib/hooks/openfaas_hook.py +++ b/airflow/contrib/hooks/openfaas_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.openfaas.hooks.openfaas import OK_STATUS_CODE, OpenFaasHook, requests # noqa warnings.warn( diff --git a/airflow/contrib/hooks/opsgenie_alert_hook.py b/airflow/contrib/hooks/opsgenie_alert_hook.py index 41908cf170a62..a65de9c3329c9 100644 --- a/airflow/contrib/hooks/opsgenie_alert_hook.py +++ b/airflow/contrib/hooks/opsgenie_alert_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from 
airflow.providers.opsgenie.hooks.opsgenie_alert import OpsgenieAlertHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/pagerduty_hook.py b/airflow/contrib/hooks/pagerduty_hook.py index facf44e0d0fd5..33797b0e60ea4 100644 --- a/airflow/contrib/hooks/pagerduty_hook.py +++ b/airflow/contrib/hooks/pagerduty_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.pagerduty.hooks.pagerduty import PagerdutyHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/pinot_hook.py b/airflow/contrib/hooks/pinot_hook.py index 43c5f86d96de8..159677fdec52a 100644 --- a/airflow/contrib/hooks/pinot_hook.py +++ b/airflow/contrib/hooks/pinot_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.apache.pinot.hooks.pinot import PinotAdminHook, PinotDbApiHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/qubole_check_hook.py b/airflow/contrib/hooks/qubole_check_hook.py index 86479f3608be6..0a674d7c763ad 100644 --- a/airflow/contrib/hooks/qubole_check_hook.py +++ b/airflow/contrib/hooks/qubole_check_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.qubole.hooks.qubole_check import QuboleCheckHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/qubole_hook.py b/airflow/contrib/hooks/qubole_hook.py index ba5075669964c..6a695bca76462 100644 --- a/airflow/contrib/hooks/qubole_hook.py +++ b/airflow/contrib/hooks/qubole_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.qubole.hooks.qubole import QuboleHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/redis_hook.py b/airflow/contrib/hooks/redis_hook.py index 69390a16fa0ec..57bdab5aead08 100644 --- a/airflow/contrib/hooks/redis_hook.py +++ b/airflow/contrib/hooks/redis_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.redis.hooks.redis import RedisHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/redshift_hook.py b/airflow/contrib/hooks/redshift_hook.py index 3cdda456ab168..ebaac1bdac0f5 100644 --- a/airflow/contrib/hooks/redshift_hook.py +++ b/airflow/contrib/hooks/redshift_hook.py @@ -20,7 +20,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.hooks.redshift import RedshiftHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/sagemaker_hook.py b/airflow/contrib/hooks/sagemaker_hook.py index 002a39a757948..321f25b022fa9 100644 --- a/airflow/contrib/hooks/sagemaker_hook.py +++ b/airflow/contrib/hooks/sagemaker_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.hooks.sagemaker import ( # noqa LogState, Position, diff --git a/airflow/contrib/hooks/salesforce_hook.py b/airflow/contrib/hooks/salesforce_hook.py index bcde12df2d353..a707a527b8449 100644 --- a/airflow/contrib/hooks/salesforce_hook.py +++ b/airflow/contrib/hooks/salesforce_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.salesforce.hooks.salesforce import SalesforceHook, pd # noqa warnings.warn( diff --git a/airflow/contrib/hooks/segment_hook.py b/airflow/contrib/hooks/segment_hook.py index e702817f20902..6da62578cee8b 100644 --- a/airflow/contrib/hooks/segment_hook.py +++ b/airflow/contrib/hooks/segment_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.segment.hooks.segment import SegmentHook, analytics # noqa warnings.warn( diff 
--git a/airflow/contrib/hooks/sftp_hook.py b/airflow/contrib/hooks/sftp_hook.py index 7b91ee36c048a..0153e8e54d062 100644 --- a/airflow/contrib/hooks/sftp_hook.py +++ b/airflow/contrib/hooks/sftp_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.sftp.hooks.sftp import SFTPHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/slack_webhook_hook.py b/airflow/contrib/hooks/slack_webhook_hook.py index 68e4195707dc2..f438d11575b43 100644 --- a/airflow/contrib/hooks/slack_webhook_hook.py +++ b/airflow/contrib/hooks/slack_webhook_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.slack.hooks.slack_webhook import SlackWebhookHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/snowflake_hook.py b/airflow/contrib/hooks/snowflake_hook.py index afda9e140dc54..804baccf66cdf 100644 --- a/airflow/contrib/hooks/snowflake_hook.py +++ b/airflow/contrib/hooks/snowflake_hook.py @@ -20,7 +20,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.snowflake.hooks.snowflake import SnowflakeHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/spark_jdbc_hook.py b/airflow/contrib/hooks/spark_jdbc_hook.py index ca2969940316e..1b48d094acbb6 100644 --- a/airflow/contrib/hooks/spark_jdbc_hook.py +++ b/airflow/contrib/hooks/spark_jdbc_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.apache.spark.hooks.spark_jdbc import SparkJDBCHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/spark_sql_hook.py b/airflow/contrib/hooks/spark_sql_hook.py index cbac63fad6600..6b262ed3efce5 100644 --- a/airflow/contrib/hooks/spark_sql_hook.py +++ b/airflow/contrib/hooks/spark_sql_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.apache.spark.hooks.spark_sql import SparkSqlHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/spark_submit_hook.py b/airflow/contrib/hooks/spark_submit_hook.py index 9a30d773ed9b1..fbdbf4f9f0d53 100644 --- a/airflow/contrib/hooks/spark_submit_hook.py +++ b/airflow/contrib/hooks/spark_submit_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.apache.spark.hooks.spark_submit import SparkSubmitHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/sqoop_hook.py b/airflow/contrib/hooks/sqoop_hook.py index 07d07de46d530..f231c0f26a8dc 100644 --- a/airflow/contrib/hooks/sqoop_hook.py +++ b/airflow/contrib/hooks/sqoop_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.apache.sqoop.hooks.sqoop import SqoopHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/ssh_hook.py b/airflow/contrib/hooks/ssh_hook.py index 75cb3207d5bd7..ef3000d888c50 100644 --- a/airflow/contrib/hooks/ssh_hook.py +++ b/airflow/contrib/hooks/ssh_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.ssh.hooks.ssh import SSHHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/vertica_hook.py b/airflow/contrib/hooks/vertica_hook.py index 4aff07c79d410..fc84b222d0a5a 100644 --- a/airflow/contrib/hooks/vertica_hook.py +++ b/airflow/contrib/hooks/vertica_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.vertica.hooks.vertica import VerticaHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/wasb_hook.py b/airflow/contrib/hooks/wasb_hook.py index d256aef30c497..3b5eb650934af 100644 --- 
a/airflow/contrib/hooks/wasb_hook.py +++ b/airflow/contrib/hooks/wasb_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.microsoft.azure.hooks.wasb import WasbHook # noqa warnings.warn( diff --git a/airflow/contrib/hooks/winrm_hook.py b/airflow/contrib/hooks/winrm_hook.py index e22f3b6f0c3da..35e7db2bc7294 100644 --- a/airflow/contrib/hooks/winrm_hook.py +++ b/airflow/contrib/hooks/winrm_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.microsoft.winrm.hooks.winrm import WinRMHook # noqa warnings.warn( diff --git a/airflow/contrib/operators/adls_list_operator.py b/airflow/contrib/operators/adls_list_operator.py index cb4357497281e..b924342600de8 100644 --- a/airflow/contrib/operators/adls_list_operator.py +++ b/airflow/contrib/operators/adls_list_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.microsoft.azure.operators.adls_list import AzureDataLakeStorageListOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/aws_athena_operator.py b/airflow/contrib/operators/aws_athena_operator.py index 97c12c92cf06e..e799c74635ed2 100644 --- a/airflow/contrib/operators/aws_athena_operator.py +++ b/airflow/contrib/operators/aws_athena_operator.py @@ -20,7 +20,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.operators.athena import AWSAthenaOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/aws_sqs_publish_operator.py b/airflow/contrib/operators/aws_sqs_publish_operator.py index c7c2ca3c00258..b397d9e0ac6a3 100644 --- a/airflow/contrib/operators/aws_sqs_publish_operator.py +++ b/airflow/contrib/operators/aws_sqs_publish_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.operators.sqs import SQSPublishOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/awsbatch_operator.py b/airflow/contrib/operators/awsbatch_operator.py index 0d1c5b0bd3d5d..a6be224cb93eb 100644 --- a/airflow/contrib/operators/awsbatch_operator.py +++ b/airflow/contrib/operators/awsbatch_operator.py @@ -41,7 +41,6 @@ ) -# pylint: disable=too-many-ancestors class AWSBatchOperator(AwsBatchOperator): """ This class is deprecated. 
Please use diff --git a/airflow/contrib/operators/azure_container_instances_operator.py b/airflow/contrib/operators/azure_container_instances_operator.py index 7efd0084cccf3..8b6a32a8ae216 100644 --- a/airflow/contrib/operators/azure_container_instances_operator.py +++ b/airflow/contrib/operators/azure_container_instances_operator.py @@ -21,7 +21,6 @@ """ import warnings -# pylint: disable=unused-import from airflow.providers.microsoft.azure.operators.azure_container_instances import ( # noqa AzureContainerInstancesOperator, ) diff --git a/airflow/contrib/operators/azure_cosmos_operator.py b/airflow/contrib/operators/azure_cosmos_operator.py index f4bec653beb2a..6c087285ce8f7 100644 --- a/airflow/contrib/operators/azure_cosmos_operator.py +++ b/airflow/contrib/operators/azure_cosmos_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.microsoft.azure.operators.azure_cosmos import AzureCosmosInsertDocumentOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/bigquery_check_operator.py b/airflow/contrib/operators/bigquery_check_operator.py index ff021fb7e67fa..39b658cacab8d 100644 --- a/airflow/contrib/operators/bigquery_check_operator.py +++ b/airflow/contrib/operators/bigquery_check_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.google.cloud.operators.bigquery import ( # noqa BigQueryCheckOperator, BigQueryIntervalCheckOperator, diff --git a/airflow/contrib/operators/bigquery_get_data.py b/airflow/contrib/operators/bigquery_get_data.py index 8434cb4ede5fd..00c8575df3d70 100644 --- a/airflow/contrib/operators/bigquery_get_data.py +++ b/airflow/contrib/operators/bigquery_get_data.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.google.cloud.operators.bigquery import BigQueryGetDataOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/bigquery_operator.py b/airflow/contrib/operators/bigquery_operator.py index ca9a84580e915..6fe8f0816263f 100644 --- a/airflow/contrib/operators/bigquery_operator.py +++ b/airflow/contrib/operators/bigquery_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.google.cloud.operators.bigquery import ( # noqa; noqa; noqa; noqa; noqa BigQueryCreateEmptyDatasetOperator, BigQueryCreateEmptyTableOperator, diff --git a/airflow/contrib/operators/bigquery_to_bigquery.py b/airflow/contrib/operators/bigquery_to_bigquery.py index 43c05ecbd8d40..84c26fb718c91 100644 --- a/airflow/contrib/operators/bigquery_to_bigquery.py +++ b/airflow/contrib/operators/bigquery_to_bigquery.py @@ -22,7 +22,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.google.cloud.transfers.bigquery_to_bigquery import BigQueryToBigQueryOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/bigquery_to_mysql_operator.py b/airflow/contrib/operators/bigquery_to_mysql_operator.py index bd9f0ac04f31a..401921cff502b 100644 --- a/airflow/contrib/operators/bigquery_to_mysql_operator.py +++ b/airflow/contrib/operators/bigquery_to_mysql_operator.py @@ -21,7 +21,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.google.cloud.transfers.bigquery_to_mysql import BigQueryToMySqlOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/databricks_operator.py b/airflow/contrib/operators/databricks_operator.py index 90edb6032a487..b591dd63b8274 100644 --- a/airflow/contrib/operators/databricks_operator.py +++ 
b/airflow/contrib/operators/databricks_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.databricks.operators.databricks import ( # noqa DatabricksRunNowOperator, DatabricksSubmitRunOperator, diff --git a/airflow/contrib/operators/dingding_operator.py b/airflow/contrib/operators/dingding_operator.py index 7abbac34fdaac..bfe91e8a72491 100644 --- a/airflow/contrib/operators/dingding_operator.py +++ b/airflow/contrib/operators/dingding_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.dingding.operators.dingding import DingdingOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/discord_webhook_operator.py b/airflow/contrib/operators/discord_webhook_operator.py index 2dcc117d704c0..be5809afbfcbd 100644 --- a/airflow/contrib/operators/discord_webhook_operator.py +++ b/airflow/contrib/operators/discord_webhook_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.discord.operators.discord_webhook import DiscordWebhookOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/docker_swarm_operator.py b/airflow/contrib/operators/docker_swarm_operator.py index 041f4fd7a522d..b023da796a4c8 100644 --- a/airflow/contrib/operators/docker_swarm_operator.py +++ b/airflow/contrib/operators/docker_swarm_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.docker.operators.docker_swarm import DockerSwarmOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/druid_operator.py b/airflow/contrib/operators/druid_operator.py index 1a35b0fa894d2..20dff77192313 100644 --- a/airflow/contrib/operators/druid_operator.py +++ b/airflow/contrib/operators/druid_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.apache.druid.operators.druid import DruidOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/dynamodb_to_s3.py b/airflow/contrib/operators/dynamodb_to_s3.py index 3785d8eeb86e0..a2054007c0678 100644 --- a/airflow/contrib/operators/dynamodb_to_s3.py +++ b/airflow/contrib/operators/dynamodb_to_s3.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.transfers.dynamodb_to_s3 import DynamoDBToS3Operator # noqa warnings.warn( diff --git a/airflow/contrib/operators/ecs_operator.py b/airflow/contrib/operators/ecs_operator.py index a7865b9a39691..e460a6a51b5b9 100644 --- a/airflow/contrib/operators/ecs_operator.py +++ b/airflow/contrib/operators/ecs_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.operators.ecs import ECSOperator, ECSProtocol as NewECSProtocol # noqa from airflow.typing_compat import Protocol, runtime_checkable diff --git a/airflow/contrib/operators/emr_add_steps_operator.py b/airflow/contrib/operators/emr_add_steps_operator.py index e03c0c6e0dd37..e53f284e447c6 100644 --- a/airflow/contrib/operators/emr_add_steps_operator.py +++ b/airflow/contrib/operators/emr_add_steps_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.operators.emr_add_steps import EmrAddStepsOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/emr_create_job_flow_operator.py b/airflow/contrib/operators/emr_create_job_flow_operator.py index 95950e72431c6..14344a90e1579 100644 --- a/airflow/contrib/operators/emr_create_job_flow_operator.py 
+++ b/airflow/contrib/operators/emr_create_job_flow_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.operators.emr_create_job_flow import EmrCreateJobFlowOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/emr_terminate_job_flow_operator.py b/airflow/contrib/operators/emr_terminate_job_flow_operator.py index 86e08497e9fb1..7c73bc32dc8db 100644 --- a/airflow/contrib/operators/emr_terminate_job_flow_operator.py +++ b/airflow/contrib/operators/emr_terminate_job_flow_operator.py @@ -22,7 +22,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.operators.emr_terminate_job_flow import EmrTerminateJobFlowOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/file_to_wasb.py b/airflow/contrib/operators/file_to_wasb.py index d288c256d6b54..2af0476f9a912 100644 --- a/airflow/contrib/operators/file_to_wasb.py +++ b/airflow/contrib/operators/file_to_wasb.py @@ -22,7 +22,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.microsoft.azure.transfers.file_to_wasb import FileToWasbOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/gcp_bigtable_operator.py b/airflow/contrib/operators/gcp_bigtable_operator.py index b07a04c075921..f45fde7d5a388 100644 --- a/airflow/contrib/operators/gcp_bigtable_operator.py +++ b/airflow/contrib/operators/gcp_bigtable_operator.py @@ -119,7 +119,6 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) -# pylint: disable=too-many-ancestors class BigtableTableWaitForReplicationSensor(BigtableTableReplicationCompletedSensor): """ This class is deprecated. diff --git a/airflow/contrib/operators/gcp_cloud_build_operator.py b/airflow/contrib/operators/gcp_cloud_build_operator.py index b07a295296b8f..443fdbfdab73f 100644 --- a/airflow/contrib/operators/gcp_cloud_build_operator.py +++ b/airflow/contrib/operators/gcp_cloud_build_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.google.cloud.operators.cloud_build import CloudBuildCreateBuildOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/gcp_dlp_operator.py b/airflow/contrib/operators/gcp_dlp_operator.py index a5f4cae9f4722..f5b4c07d4cc8d 100644 --- a/airflow/contrib/operators/gcp_dlp_operator.py +++ b/airflow/contrib/operators/gcp_dlp_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.google.cloud.operators.dlp import ( # noqa CloudDLPCancelDLPJobOperator, CloudDLPCreateDeidentifyTemplateOperator, diff --git a/airflow/contrib/operators/gcp_tasks_operator.py b/airflow/contrib/operators/gcp_tasks_operator.py index 62ea14c92490f..319ddb4fe2ad5 100644 --- a/airflow/contrib/operators/gcp_tasks_operator.py +++ b/airflow/contrib/operators/gcp_tasks_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.google.cloud.operators.tasks import ( # noqa CloudTasksQueueCreateOperator, CloudTasksQueueDeleteOperator, diff --git a/airflow/contrib/operators/gcp_translate_operator.py b/airflow/contrib/operators/gcp_translate_operator.py index dc1cacaff32bd..c61cc8492c250 100644 --- a/airflow/contrib/operators/gcp_translate_operator.py +++ b/airflow/contrib/operators/gcp_translate_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.google.cloud.operators.translate import CloudTranslateTextOperator # noqa warnings.warn( diff --git 
a/airflow/contrib/operators/gcp_translate_speech_operator.py b/airflow/contrib/operators/gcp_translate_speech_operator.py index f8f8cc56131a6..2e0bb70787b61 100644 --- a/airflow/contrib/operators/gcp_translate_speech_operator.py +++ b/airflow/contrib/operators/gcp_translate_speech_operator.py @@ -22,8 +22,7 @@ import warnings -# pylint: disable=unused-import -from airflow.providers.google.cloud.operators.translate_speech import CloudTranslateSpeechOperator # noqa +from airflow.providers.google.cloud.operators.translate_speech import CloudTranslateSpeechOperator warnings.warn( "This module is deprecated. Please use `airflow.providers.google.cloud.operators.translate_speech`.", diff --git a/airflow/contrib/operators/gcp_video_intelligence_operator.py b/airflow/contrib/operators/gcp_video_intelligence_operator.py index edc9539269a36..a82fc9aeb8411 100644 --- a/airflow/contrib/operators/gcp_video_intelligence_operator.py +++ b/airflow/contrib/operators/gcp_video_intelligence_operator.py @@ -22,7 +22,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.google.cloud.operators.video_intelligence import ( # noqa CloudVideoIntelligenceDetectVideoExplicitContentOperator, CloudVideoIntelligenceDetectVideoLabelsOperator, diff --git a/airflow/contrib/operators/gcp_vision_operator.py b/airflow/contrib/operators/gcp_vision_operator.py index 18424cf0bf6fb..09a5b1e817e28 100644 --- a/airflow/contrib/operators/gcp_vision_operator.py +++ b/airflow/contrib/operators/gcp_vision_operator.py @@ -19,7 +19,7 @@ import warnings -from airflow.providers.google.cloud.operators.vision import ( # noqa # pylint: disable=unused-import +from airflow.providers.google.cloud.operators.vision import ( # noqa CloudVisionAddProductToProductSetOperator, CloudVisionCreateProductOperator, CloudVisionCreateProductSetOperator, diff --git a/airflow/contrib/operators/gcs_to_gcs_transfer_operator.py b/airflow/contrib/operators/gcs_to_gcs_transfer_operator.py index 99d7ca2d805cd..75a672f2c18b2 100644 --- a/airflow/contrib/operators/gcs_to_gcs_transfer_operator.py +++ b/airflow/contrib/operators/gcs_to_gcs_transfer_operator.py @@ -22,8 +22,6 @@ import warnings -# pylint: disable=unused-import - warnings.warn( "This module is deprecated. 
" "Please use `airflow.providers.google.cloud.operators.cloud_storage_transfer_service`.", diff --git a/airflow/contrib/operators/gcs_to_gdrive_operator.py b/airflow/contrib/operators/gcs_to_gdrive_operator.py index 72627ce4b57e6..1fb55d1fc3dfe 100644 --- a/airflow/contrib/operators/gcs_to_gdrive_operator.py +++ b/airflow/contrib/operators/gcs_to_gdrive_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.google.suite.transfers.gcs_to_gdrive import GCSToGoogleDriveOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/grpc_operator.py b/airflow/contrib/operators/grpc_operator.py index dbf49c62a04d2..bd8cfbd6003ee 100644 --- a/airflow/contrib/operators/grpc_operator.py +++ b/airflow/contrib/operators/grpc_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.grpc.operators.grpc import GrpcOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/hive_to_dynamodb.py b/airflow/contrib/operators/hive_to_dynamodb.py index 3cd467e393542..ba4f8b967cc98 100644 --- a/airflow/contrib/operators/hive_to_dynamodb.py +++ b/airflow/contrib/operators/hive_to_dynamodb.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.transfers.hive_to_dynamodb import HiveToDynamoDBOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/imap_attachment_to_s3_operator.py b/airflow/contrib/operators/imap_attachment_to_s3_operator.py index ea88e6e1966ed..e82a8bc05aeae 100644 --- a/airflow/contrib/operators/imap_attachment_to_s3_operator.py +++ b/airflow/contrib/operators/imap_attachment_to_s3_operator.py @@ -22,7 +22,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.transfers.imap_attachment_to_s3 import ImapAttachmentToS3Operator # noqa warnings.warn( diff --git a/airflow/contrib/operators/jenkins_job_trigger_operator.py b/airflow/contrib/operators/jenkins_job_trigger_operator.py index 9fde004585622..0b401d2b430a7 100644 --- a/airflow/contrib/operators/jenkins_job_trigger_operator.py +++ b/airflow/contrib/operators/jenkins_job_trigger_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.jenkins.operators.jenkins_job_trigger import JenkinsJobTriggerOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/jira_operator.py b/airflow/contrib/operators/jira_operator.py index a9a919b03ff2d..b6e3b3e124a85 100644 --- a/airflow/contrib/operators/jira_operator.py +++ b/airflow/contrib/operators/jira_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.jira.operators.jira import JiraOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/kubernetes_pod_operator.py b/airflow/contrib/operators/kubernetes_pod_operator.py index e4d264fa3abad..962fa22859013 100644 --- a/airflow/contrib/operators/kubernetes_pod_operator.py +++ b/airflow/contrib/operators/kubernetes_pod_operator.py @@ -22,7 +22,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.cncf.kubernetes.operators.kubernetes_pod import KubernetesPodOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/mongo_to_s3.py b/airflow/contrib/operators/mongo_to_s3.py index 6a76e3befd19f..17b0676952e91 100644 --- a/airflow/contrib/operators/mongo_to_s3.py +++ b/airflow/contrib/operators/mongo_to_s3.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from 
airflow.providers.amazon.aws.transfers.mongo_to_s3 import MongoToS3Operator # noqa warnings.warn( diff --git a/airflow/contrib/operators/opsgenie_alert_operator.py b/airflow/contrib/operators/opsgenie_alert_operator.py index de3459f0399e4..b8ce1ddcf4a9e 100644 --- a/airflow/contrib/operators/opsgenie_alert_operator.py +++ b/airflow/contrib/operators/opsgenie_alert_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.opsgenie.operators.opsgenie_alert import OpsgenieAlertOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/oracle_to_azure_data_lake_transfer.py b/airflow/contrib/operators/oracle_to_azure_data_lake_transfer.py index c27d94826fb39..3907b6f8e8b6b 100644 --- a/airflow/contrib/operators/oracle_to_azure_data_lake_transfer.py +++ b/airflow/contrib/operators/oracle_to_azure_data_lake_transfer.py @@ -22,7 +22,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake import ( # noqa OracleToAzureDataLakeOperator, ) diff --git a/airflow/contrib/operators/oracle_to_oracle_transfer.py b/airflow/contrib/operators/oracle_to_oracle_transfer.py index 905cba5a45c21..2efbf4d0a89b4 100644 --- a/airflow/contrib/operators/oracle_to_oracle_transfer.py +++ b/airflow/contrib/operators/oracle_to_oracle_transfer.py @@ -22,7 +22,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.oracle.transfers.oracle_to_oracle import OracleToOracleOperator warnings.warn( diff --git a/airflow/contrib/operators/qubole_check_operator.py b/airflow/contrib/operators/qubole_check_operator.py index 14f5bd92a6dcb..e42a9e0c8f13a 100644 --- a/airflow/contrib/operators/qubole_check_operator.py +++ b/airflow/contrib/operators/qubole_check_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.qubole.operators.qubole_check import ( # noqa QuboleCheckOperator, QuboleValueCheckOperator, diff --git a/airflow/contrib/operators/qubole_operator.py b/airflow/contrib/operators/qubole_operator.py index 171e275ccc1c9..e4a30748a03c6 100644 --- a/airflow/contrib/operators/qubole_operator.py +++ b/airflow/contrib/operators/qubole_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.qubole.operators.qubole import QuboleOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/redis_publish_operator.py b/airflow/contrib/operators/redis_publish_operator.py index 12f9261ad2573..994d9323ad170 100644 --- a/airflow/contrib/operators/redis_publish_operator.py +++ b/airflow/contrib/operators/redis_publish_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.redis.operators.redis_publish import RedisPublishOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/s3_copy_object_operator.py b/airflow/contrib/operators/s3_copy_object_operator.py index 287c0817857f0..cbe9c63440629 100644 --- a/airflow/contrib/operators/s3_copy_object_operator.py +++ b/airflow/contrib/operators/s3_copy_object_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.operators.s3_copy_object import S3CopyObjectOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/s3_delete_objects_operator.py b/airflow/contrib/operators/s3_delete_objects_operator.py index 2e55dc5ea2efe..a0ab210324b55 100644 --- a/airflow/contrib/operators/s3_delete_objects_operator.py +++ 
b/airflow/contrib/operators/s3_delete_objects_operator.py @@ -22,7 +22,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.operators.s3_delete_objects import S3DeleteObjectsOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/s3_list_operator.py b/airflow/contrib/operators/s3_list_operator.py index b9eb3542287f5..172b94cb116c2 100644 --- a/airflow/contrib/operators/s3_list_operator.py +++ b/airflow/contrib/operators/s3_list_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.operators.s3_list import S3ListOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/s3_to_gcs_operator.py b/airflow/contrib/operators/s3_to_gcs_operator.py index 18cfcd33c82cc..d0ea8e09bdbb1 100644 --- a/airflow/contrib/operators/s3_to_gcs_operator.py +++ b/airflow/contrib/operators/s3_to_gcs_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.google.cloud.transfers.s3_to_gcs import S3ToGCSOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/s3_to_gcs_transfer_operator.py b/airflow/contrib/operators/s3_to_gcs_transfer_operator.py index d82657b774bc5..71df06229334b 100644 --- a/airflow/contrib/operators/s3_to_gcs_transfer_operator.py +++ b/airflow/contrib/operators/s3_to_gcs_transfer_operator.py @@ -21,7 +21,6 @@ """ import warnings -# pylint: disable=unused-import,line-too-long from airflow.providers.google.cloud.operators.cloud_storage_transfer_service import ( # noqa isort:skip CloudDataTransferServiceS3ToGCSOperator, ) diff --git a/airflow/contrib/operators/s3_to_sftp_operator.py b/airflow/contrib/operators/s3_to_sftp_operator.py index 5de43305b97b1..e129af1471782 100644 --- a/airflow/contrib/operators/s3_to_sftp_operator.py +++ b/airflow/contrib/operators/s3_to_sftp_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.transfers.s3_to_sftp import S3ToSFTPOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/sagemaker_base_operator.py b/airflow/contrib/operators/sagemaker_base_operator.py index 551502f67d85b..4c2c8f6baf131 100644 --- a/airflow/contrib/operators/sagemaker_base_operator.py +++ b/airflow/contrib/operators/sagemaker_base_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.operators.sagemaker_base import SageMakerBaseOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/sagemaker_endpoint_config_operator.py b/airflow/contrib/operators/sagemaker_endpoint_config_operator.py index cf828d4ac7317..43945b222734d 100644 --- a/airflow/contrib/operators/sagemaker_endpoint_config_operator.py +++ b/airflow/contrib/operators/sagemaker_endpoint_config_operator.py @@ -22,7 +22,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.operators.sagemaker_endpoint_config import ( # noqa SageMakerEndpointConfigOperator, ) diff --git a/airflow/contrib/operators/sagemaker_endpoint_operator.py b/airflow/contrib/operators/sagemaker_endpoint_operator.py index 363bea19769d9..fe175a67b29cf 100644 --- a/airflow/contrib/operators/sagemaker_endpoint_operator.py +++ b/airflow/contrib/operators/sagemaker_endpoint_operator.py @@ -22,7 +22,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.operators.sagemaker_endpoint import SageMakerEndpointOperator # noqa warnings.warn( diff --git 
a/airflow/contrib/operators/sagemaker_model_operator.py b/airflow/contrib/operators/sagemaker_model_operator.py index efcfc58a46d82..9a003485606ab 100644 --- a/airflow/contrib/operators/sagemaker_model_operator.py +++ b/airflow/contrib/operators/sagemaker_model_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.operators.sagemaker_model import SageMakerModelOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/sagemaker_training_operator.py b/airflow/contrib/operators/sagemaker_training_operator.py index 6502df0393ae9..d3749c68573d2 100644 --- a/airflow/contrib/operators/sagemaker_training_operator.py +++ b/airflow/contrib/operators/sagemaker_training_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.operators.sagemaker_training import SageMakerTrainingOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/sagemaker_transform_operator.py b/airflow/contrib/operators/sagemaker_transform_operator.py index ca8d874ce4d5a..93cf7070db220 100644 --- a/airflow/contrib/operators/sagemaker_transform_operator.py +++ b/airflow/contrib/operators/sagemaker_transform_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.operators.sagemaker_transform import SageMakerTransformOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/sagemaker_tuning_operator.py b/airflow/contrib/operators/sagemaker_tuning_operator.py index 8885189dd29a3..05760a74569cf 100644 --- a/airflow/contrib/operators/sagemaker_tuning_operator.py +++ b/airflow/contrib/operators/sagemaker_tuning_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.operators.sagemaker_tuning import SageMakerTuningOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/segment_track_event_operator.py b/airflow/contrib/operators/segment_track_event_operator.py index 759f4b9efdbfe..92419a1977405 100644 --- a/airflow/contrib/operators/segment_track_event_operator.py +++ b/airflow/contrib/operators/segment_track_event_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.segment.operators.segment_track_event import SegmentTrackEventOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/sftp_operator.py b/airflow/contrib/operators/sftp_operator.py index 0cd44eec5ee8b..e73a84743c83b 100644 --- a/airflow/contrib/operators/sftp_operator.py +++ b/airflow/contrib/operators/sftp_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.sftp.operators.sftp import SFTPOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/sftp_to_s3_operator.py b/airflow/contrib/operators/sftp_to_s3_operator.py index c5f1f17acac6b..7c13b1817d63e 100644 --- a/airflow/contrib/operators/sftp_to_s3_operator.py +++ b/airflow/contrib/operators/sftp_to_s3_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.transfers.sftp_to_s3 import SFTPToS3Operator # noqa warnings.warn( diff --git a/airflow/contrib/operators/slack_webhook_operator.py b/airflow/contrib/operators/slack_webhook_operator.py index f50ba53736de1..f271102e14550 100644 --- a/airflow/contrib/operators/slack_webhook_operator.py +++ b/airflow/contrib/operators/slack_webhook_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from 
airflow.providers.slack.operators.slack_webhook import SlackWebhookOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/snowflake_operator.py b/airflow/contrib/operators/snowflake_operator.py index a81ac10f6d3c1..f01cc72d1a531 100644 --- a/airflow/contrib/operators/snowflake_operator.py +++ b/airflow/contrib/operators/snowflake_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.snowflake.operators.snowflake import SnowflakeOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/sns_publish_operator.py b/airflow/contrib/operators/sns_publish_operator.py index b79cf4607c5c2..104e240836f12 100644 --- a/airflow/contrib/operators/sns_publish_operator.py +++ b/airflow/contrib/operators/sns_publish_operator.py @@ -20,7 +20,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.operators.sns import SnsPublishOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/spark_jdbc_operator.py b/airflow/contrib/operators/spark_jdbc_operator.py index 1657fb225eca3..fc3cdc02704c6 100644 --- a/airflow/contrib/operators/spark_jdbc_operator.py +++ b/airflow/contrib/operators/spark_jdbc_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.apache.spark.operators.spark_jdbc import SparkJDBCOperator, SparkSubmitOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/spark_sql_operator.py b/airflow/contrib/operators/spark_sql_operator.py index 7c4d3d4e346e0..19e20d215dbf5 100644 --- a/airflow/contrib/operators/spark_sql_operator.py +++ b/airflow/contrib/operators/spark_sql_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.apache.spark.operators.spark_sql import SparkSqlOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/spark_submit_operator.py b/airflow/contrib/operators/spark_submit_operator.py index a0f9edddbe160..103187e445fa4 100644 --- a/airflow/contrib/operators/spark_submit_operator.py +++ b/airflow/contrib/operators/spark_submit_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.apache.spark.operators.spark_submit import SparkSubmitOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/sqoop_operator.py b/airflow/contrib/operators/sqoop_operator.py index 0cee705601154..2757847abd65e 100644 --- a/airflow/contrib/operators/sqoop_operator.py +++ b/airflow/contrib/operators/sqoop_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.apache.sqoop.operators.sqoop import SqoopOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/ssh_operator.py b/airflow/contrib/operators/ssh_operator.py index 401ca43a5d229..56f94b9b26b63 100644 --- a/airflow/contrib/operators/ssh_operator.py +++ b/airflow/contrib/operators/ssh_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.ssh.operators.ssh import SSHOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/vertica_operator.py b/airflow/contrib/operators/vertica_operator.py index 38d13d55a73de..e652512ad4056 100644 --- a/airflow/contrib/operators/vertica_operator.py +++ b/airflow/contrib/operators/vertica_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.vertica.operators.vertica import VerticaOperator # noqa warnings.warn( diff --git 
a/airflow/contrib/operators/vertica_to_mysql.py b/airflow/contrib/operators/vertica_to_mysql.py index 35b0df66dc776..c85738f9091b2 100644 --- a/airflow/contrib/operators/vertica_to_mysql.py +++ b/airflow/contrib/operators/vertica_to_mysql.py @@ -22,7 +22,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.mysql.transfers.vertica_to_mysql import VerticaToMySqlOperator warnings.warn( diff --git a/airflow/contrib/operators/wasb_delete_blob_operator.py b/airflow/contrib/operators/wasb_delete_blob_operator.py index d97cdc04a5e76..cbf11b38fbf86 100644 --- a/airflow/contrib/operators/wasb_delete_blob_operator.py +++ b/airflow/contrib/operators/wasb_delete_blob_operator.py @@ -22,7 +22,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.microsoft.azure.operators.wasb_delete_blob import WasbDeleteBlobOperator # noqa warnings.warn( diff --git a/airflow/contrib/operators/winrm_operator.py b/airflow/contrib/operators/winrm_operator.py index dcdceb38f2000..fcc6213e71d6e 100644 --- a/airflow/contrib/operators/winrm_operator.py +++ b/airflow/contrib/operators/winrm_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.microsoft.winrm.operators.winrm import WinRMOperator # noqa warnings.warn( diff --git a/airflow/contrib/secrets/aws_secrets_manager.py b/airflow/contrib/secrets/aws_secrets_manager.py index cac72cd580245..833b03a59b06c 100644 --- a/airflow/contrib/secrets/aws_secrets_manager.py +++ b/airflow/contrib/secrets/aws_secrets_manager.py @@ -20,7 +20,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.secrets.secrets_manager import SecretsManagerBackend # noqa warnings.warn( diff --git a/airflow/contrib/secrets/aws_systems_manager.py b/airflow/contrib/secrets/aws_systems_manager.py index 3e443e19dbef3..4c7a30cf05ab7 100644 --- a/airflow/contrib/secrets/aws_systems_manager.py +++ b/airflow/contrib/secrets/aws_systems_manager.py @@ -20,7 +20,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.secrets.systems_manager import SystemsManagerParameterStoreBackend # noqa warnings.warn( diff --git a/airflow/contrib/secrets/azure_key_vault.py b/airflow/contrib/secrets/azure_key_vault.py index e29fcd3a840ff..f254dedf23c3f 100644 --- a/airflow/contrib/secrets/azure_key_vault.py +++ b/airflow/contrib/secrets/azure_key_vault.py @@ -20,7 +20,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.microsoft.azure.secrets.azure_key_vault import AzureKeyVaultBackend # noqa warnings.warn( diff --git a/airflow/contrib/secrets/gcp_secrets_manager.py b/airflow/contrib/secrets/gcp_secrets_manager.py index 797e133422ab5..7caa7ea2e88a7 100644 --- a/airflow/contrib/secrets/gcp_secrets_manager.py +++ b/airflow/contrib/secrets/gcp_secrets_manager.py @@ -20,7 +20,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.google.cloud.secrets.secret_manager import CloudSecretManagerBackend warnings.warn( diff --git a/airflow/contrib/secrets/hashicorp_vault.py b/airflow/contrib/secrets/hashicorp_vault.py index 30932807fd424..a3158d5d03abf 100644 --- a/airflow/contrib/secrets/hashicorp_vault.py +++ b/airflow/contrib/secrets/hashicorp_vault.py @@ -20,7 +20,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.hashicorp.secrets.vault import VaultBackend # noqa warnings.warn( diff --git a/airflow/contrib/sensors/aws_athena_sensor.py b/airflow/contrib/sensors/aws_athena_sensor.py index 
fbf585a9803c3..ddffc38bb63e8 100644 --- a/airflow/contrib/sensors/aws_athena_sensor.py +++ b/airflow/contrib/sensors/aws_athena_sensor.py @@ -20,7 +20,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.sensors.athena import AthenaSensor # noqa warnings.warn( diff --git a/airflow/contrib/sensors/aws_glue_catalog_partition_sensor.py b/airflow/contrib/sensors/aws_glue_catalog_partition_sensor.py index 8378da541f194..66975a86f3e2f 100644 --- a/airflow/contrib/sensors/aws_glue_catalog_partition_sensor.py +++ b/airflow/contrib/sensors/aws_glue_catalog_partition_sensor.py @@ -22,7 +22,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.sensors.glue_catalog_partition import AwsGlueCatalogPartitionSensor # noqa warnings.warn( diff --git a/airflow/contrib/sensors/aws_redshift_cluster_sensor.py b/airflow/contrib/sensors/aws_redshift_cluster_sensor.py index a3086e2f00819..5c1341d8a0635 100644 --- a/airflow/contrib/sensors/aws_redshift_cluster_sensor.py +++ b/airflow/contrib/sensors/aws_redshift_cluster_sensor.py @@ -20,7 +20,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.sensors.redshift import AwsRedshiftClusterSensor # noqa warnings.warn( diff --git a/airflow/contrib/sensors/aws_sqs_sensor.py b/airflow/contrib/sensors/aws_sqs_sensor.py index 984f2273a62be..d8365214d41d2 100644 --- a/airflow/contrib/sensors/aws_sqs_sensor.py +++ b/airflow/contrib/sensors/aws_sqs_sensor.py @@ -20,7 +20,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.sensors.sqs import SQSSensor # noqa warnings.warn( diff --git a/airflow/contrib/sensors/azure_cosmos_sensor.py b/airflow/contrib/sensors/azure_cosmos_sensor.py index 77bc26b24d2ef..b7c357d9658c8 100644 --- a/airflow/contrib/sensors/azure_cosmos_sensor.py +++ b/airflow/contrib/sensors/azure_cosmos_sensor.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.microsoft.azure.sensors.azure_cosmos import AzureCosmosDocumentSensor # noqa warnings.warn( diff --git a/airflow/contrib/sensors/bash_sensor.py b/airflow/contrib/sensors/bash_sensor.py index ac9762c594a91..c3d9c814696c7 100644 --- a/airflow/contrib/sensors/bash_sensor.py +++ b/airflow/contrib/sensors/bash_sensor.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.sensors.bash import STDOUT, BashSensor, Popen, TemporaryDirectory, gettempdir # noqa warnings.warn( diff --git a/airflow/contrib/sensors/cassandra_record_sensor.py b/airflow/contrib/sensors/cassandra_record_sensor.py index 908cb8e48d964..cfc3b30107ead 100644 --- a/airflow/contrib/sensors/cassandra_record_sensor.py +++ b/airflow/contrib/sensors/cassandra_record_sensor.py @@ -20,7 +20,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.apache.cassandra.sensors.record import CassandraRecordSensor # noqa warnings.warn( diff --git a/airflow/contrib/sensors/cassandra_table_sensor.py b/airflow/contrib/sensors/cassandra_table_sensor.py index ed09da2cf9904..0b7c7aa6eb73f 100644 --- a/airflow/contrib/sensors/cassandra_table_sensor.py +++ b/airflow/contrib/sensors/cassandra_table_sensor.py @@ -20,7 +20,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.apache.cassandra.sensors.table import CassandraTableSensor # noqa warnings.warn( diff --git a/airflow/contrib/sensors/celery_queue_sensor.py b/airflow/contrib/sensors/celery_queue_sensor.py index 59143182a3710..6ed2be1c93ac5 100644 --- 
a/airflow/contrib/sensors/celery_queue_sensor.py +++ b/airflow/contrib/sensors/celery_queue_sensor.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.celery.sensors.celery_queue import CeleryQueueSensor # noqa warnings.warn( diff --git a/airflow/contrib/sensors/datadog_sensor.py b/airflow/contrib/sensors/datadog_sensor.py index a1d91f036b662..d0377d11a8889 100644 --- a/airflow/contrib/sensors/datadog_sensor.py +++ b/airflow/contrib/sensors/datadog_sensor.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.datadog.sensors.datadog import DatadogSensor # noqa warnings.warn( diff --git a/airflow/contrib/sensors/emr_base_sensor.py b/airflow/contrib/sensors/emr_base_sensor.py index 7b8965fa41455..08d0efed81475 100644 --- a/airflow/contrib/sensors/emr_base_sensor.py +++ b/airflow/contrib/sensors/emr_base_sensor.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.sensors.emr_base import EmrBaseSensor # noqa warnings.warn( diff --git a/airflow/contrib/sensors/emr_job_flow_sensor.py b/airflow/contrib/sensors/emr_job_flow_sensor.py index 4c1debb170464..429052a4ec969 100644 --- a/airflow/contrib/sensors/emr_job_flow_sensor.py +++ b/airflow/contrib/sensors/emr_job_flow_sensor.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.sensors.emr_job_flow import EmrJobFlowSensor # noqa warnings.warn( diff --git a/airflow/contrib/sensors/emr_step_sensor.py b/airflow/contrib/sensors/emr_step_sensor.py index 9e7e37d52cdfd..9d4ac9b166ed6 100644 --- a/airflow/contrib/sensors/emr_step_sensor.py +++ b/airflow/contrib/sensors/emr_step_sensor.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.sensors.emr_step import EmrStepSensor # noqa warnings.warn( diff --git a/airflow/contrib/sensors/file_sensor.py b/airflow/contrib/sensors/file_sensor.py index 63ca731e5fec3..6d75b657e6cef 100644 --- a/airflow/contrib/sensors/file_sensor.py +++ b/airflow/contrib/sensors/file_sensor.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.sensors.filesystem import FileSensor # noqa warnings.warn( diff --git a/airflow/contrib/sensors/ftp_sensor.py b/airflow/contrib/sensors/ftp_sensor.py index 7d85ea42d506f..76c47c4609854 100644 --- a/airflow/contrib/sensors/ftp_sensor.py +++ b/airflow/contrib/sensors/ftp_sensor.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.ftp.sensors.ftp import FTPSensor, FTPSSensor # noqa warnings.warn( diff --git a/airflow/contrib/sensors/hdfs_sensor.py b/airflow/contrib/sensors/hdfs_sensor.py index 1fa3ce6264d1c..d71ec8fc2f454 100644 --- a/airflow/contrib/sensors/hdfs_sensor.py +++ b/airflow/contrib/sensors/hdfs_sensor.py @@ -22,7 +22,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.apache.hdfs.sensors.hdfs import HdfsFolderSensor, HdfsRegexSensor warnings.warn( @@ -32,7 +31,6 @@ ) -# pylint: disable=too-many-ancestors class HdfsSensorFolder(HdfsFolderSensor): """This class is deprecated. @@ -51,7 +49,6 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) -# pylint: disable=too-many-ancestors class HdfsSensorRegex(HdfsRegexSensor): """This class is deprecated. 
diff --git a/airflow/contrib/sensors/imap_attachment_sensor.py b/airflow/contrib/sensors/imap_attachment_sensor.py index a080a34aee53e..34d2d7f1402e8 100644 --- a/airflow/contrib/sensors/imap_attachment_sensor.py +++ b/airflow/contrib/sensors/imap_attachment_sensor.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.imap.sensors.imap_attachment import ImapAttachmentSensor # noqa warnings.warn( diff --git a/airflow/contrib/sensors/jira_sensor.py b/airflow/contrib/sensors/jira_sensor.py index 4e6106303d82f..e7c3785209616 100644 --- a/airflow/contrib/sensors/jira_sensor.py +++ b/airflow/contrib/sensors/jira_sensor.py @@ -20,7 +20,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.jira.sensors.jira import JiraSensor, JiraTicketSensor # noqa warnings.warn( diff --git a/airflow/contrib/sensors/mongo_sensor.py b/airflow/contrib/sensors/mongo_sensor.py index 9c03e36ba9e53..13a5f0b65af4c 100644 --- a/airflow/contrib/sensors/mongo_sensor.py +++ b/airflow/contrib/sensors/mongo_sensor.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.mongo.sensors.mongo import MongoSensor # noqa warnings.warn( diff --git a/airflow/contrib/sensors/pubsub_sensor.py b/airflow/contrib/sensors/pubsub_sensor.py index 08f33f8fe693d..eea404e216995 100644 --- a/airflow/contrib/sensors/pubsub_sensor.py +++ b/airflow/contrib/sensors/pubsub_sensor.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.google.cloud.sensors.pubsub import PubSubPullSensor # noqa warnings.warn( diff --git a/airflow/contrib/sensors/python_sensor.py b/airflow/contrib/sensors/python_sensor.py index 3f4bc57ff7ecd..bc7543c2fd372 100644 --- a/airflow/contrib/sensors/python_sensor.py +++ b/airflow/contrib/sensors/python_sensor.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.sensors.python import PythonSensor # noqa warnings.warn( diff --git a/airflow/contrib/sensors/qubole_sensor.py b/airflow/contrib/sensors/qubole_sensor.py index c17086f5ccc45..6b656249003e9 100644 --- a/airflow/contrib/sensors/qubole_sensor.py +++ b/airflow/contrib/sensors/qubole_sensor.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.qubole.sensors.qubole import ( # noqa QuboleFileSensor, QubolePartitionSensor, diff --git a/airflow/contrib/sensors/redis_key_sensor.py b/airflow/contrib/sensors/redis_key_sensor.py index 61f6b55cb20f3..f500c86dac5f3 100644 --- a/airflow/contrib/sensors/redis_key_sensor.py +++ b/airflow/contrib/sensors/redis_key_sensor.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.redis.sensors.redis_key import RedisKeySensor # noqa warnings.warn( diff --git a/airflow/contrib/sensors/redis_pub_sub_sensor.py b/airflow/contrib/sensors/redis_pub_sub_sensor.py index b7080616bc8ed..16946ac40d198 100644 --- a/airflow/contrib/sensors/redis_pub_sub_sensor.py +++ b/airflow/contrib/sensors/redis_pub_sub_sensor.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.redis.sensors.redis_pub_sub import RedisPubSubSensor # noqa warnings.warn( diff --git a/airflow/contrib/sensors/sagemaker_base_sensor.py b/airflow/contrib/sensors/sagemaker_base_sensor.py index 1791a38a14278..86e32330278e1 100644 --- a/airflow/contrib/sensors/sagemaker_base_sensor.py +++ b/airflow/contrib/sensors/sagemaker_base_sensor.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from 
airflow.providers.amazon.aws.sensors.sagemaker_base import SageMakerBaseSensor # noqa warnings.warn( diff --git a/airflow/contrib/sensors/sagemaker_endpoint_sensor.py b/airflow/contrib/sensors/sagemaker_endpoint_sensor.py index 7593e74d124e2..5107d6f542fd0 100644 --- a/airflow/contrib/sensors/sagemaker_endpoint_sensor.py +++ b/airflow/contrib/sensors/sagemaker_endpoint_sensor.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.sensors.sagemaker_endpoint import SageMakerEndpointSensor # noqa warnings.warn( diff --git a/airflow/contrib/sensors/sagemaker_training_sensor.py b/airflow/contrib/sensors/sagemaker_training_sensor.py index d933b5cec79a2..e8a7650660974 100644 --- a/airflow/contrib/sensors/sagemaker_training_sensor.py +++ b/airflow/contrib/sensors/sagemaker_training_sensor.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.sensors.sagemaker_training import ( # noqa SageMakerHook, SageMakerTrainingSensor, diff --git a/airflow/contrib/sensors/sagemaker_transform_sensor.py b/airflow/contrib/sensors/sagemaker_transform_sensor.py index 98444eaa62884..29fd18f8baaae 100644 --- a/airflow/contrib/sensors/sagemaker_transform_sensor.py +++ b/airflow/contrib/sensors/sagemaker_transform_sensor.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.sensors.sagemaker_transform import SageMakerTransformSensor # noqa warnings.warn( diff --git a/airflow/contrib/sensors/sagemaker_tuning_sensor.py b/airflow/contrib/sensors/sagemaker_tuning_sensor.py index 715936bae6aa3..7079e4ccb774f 100644 --- a/airflow/contrib/sensors/sagemaker_tuning_sensor.py +++ b/airflow/contrib/sensors/sagemaker_tuning_sensor.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.sensors.sagemaker_tuning import SageMakerTuningSensor # noqa warnings.warn( diff --git a/airflow/contrib/sensors/sftp_sensor.py b/airflow/contrib/sensors/sftp_sensor.py index 6d630a30aafa7..d2700e814295a 100644 --- a/airflow/contrib/sensors/sftp_sensor.py +++ b/airflow/contrib/sensors/sftp_sensor.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.sftp.sensors.sftp import SFTPSensor # noqa warnings.warn( diff --git a/airflow/contrib/sensors/wasb_sensor.py b/airflow/contrib/sensors/wasb_sensor.py index 5fef5bb002bb7..d8e0748907afe 100644 --- a/airflow/contrib/sensors/wasb_sensor.py +++ b/airflow/contrib/sensors/wasb_sensor.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.microsoft.azure.sensors.wasb import WasbBlobSensor, WasbPrefixSensor # noqa warnings.warn( diff --git a/airflow/contrib/sensors/weekday_sensor.py b/airflow/contrib/sensors/weekday_sensor.py index 4d5342d479cbb..1f836e1dda936 100644 --- a/airflow/contrib/sensors/weekday_sensor.py +++ b/airflow/contrib/sensors/weekday_sensor.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.sensors.weekday import DayOfWeekSensor # noqa warnings.warn( diff --git a/airflow/contrib/task_runner/cgroup_task_runner.py b/airflow/contrib/task_runner/cgroup_task_runner.py index 3a996f8cec6f9..f923126fe4475 100644 --- a/airflow/contrib/task_runner/cgroup_task_runner.py +++ b/airflow/contrib/task_runner/cgroup_task_runner.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.task.task_runner.cgroup_task_runner import CgroupTaskRunner # noqa warnings.warn( diff --git 
a/airflow/contrib/utils/gcp_field_sanitizer.py b/airflow/contrib/utils/gcp_field_sanitizer.py index a31b6d33d0f63..37c0aff2b76d6 100644 --- a/airflow/contrib/utils/gcp_field_sanitizer.py +++ b/airflow/contrib/utils/gcp_field_sanitizer.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.google.cloud.utils.field_sanitizer import ( # noqa GcpBodyFieldSanitizer, GcpFieldSanitizerException, diff --git a/airflow/contrib/utils/gcp_field_validator.py b/airflow/contrib/utils/gcp_field_validator.py index 6ac5b4dbe76e0..fc42dca94be00 100644 --- a/airflow/contrib/utils/gcp_field_validator.py +++ b/airflow/contrib/utils/gcp_field_validator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.google.cloud.utils.field_validator import ( # noqa GcpBodyFieldValidator, GcpFieldValidationException, diff --git a/airflow/contrib/utils/log/task_handler_with_custom_formatter.py b/airflow/contrib/utils/log/task_handler_with_custom_formatter.py index 6741b9236b82e..9bbdee3b2c8c3 100644 --- a/airflow/contrib/utils/log/task_handler_with_custom_formatter.py +++ b/airflow/contrib/utils/log/task_handler_with_custom_formatter.py @@ -18,7 +18,6 @@ import warnings -# pylint: disable=unused-import from airflow.utils.log.task_handler_with_custom_formatter import TaskHandlerWithCustomFormatter # noqa warnings.warn( diff --git a/airflow/contrib/utils/mlengine_operator_utils.py b/airflow/contrib/utils/mlengine_operator_utils.py index 46373ffc11376..ebd630c1912a2 100644 --- a/airflow/contrib/utils/mlengine_operator_utils.py +++ b/airflow/contrib/utils/mlengine_operator_utils.py @@ -22,7 +22,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.google.cloud.utils.mlengine_operator_utils import create_evaluate_ops # noqa warnings.warn( diff --git a/airflow/contrib/utils/mlengine_prediction_summary.py b/airflow/contrib/utils/mlengine_prediction_summary.py index 2edc1301c6090..ea390525a359d 100644 --- a/airflow/contrib/utils/mlengine_prediction_summary.py +++ b/airflow/contrib/utils/mlengine_prediction_summary.py @@ -22,7 +22,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.google.cloud.utils.mlengine_prediction_summary import JsonCoder, MakeSummary # noqa warnings.warn( diff --git a/airflow/contrib/utils/weekday.py b/airflow/contrib/utils/weekday.py index ff3dc22b09d63..2f2448c8896da 100644 --- a/airflow/contrib/utils/weekday.py +++ b/airflow/contrib/utils/weekday.py @@ -17,7 +17,6 @@ """This module is deprecated. 
Please use :mod:`airflow.utils.weekday`.""" import warnings -# pylint: disable=unused-import from airflow.utils.weekday import WeekDay # noqa warnings.warn( diff --git a/airflow/decorators/__init__.py b/airflow/decorators/__init__.py index d1386b0b3f0de..1250f321de291 100644 --- a/airflow/decorators/__init__.py +++ b/airflow/decorators/__init__.py @@ -19,8 +19,8 @@ from airflow.decorators.python import python_task from airflow.decorators.python_virtualenv import _virtualenv_task -from airflow.decorators.task_group import task_group # noqa # pylint: disable=unused-import -from airflow.models.dag import dag # noqa # pylint: disable=unused-import +from airflow.decorators.task_group import task_group # noqa +from airflow.models.dag import dag # noqa class _TaskDecorator: diff --git a/airflow/decorators/base.py b/airflow/decorators/base.py index 2f17980b901c6..b13396484c305 100644 --- a/airflow/decorators/base.py +++ b/airflow/decorators/base.py @@ -175,7 +175,7 @@ def _hook_apply_defaults(self, *args, **kwargs): return args, kwargs -T = TypeVar("T", bound=Callable) # pylint: disable=invalid-name +T = TypeVar("T", bound=Callable) def task_decorator_factory( diff --git a/airflow/decorators/python.py b/airflow/decorators/python.py index f089995c3af9f..a7251cb241c00 100644 --- a/airflow/decorators/python.py +++ b/airflow/decorators/python.py @@ -59,7 +59,7 @@ def __init__( super().__init__(kwargs_to_upstream=kwargs_to_upstream, **kwargs) -T = TypeVar("T", bound=Callable) # pylint: disable=invalid-name +T = TypeVar("T", bound=Callable) def python_task( diff --git a/airflow/decorators/python_virtualenv.py b/airflow/decorators/python_virtualenv.py index 386c267f9bde7..61e5d1f5dc93e 100644 --- a/airflow/decorators/python_virtualenv.py +++ b/airflow/decorators/python_virtualenv.py @@ -67,7 +67,7 @@ def get_python_source(self): return res -T = TypeVar("T", bound=Callable) # pylint: disable=invalid-name +T = TypeVar("T", bound=Callable) def _virtualenv_task( diff --git a/airflow/decorators/task_group.py b/airflow/decorators/task_group.py index 89283b201554e..cdd98835d1954 100644 --- a/airflow/decorators/task_group.py +++ b/airflow/decorators/task_group.py @@ -25,7 +25,7 @@ from airflow.utils.task_group import TaskGroup -T = TypeVar("T", bound=Callable) # pylint: disable=invalid-name +T = TypeVar("T", bound=Callable) task_group_sig = signature(TaskGroup.__init__) diff --git a/airflow/example_dags/example_branch_labels.py b/airflow/example_dags/example_branch_labels.py index 4f1cb257f0fb8..926e35d3026db 100644 --- a/airflow/example_dags/example_branch_labels.py +++ b/airflow/example_dags/example_branch_labels.py @@ -36,5 +36,5 @@ report = DummyOperator(task_id="report") ingest >> analyse >> check - check >> Label("No errors") >> save >> report # pylint: disable=expression-not-assigned - check >> Label("Errors found") >> describe >> error >> report # pylint: disable=expression-not-assigned + check >> Label("No errors") >> save >> report + check >> Label("Errors found") >> describe >> error >> report diff --git a/airflow/example_dags/example_branch_operator.py b/airflow/example_dags/example_branch_operator.py index 6c1fb8f95df25..79284905e4cef 100644 --- a/airflow/example_dags/example_branch_operator.py +++ b/airflow/example_dags/example_branch_operator.py @@ -65,4 +65,4 @@ ) # Label is optional here, but it can help identify more complex branches - branching >> Label(option) >> t >> dummy_follow >> join # pylint: disable=expression-not-assigned + branching >> Label(option) >> t >> dummy_follow >> join diff 
--git a/airflow/example_dags/libs/helper.py b/airflow/example_dags/libs/helper.py index f80194e056920..a3d3a720a0255 100644 --- a/airflow/example_dags/libs/helper.py +++ b/airflow/example_dags/libs/helper.py @@ -17,6 +17,5 @@ # under the License. -# pylint: disable=missing-docstring -def print_stuff(): # noqa: D103 +def print_stuff(): print("annotated!") diff --git a/airflow/example_dags/tutorial_etl_dag.py b/airflow/example_dags/tutorial_etl_dag.py index 8b45600a677b7..a23c83682e54a 100644 --- a/airflow/example_dags/tutorial_etl_dag.py +++ b/airflow/example_dags/tutorial_etl_dag.py @@ -16,7 +16,6 @@ # specific language governing permissions and limitations # under the License. -# pylint: disable=missing-function-docstring """ ### ETL DAG Tutorial Documentation diff --git a/airflow/example_dags/tutorial_taskflow_api_etl.py b/airflow/example_dags/tutorial_taskflow_api_etl.py index 52745f3c26b28..edc589ce4f117 100644 --- a/airflow/example_dags/tutorial_taskflow_api_etl.py +++ b/airflow/example_dags/tutorial_taskflow_api_etl.py @@ -16,7 +16,6 @@ # specific language governing permissions and limitations # under the License. -# pylint: disable=missing-function-docstring # [START tutorial] # [START import_module] diff --git a/airflow/example_dags/tutorial_taskflow_api_etl_virtualenv.py b/airflow/example_dags/tutorial_taskflow_api_etl_virtualenv.py index 6d00cf662ae9d..d2723beee6d81 100644 --- a/airflow/example_dags/tutorial_taskflow_api_etl_virtualenv.py +++ b/airflow/example_dags/tutorial_taskflow_api_etl_virtualenv.py @@ -16,7 +16,6 @@ # specific language governing permissions and limitations # under the License. -# pylint: disable=missing-function-docstring # [START tutorial] # [START import_module] diff --git a/airflow/executors/celery_executor.py b/airflow/executors/celery_executor.py index 4c976cfe50ff5..b2c50163af5b2 100644 --- a/airflow/executors/celery_executor.py +++ b/airflow/executors/celery_executor.py @@ -39,7 +39,7 @@ from celery.backends.database import DatabaseBackend, Task as TaskDb, session_cleanup from celery.result import AsyncResult from celery.signals import import_modules as celery_import_modules -from setproctitle import setproctitle # pylint: disable=no-name-in-module +from setproctitle import setproctitle import airflow.settings as settings from airflow.config_templates.default_celery import DEFAULT_CELERY_CONFIG @@ -116,21 +116,21 @@ def _execute_in_fork(command_to_exec: CommandType) -> None: args.func(args) ret = 0 - except Exception as e: # pylint: disable=broad-except + except Exception as e: log.exception("Failed to execute task %s.", str(e)) ret = 1 finally: Sentry.flush() logging.shutdown() - os._exit(ret) # pylint: disable=protected-access + os._exit(ret) def _execute_in_subprocess(command_to_exec: CommandType) -> None: env = os.environ.copy() try: - # pylint: disable=unexpected-keyword-arg + subprocess.check_output(command_to_exec, stderr=subprocess.STDOUT, close_fds=True, env=env) - # pylint: disable=unexpected-keyword-arg + except subprocess.CalledProcessError as e: log.exception('execute_command encountered a CalledProcessError') log.error(e.output) @@ -166,14 +166,13 @@ def send_task_to_executor( try: with timeout(seconds=OPERATION_TIMEOUT): result = task_to_run.apply_async(args=[command], queue=queue) - except Exception as e: # pylint: disable=broad-except + except Exception as e: exception_traceback = f"Celery Task ID: {key}\n{traceback.format_exc()}" result = ExceptionWithTraceback(e, exception_traceback) return key, command, result -# pylint: 
disable=unused-import @celery_import_modules.connect def on_celery_import_modules(*args, **kwargs): """ @@ -198,9 +197,6 @@ def on_celery_import_modules(*args, **kwargs): pass -# pylint: enable=unused-import - - class CeleryExecutor(BaseExecutor): """ CeleryExecutor is recommended for production use of Airflow. It allows @@ -292,9 +288,7 @@ def _process_tasks(self, task_tuples_to_send: List[TaskInstanceInCelery]) -> Non self.queued_tasks.pop(key) self.task_publish_retries.pop(key) if isinstance(result, ExceptionWithTraceback): - self.log.error( # pylint: disable=logging-not-lazy - CELERY_SEND_ERR_MSG_HEADER + ": %s\n%s\n", result.exception, result.traceback - ) + self.log.error(CELERY_SEND_ERR_MSG_HEADER + ": %s\n%s\n", result.exception, result.traceback) self.event_buffer[key] = (State.FAILED, None) elif result is not None: result.backend = cached_celery_backend @@ -413,7 +407,7 @@ def update_task_state(self, key: TaskInstanceKey, state: str, info: Any) -> None pass else: self.log.info("Unexpected state for %s: %s", key, state) - except Exception: # noqa pylint: disable=broad-except + except Exception: self.log.exception("Error syncing the Celery executor, ignoring it.") def end(self, synchronous: bool = False) -> None: @@ -511,7 +505,7 @@ def fetch_celery_task_state(async_result: AsyncResult) -> Tuple[str, Union[str, # to get the current state of the task info = async_result.info if hasattr(async_result, 'info') else None return async_result.task_id, async_result.state, info - except Exception as e: # pylint: disable=broad-except + except Exception as e: exception_traceback = f"Celery Task ID: {async_result}\n{traceback.format_exc()}" return async_result.task_id, ExceptionWithTraceback(e, exception_traceback), None @@ -592,7 +586,7 @@ def _get_many_using_multiprocessing(self, async_results) -> Mapping[str, EventBu states_and_info_by_task_id: MutableMapping[str, EventBufferValueType] = {} for task_id, state_or_exception, info in task_id_to_states_and_info: if isinstance(state_or_exception, ExceptionWithTraceback): - self.log.error( # pylint: disable=logging-not-lazy + self.log.error( CELERY_FETCH_ERR_MSG_HEADER + ":%s\n%s\n", state_or_exception.exception, state_or_exception.traceback, diff --git a/airflow/executors/debug_executor.py b/airflow/executors/debug_executor.py index 3bf784cd49208..d9f2313764237 100644 --- a/airflow/executors/debug_executor.py +++ b/airflow/executors/debug_executor.py @@ -49,7 +49,7 @@ def __init__(self): self.tasks_params: Dict[TaskInstanceKey, Dict[str, Any]] = {} self.fail_fast = conf.getboolean("debug", "fail_fast") - def execute_async(self, *args, **kwargs) -> None: # pylint: disable=signature-differs + def execute_async(self, *args, **kwargs) -> None: """The method is replaced by custom trigger_task implementation.""" def sync(self) -> None: @@ -66,7 +66,7 @@ def sync(self) -> None: self.log.info("Executor is terminated! 
Stopping %s to %s", ti.key, State.FAILED) ti.set_state(State.FAILED) self.change_state(ti.key, State.FAILED) - ti._run_finished_callback() # pylint: disable=protected-access + ti._run_finished_callback() continue task_succeeded = self._run_task(ti) @@ -76,14 +76,14 @@ def _run_task(self, ti: TaskInstance) -> bool: key = ti.key try: params = self.tasks_params.pop(ti.key, {}) - ti._run_raw_task(job_id=ti.job_id, **params) # pylint: disable=protected-access + ti._run_raw_task(job_id=ti.job_id, **params) self.change_state(key, State.SUCCESS) - ti._run_finished_callback() # pylint: disable=protected-access + ti._run_finished_callback() return True - except Exception as e: # pylint: disable=broad-except + except Exception as e: ti.set_state(State.FAILED) self.change_state(key, State.FAILED) - ti._run_finished_callback() # pylint: disable=protected-access + ti._run_finished_callback() self.log.exception("Failed to execute task: %s.", str(e)) return False diff --git a/airflow/executors/kubernetes_executor.py b/airflow/executors/kubernetes_executor.py index 5c42acd31d1c1..91656879468c7 100644 --- a/airflow/executors/kubernetes_executor.py +++ b/airflow/executors/kubernetes_executor.py @@ -27,7 +27,7 @@ import multiprocessing import time from datetime import timedelta -from queue import Empty, Queue # pylint: disable=unused-import +from queue import Empty, Queue from typing import Any, Dict, List, Optional, Tuple from kubernetes import client, watch @@ -451,7 +451,7 @@ def clear_not_launched_queued_tasks(self, session=None) -> None: self.log.info('When executor started up, found %s queued task instances', len(queued_tasks)) for task in queued_tasks: - # pylint: disable=protected-access + self.log.debug("Checking task %s", task) dict_string = "dag_id={},task_id={},execution_date={},airflow-worker={}".format( pod_generator.make_safe_label_value(task.dag_id), @@ -459,7 +459,7 @@ def clear_not_launched_queued_tasks(self, session=None) -> None: pod_generator.datetime_to_label_safe_datestring(task.execution_date), pod_generator.make_safe_label_value(str(self.scheduler_job_id)), ) - # pylint: enable=protected-access + kwargs = dict(label_selector=dict_string) if self.kube_config.kube_client_request_args: for key, value in self.kube_config.kube_client_request_args.items(): @@ -504,7 +504,7 @@ def execute_async( self.log.info('Add task %s with command %s with executor_config %s', key, command, executor_config) try: kube_executor_config = PodGenerator.from_obj(executor_config) - except Exception: # pylint: disable=broad-except + except Exception: self.log.error("Invalid executor_config for %s", key) self.fail(key=key, info="Invalid executor_config passed") return @@ -537,7 +537,7 @@ def sync(self) -> None: self.kube_scheduler.sync() last_resource_version = None - while True: # pylint: disable=too-many-nested-blocks + while True: try: results = self.result_queue.get_nowait() try: @@ -546,7 +546,7 @@ def sync(self) -> None: self.log.info('Changing state of %s to %s', results, state) try: self._change_state(key, state, pod_id, namespace) - except Exception as e: # pylint: disable=broad-except + except Exception as e: self.log.exception( "Exception: %s when attempting to change state of %s to %s, re-queueing.", e, @@ -562,7 +562,6 @@ def sync(self) -> None: resource_instance = ResourceVersion() resource_instance.resource_version = last_resource_version or resource_instance.resource_version - # pylint: disable=too-many-nested-blocks for _ in range(self.kube_config.worker_pods_creation_batch_size): try: task = 
self.task_queue.get_nowait() @@ -583,7 +582,6 @@ def sync(self) -> None: self.task_queue.task_done() except Empty: break - # pylint: enable=too-many-nested-blocks # Run any pending timed events next_event = self.event_scheduler.run(blocking=False) @@ -725,7 +723,7 @@ def _flush_result_queue(self) -> None: if not self.result_queue: raise AirflowException(NOT_STARTED_MESSAGE) self.log.debug('Executor shutting down, result_queue approximate size=%d', self.result_queue.qsize()) - while True: # pylint: disable=too-many-nested-blocks + while True: try: results = self.result_queue.get_nowait() self.log.warning('Executor shutting down, flushing results=%s', results) @@ -736,7 +734,7 @@ def _flush_result_queue(self) -> None: ) try: self._change_state(key, state, pod_id, namespace) - except Exception as e: # pylint: disable=broad-except + except Exception as e: self.log.exception( 'Ignoring exception: %s when attempting to change state of %s to %s.', e, diff --git a/airflow/executors/local_executor.py b/airflow/executors/local_executor.py index a29342f2c8e46..ab9356f920d95 100644 --- a/airflow/executors/local_executor.py +++ b/airflow/executors/local_executor.py @@ -28,18 +28,15 @@ from abc import abstractmethod from multiprocessing import Manager, Process from multiprocessing.managers import SyncManager -from queue import Empty, Queue # pylint: disable=unused-import # noqa: F401 -from typing import Any, List, Optional, Tuple, Union # pylint: disable=unused-import # noqa: F401 +from queue import Empty, Queue +from typing import Any, List, Optional, Tuple, Union -from setproctitle import setproctitle # pylint: disable=no-name-in-module +from setproctitle import setproctitle from airflow import settings from airflow.exceptions import AirflowException from airflow.executors.base_executor import NOT_STARTED_MESSAGE, PARALLELISM, BaseExecutor, CommandType -from airflow.models.taskinstance import ( # pylint: disable=unused-import # noqa: F401 - TaskInstanceKey, - TaskInstanceStateType, -) +from airflow.models.taskinstance import TaskInstanceKey, TaskInstanceStateType from airflow.utils.log.logging_mixin import LoggingMixin from airflow.utils.state import State @@ -123,12 +120,12 @@ def _execute_work_in_fork(self, command: CommandType) -> str: args.func(args) ret = 0 return State.SUCCESS - except Exception as e: # pylint: disable=broad-except + except Exception as e: self.log.error("Failed to execute task %s.", str(e)) finally: Sentry.flush() logging.shutdown() - os._exit(ret) # pylint: disable=protected-access + os._exit(ret) raise RuntimeError('unreachable -- keep mypy happy') @abstractmethod @@ -227,7 +224,6 @@ def start(self) -> None: self.executor.workers_used = 0 self.executor.workers_active = 0 - # pylint: disable=unused-argument # pragma: no cover def execute_async( self, key: TaskInstanceKey, @@ -250,7 +246,6 @@ def execute_async( self.executor.workers_active += 1 local_worker.start() - # pylint: enable=unused-argument # pragma: no cover def sync(self) -> None: """Sync will get called periodically by the heartbeat method.""" if not self.executor.result_queue: @@ -302,8 +297,8 @@ def execute_async( self, key: TaskInstanceKey, command: CommandType, - queue: Optional[str] = None, # pylint: disable=unused-argument - executor_config: Optional[Any] = None, # pylint: disable=unused-argument + queue: Optional[str] = None, + executor_config: Optional[Any] = None, ) -> None: """ Executes task asynchronously. 
diff --git a/airflow/hooks/S3_hook.py b/airflow/hooks/S3_hook.py index cc0fd67299603..b59311a1ba9e4 100644 --- a/airflow/hooks/S3_hook.py +++ b/airflow/hooks/S3_hook.py @@ -16,12 +16,11 @@ # specific language governing permissions and limitations # under the License. -# pylint: disable=invalid-name + """This module is deprecated. Please use :mod:`airflow.providers.amazon.aws.hooks.s3`.""" import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.hooks.s3 import S3Hook, provide_bucket_name # noqa warnings.warn( diff --git a/airflow/hooks/base_hook.py b/airflow/hooks/base_hook.py index 5c94e5fe08c47..cf1594d18d284 100644 --- a/airflow/hooks/base_hook.py +++ b/airflow/hooks/base_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.hooks.base import BaseHook # noqa warnings.warn("This module is deprecated. Please use `airflow.hooks.base`.", DeprecationWarning, stacklevel=2) diff --git a/airflow/hooks/dbapi.py b/airflow/hooks/dbapi.py index 6c00320d7c16c..553dacc828749 100644 --- a/airflow/hooks/dbapi.py +++ b/airflow/hooks/dbapi.py @@ -312,7 +312,7 @@ def insert_rows(self, table, rows, target_fields=None, commit_every=1000, replac self.log.info("Done loading. Loaded a total of %s rows", i) @staticmethod - def _serialize_cell(cell, conn=None): # pylint: disable=unused-argument + def _serialize_cell(cell, conn=None): """ Returns the SQL literal of the cell as a string. diff --git a/airflow/hooks/dbapi_hook.py b/airflow/hooks/dbapi_hook.py index a3aafa9c8ad13..4a441b0f50d59 100644 --- a/airflow/hooks/dbapi_hook.py +++ b/airflow/hooks/dbapi_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.hooks.dbapi import DbApiHook # noqa warnings.warn( diff --git a/airflow/hooks/docker_hook.py b/airflow/hooks/docker_hook.py index 80e6319ec9e0d..aaedd7e637d93 100644 --- a/airflow/hooks/docker_hook.py +++ b/airflow/hooks/docker_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.docker.hooks.docker import DockerHook # noqa warnings.warn( diff --git a/airflow/hooks/druid_hook.py b/airflow/hooks/druid_hook.py index 9fe43e6f4ce2d..0a43debbabddc 100644 --- a/airflow/hooks/druid_hook.py +++ b/airflow/hooks/druid_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.apache.druid.hooks.druid import DruidDbApiHook, DruidHook # noqa warnings.warn( diff --git a/airflow/hooks/hdfs_hook.py b/airflow/hooks/hdfs_hook.py index 5476d544dced4..fd13e7337e262 100644 --- a/airflow/hooks/hdfs_hook.py +++ b/airflow/hooks/hdfs_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.apache.hdfs.hooks.hdfs import HDFSHook, HDFSHookException # noqa warnings.warn( diff --git a/airflow/hooks/hive_hooks.py b/airflow/hooks/hive_hooks.py index f559d32ad48a2..74d7863c8d947 100644 --- a/airflow/hooks/hive_hooks.py +++ b/airflow/hooks/hive_hooks.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.apache.hive.hooks.hive import ( # noqa HIVE_QUEUE_PRIORITIES, HiveCliHook, diff --git a/airflow/hooks/http_hook.py b/airflow/hooks/http_hook.py index c3520745f13e9..5b8c1fdf9b776 100644 --- a/airflow/hooks/http_hook.py +++ b/airflow/hooks/http_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.http.hooks.http import HttpHook # noqa warnings.warn( diff --git a/airflow/hooks/jdbc_hook.py b/airflow/hooks/jdbc_hook.py index 
cf1dd6f358869..a032ab0e2598b 100644 --- a/airflow/hooks/jdbc_hook.py +++ b/airflow/hooks/jdbc_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.jdbc.hooks.jdbc import JdbcHook, jaydebeapi # noqa warnings.warn( diff --git a/airflow/hooks/mssql_hook.py b/airflow/hooks/mssql_hook.py index c0077cb32c644..64943eeea7905 100644 --- a/airflow/hooks/mssql_hook.py +++ b/airflow/hooks/mssql_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.microsoft.mssql.hooks.mssql import MsSqlHook # noqa warnings.warn( diff --git a/airflow/hooks/mysql_hook.py b/airflow/hooks/mysql_hook.py index 2fca04e64d4b0..437313680b09c 100644 --- a/airflow/hooks/mysql_hook.py +++ b/airflow/hooks/mysql_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.mysql.hooks.mysql import MySqlHook # noqa warnings.warn( diff --git a/airflow/hooks/oracle_hook.py b/airflow/hooks/oracle_hook.py index bc2bab36d9c1b..0dfe33a78ae2a 100644 --- a/airflow/hooks/oracle_hook.py +++ b/airflow/hooks/oracle_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.oracle.hooks.oracle import OracleHook # noqa warnings.warn( diff --git a/airflow/hooks/pig_hook.py b/airflow/hooks/pig_hook.py index 4c502587f4567..3ead3df6c826e 100644 --- a/airflow/hooks/pig_hook.py +++ b/airflow/hooks/pig_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.apache.pig.hooks.pig import PigCliHook # noqa warnings.warn( diff --git a/airflow/hooks/postgres_hook.py b/airflow/hooks/postgres_hook.py index 2c6b9d17894bc..16f79dc329593 100644 --- a/airflow/hooks/postgres_hook.py +++ b/airflow/hooks/postgres_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.postgres.hooks.postgres import PostgresHook # noqa warnings.warn( diff --git a/airflow/hooks/presto_hook.py b/airflow/hooks/presto_hook.py index c277ec41ba6d3..0c33e1423d35d 100644 --- a/airflow/hooks/presto_hook.py +++ b/airflow/hooks/presto_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.presto.hooks.presto import PrestoHook # noqa warnings.warn( diff --git a/airflow/hooks/samba_hook.py b/airflow/hooks/samba_hook.py index 64118801a3659..b4c7cf83b05a6 100644 --- a/airflow/hooks/samba_hook.py +++ b/airflow/hooks/samba_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.samba.hooks.samba import SambaHook # noqa warnings.warn( diff --git a/airflow/hooks/slack_hook.py b/airflow/hooks/slack_hook.py index 798aafc73ceee..43636b2c6eeef 100644 --- a/airflow/hooks/slack_hook.py +++ b/airflow/hooks/slack_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.slack.hooks.slack import SlackHook # noqa warnings.warn( diff --git a/airflow/hooks/sqlite_hook.py b/airflow/hooks/sqlite_hook.py index faac450c87be5..773900400ccbc 100644 --- a/airflow/hooks/sqlite_hook.py +++ b/airflow/hooks/sqlite_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.sqlite.hooks.sqlite import SqliteHook # noqa warnings.warn( diff --git a/airflow/hooks/subprocess.py b/airflow/hooks/subprocess.py index 409dbb67551fa..1c6aec45e9032 100644 --- a/airflow/hooks/subprocess.py +++ b/airflow/hooks/subprocess.py @@ -62,8 +62,7 @@ def pre_exec(): self.log.info('Running command: %s', command) - # pylint: 
disable=consider-using-with - self.sub_process = Popen( # pylint: disable=subprocess-popen-preexec-fn + self.sub_process = Popen( command, stdout=PIPE, stderr=STDOUT, diff --git a/airflow/hooks/webhdfs_hook.py b/airflow/hooks/webhdfs_hook.py index 77aae5de6a330..1c4353835cf00 100644 --- a/airflow/hooks/webhdfs_hook.py +++ b/airflow/hooks/webhdfs_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.apache.hdfs.hooks.webhdfs import WebHDFSHook # noqa warnings.warn( diff --git a/airflow/hooks/zendesk_hook.py b/airflow/hooks/zendesk_hook.py index 462c6e919953b..ab8366e58f12a 100644 --- a/airflow/hooks/zendesk_hook.py +++ b/airflow/hooks/zendesk_hook.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.zendesk.hooks.zendesk import Zendesk, ZendeskError, ZendeskHook # noqa warnings.warn( diff --git a/airflow/jobs/__init__.py b/airflow/jobs/__init__.py index 6bcff68074678..bbdabed824d44 100644 --- a/airflow/jobs/__init__.py +++ b/airflow/jobs/__init__.py @@ -16,7 +16,7 @@ # specific language governing permissions and limitations # under the License. # -import airflow.jobs.backfill_job # noqa -import airflow.jobs.base_job # noqa -import airflow.jobs.local_task_job # noqa +import airflow.jobs.backfill_job +import airflow.jobs.base_job +import airflow.jobs.local_task_job import airflow.jobs.scheduler_job # noqa diff --git a/airflow/jobs/backfill_job.py b/airflow/jobs/backfill_job.py index b306bc4818977..adab943f598ee 100644 --- a/airflow/jobs/backfill_job.py +++ b/airflow/jobs/backfill_job.py @@ -95,7 +95,7 @@ class _DagRunTaskStatus: """ # TODO(edgarRd): AIRFLOW-1444: Add consistency check on counts - def __init__( # pylint: disable=too-many-arguments + def __init__( self, to_run=None, running=None, @@ -121,7 +121,7 @@ def __init__( # pylint: disable=too-many-arguments self.finished_runs = finished_runs self.total_runs = total_runs - def __init__( # pylint: disable=too-many-arguments + def __init__( self, dag, start_date=None, @@ -391,7 +391,7 @@ def _log_progress(self, ti_status): self.log.debug("Finished dag run loop iteration. 
Remaining tasks %s", ti_status.to_run.values()) @provide_session - def _process_backfill_task_instances( # pylint: disable=too-many-statements + def _process_backfill_task_instances( self, ti_status, executor, @@ -428,7 +428,7 @@ def _process_backfill_task_instances( # pylint: disable=too-many-statements # determined deadlocked while they are actually # waiting for their upstream to finish @provide_session - def _per_task_process(key, ti, session=None): # pylint: disable=too-many-return-statements + def _per_task_process(key, ti, session=None): ti.refresh_from_db(lock_for_update=True, session=session) task = self.dag.get_task(ti.task_id, include_subdags=True) @@ -554,7 +554,7 @@ def _per_task_process(key, ti, session=None): # pylint: disable=too-many-return self.log.debug('Adding %s to not_ready', ti) ti_status.not_ready.add(key) - try: # pylint: disable=too-many-nested-blocks + try: for task in self.dag.topological_sort(include_subdag_tasks=True): for key, ti in list(ti_status.to_run.items()): if task.task_id != ti.task_id: @@ -790,7 +790,7 @@ def _execute(self, session=None): ti_status.total_runs = len(run_dates) # total dag runs in backfill - try: # pylint: disable=too-many-nested-blocks + try: remaining_dates = ti_status.total_runs while remaining_dates > 0: dates_to_process = [ @@ -860,7 +860,6 @@ def reset_state_for_orphaned_tasks(self, filter_by_dag_run=None, session=None): ), ) .filter( - # pylint: disable=comparison-with-callable DagRun.state == State.RUNNING, DagRun.run_type != DagRunType.BACKFILL_JOB, TaskInstance.state.in_(resettable_states), diff --git a/airflow/jobs/base_job.py b/airflow/jobs/base_job.py index 4edb692251c6b..18893f2110a4f 100644 --- a/airflow/jobs/base_job.py +++ b/airflow/jobs/base_job.py @@ -152,7 +152,7 @@ def kill(self, session=None): job.end_date = timezone.utcnow() try: self.on_kill() - except Exception as e: # pylint: disable=broad-except + except Exception as e: self.log.error('on_kill() method failed: %s', str(e)) session.merge(job) session.commit() diff --git a/airflow/jobs/local_task_job.py b/airflow/jobs/local_task_job.py index c697a852b18df..06a2f577605ee 100644 --- a/airflow/jobs/local_task_job.py +++ b/airflow/jobs/local_task_job.py @@ -72,7 +72,6 @@ def __init__( def _execute(self): self.task_runner = get_task_runner(self) - # pylint: disable=unused-argument def signal_handler(signum, frame): """Setting kill signal handler""" self.log.error("Received SIGTERM. 
Terminating subprocesses") @@ -80,12 +79,9 @@ def signal_handler(signum, frame): self.task_instance.refresh_from_db() if self.task_instance.state not in State.finished: self.task_instance.set_state(State.FAILED) - self.task_instance._run_finished_callback( # pylint: disable=protected-access - error="task received sigterm" - ) + self.task_instance._run_finished_callback(error="task received sigterm") raise AirflowException("LocalTaskJob received SIGTERM signal") - # pylint: enable=unused-argument signal.signal(signal.SIGTERM, signal_handler) if not self.task_instance.check_and_change_state_before_execution( @@ -161,7 +157,7 @@ def handle_task_exit(self, return_code: int) -> None: self.task_instance.set_state(State.FAILED) if self.task_instance.state != State.SUCCESS: error = self.task_runner.deserialize_run_error() - self.task_instance._run_finished_callback(error=error) # pylint: disable=protected-access + self.task_instance._run_finished_callback(error=error) if not self.task_instance.test_mode: self._update_dagrun_state_for_paused_dag() @@ -209,7 +205,7 @@ def heartbeat_callback(self, session=None): # error file will not be populated and it must be updated by # external source suck as web UI error = self.task_runner.deserialize_run_error() or "task marked as failed externally" - ti._run_finished_callback(error=error) # pylint: disable=protected-access + ti._run_finished_callback(error=error) self.terminating = True @provide_session diff --git a/airflow/jobs/scheduler_job.py b/airflow/jobs/scheduler_job.py index 1340fb8ac3d11..b99f4b23af2dc 100644 --- a/airflow/jobs/scheduler_job.py +++ b/airflow/jobs/scheduler_job.py @@ -1,4 +1,3 @@ -# pylint: disable=no-name-in-module # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file @@ -678,7 +677,7 @@ def _is_parent_process(): return multiprocessing.current_process().name == 'MainProcess' -class SchedulerJob(BaseJob): # pylint: disable=too-many-instance-attributes +class SchedulerJob(BaseJob): """ This SchedulerJob runs for a specific time interval and schedules the jobs that are ready to run. It figures out the latest runs for each @@ -751,7 +750,7 @@ def register_signals(self) -> None: signal.signal(signal.SIGTERM, self._exit_gracefully) signal.signal(signal.SIGUSR2, self._debug_dump) - def _exit_gracefully(self, signum, frame) -> None: # pylint: disable=unused-argument + def _exit_gracefully(self, signum, frame) -> None: """Helper method to clean up processor_agent to avoid leaving orphan processes.""" if not _is_parent_process(): # Only the parent process should perform the cleanup. @@ -762,14 +761,14 @@ def _exit_gracefully(self, signum, frame) -> None: # pylint: disable=unused-arg self.processor_agent.end() sys.exit(os.EX_OK) - def _debug_dump(self, signum, frame): # pylint: disable=unused-argument + def _debug_dump(self, signum, frame): if not _is_parent_process(): # Only the parent process should perform the debug dump. 
return try: - sig_name = signal.Signals(signum).name # pylint: disable=no-member - except Exception: # pylint: disable=broad-except + sig_name = signal.Signals(signum).name + except Exception: sig_name = str(signum) self.log.info("%s\n%s received, printing debug\n%s", "-" * 80, sig_name, "-" * 80) @@ -822,9 +821,7 @@ def _change_state_for_tis_without_dagrun( .filter(models.TaskInstance.state.in_(old_states)) .filter( or_( - # pylint: disable=comparison-with-callable models.DagRun.state != State.RUNNING, - # pylint: disable=no-member models.DagRun.state.is_(None), ) ) @@ -900,7 +897,6 @@ def __get_concurrency_maps( task_map[(dag_id, task_id)] = count return dag_map, task_map - # pylint: disable=too-many-locals,too-many-statements @provide_session def _executable_task_instances_to_queued(self, max_tis: int, session: Session = None) -> List[TI]: """ @@ -980,7 +976,7 @@ def _executable_task_instances_to_queued(self, max_tis: int, session: Session = # Go through each pool, and queue up a task for execution if there are # any open slots in the pool. - # pylint: disable=too-many-nested-blocks + for pool, task_instances in pool_to_task_instances.items(): pool_name = pool if pool not in pools: @@ -1315,17 +1311,17 @@ def _execute(self) -> None: models.DAG.deactivate_stale_dags(execute_start_time) settings.Session.remove() # type: ignore - except Exception: # pylint: disable=broad-except + except Exception: self.log.exception("Exception when executing SchedulerJob._run_scheduler_loop") raise finally: try: self.executor.end() - except Exception: # pylint: disable=broad-except + except Exception: self.log.exception("Exception when executing Executor.end") try: self.processor_agent.end() - except Exception: # pylint: disable=broad-except + except Exception: self.log.exception("Exception when executing DagFileProcessorAgent.end") self.log.info("Exited execute loop") @@ -1658,7 +1654,7 @@ def _update_dag_next_dagruns(self, dag_models: Iterable[DagModel], session: Sess session.query(DagRun.dag_id, func.count('*')) .filter( DagRun.dag_id.in_([o.dag_id for o in dag_models]), - DagRun.state == State.RUNNING, # pylint: disable=comparison-with-callable + DagRun.state == State.RUNNING, DagRun.external_trigger.is_(False), ) .group_by(DagRun.dag_id) @@ -1874,7 +1870,6 @@ def adopt_or_reset_orphaned_tasks(self, session: Session = None): .join(TI.dag_run) .filter( DagRun.run_type != DagRunType.BACKFILL_JOB, - # pylint: disable=comparison-with-callable DagRun.state == State.RUNNING, ) .options(load_only(TI.dag_id, TI.task_id, TI.execution_date)) diff --git a/airflow/kubernetes/kube_client.py b/airflow/kubernetes/kube_client.py index 0b772bed83e93..1c20bd3b93a74 100644 --- a/airflow/kubernetes/kube_client.py +++ b/airflow/kubernetes/kube_client.py @@ -26,12 +26,9 @@ from kubernetes import client, config from kubernetes.client import Configuration from kubernetes.client.api_client import ApiClient - from kubernetes.client.rest import ApiException # pylint: disable=unused-import + from kubernetes.client.rest import ApiException - from airflow.kubernetes.refresh_config import ( # pylint: disable=ungrouped-imports - RefreshConfiguration, - load_kube_config, - ) + from airflow.kubernetes.refresh_config import RefreshConfiguration, load_kube_config has_kubernetes = True diff --git a/airflow/kubernetes/kube_config.py b/airflow/kubernetes/kube_config.py index 11ec279d488d5..ef3283155ff73 100644 --- a/airflow/kubernetes/kube_config.py +++ b/airflow/kubernetes/kube_config.py @@ -20,14 +20,14 @@ from airflow.settings import 
AIRFLOW_HOME -class KubeConfig: # pylint: disable=too-many-instance-attributes +class KubeConfig: """Configuration for Kubernetes""" core_section = 'core' kubernetes_section = 'kubernetes' logging_section = 'logging' - def __init__(self): # pylint: disable=too-many-statements + def __init__(self): configuration_dict = conf.as_dict(display_sensitive=True) self.core_configuration = configuration_dict[self.core_section] self.airflow_home = AIRFLOW_HOME diff --git a/airflow/kubernetes/pod.py b/airflow/kubernetes/pod.py index 02ce912441851..6bced0f55da0d 100644 --- a/airflow/kubernetes/pod.py +++ b/airflow/kubernetes/pod.py @@ -20,14 +20,11 @@ Please use :mod:`kubernetes.client.models for V1ResourceRequirements and Port. """ # flake8: noqa -# pylint: disable=unused-import + import warnings with warnings.catch_warnings(): - from airflow.providers.cncf.kubernetes.backcompat.pod import ( # pylint: disable=unused-import - Port, - Resources, - ) + from airflow.providers.cncf.kubernetes.backcompat.pod import Port, Resources warnings.warn( "This module is deprecated. Please use `kubernetes.client.models for V1ResourceRequirements and Port.", diff --git a/airflow/kubernetes/pod_generator.py b/airflow/kubernetes/pod_generator.py index 4b4028f18ce0b..fc536da283b44 100644 --- a/airflow/kubernetes/pod_generator.py +++ b/airflow/kubernetes/pod_generator.py @@ -101,7 +101,7 @@ class PodGenerator: :type extract_xcom: bool """ - def __init__( # pylint: disable=too-many-arguments,too-many-locals + def __init__( self, pod: Optional[k8s.V1Pod] = None, pod_template_file: Optional[str] = None, @@ -325,7 +325,7 @@ def reconcile_containers( ) @staticmethod - def construct_pod( # pylint: disable=too-many-arguments + def construct_pod( dag_id: str, task_id: str, pod_id: str, @@ -348,7 +348,7 @@ def construct_pod( # pylint: disable=too-many-arguments image = pod_override_object.spec.containers[0].image # type: ignore if not image: image = kube_image - except Exception: # pylint: disable=W0703 + except Exception: image = kube_image dynamic_pod = k8s.V1Pod( @@ -417,7 +417,6 @@ def deserialize_model_file(path: str) -> k8s.V1Pod: else: pod = yaml.safe_load(path) - # pylint: disable=protected-access return PodGenerator.deserialize_model_dict(pod) @staticmethod @@ -429,7 +428,7 @@ def deserialize_model_dict(pod_dict: dict) -> k8s.V1Pod: :return: De-serialized k8s.V1Pod """ api_client = ApiClient() - return api_client._ApiClient__deserialize_model(pod_dict, k8s.V1Pod) # pylint: disable=W0212 + return api_client._ApiClient__deserialize_model(pod_dict, k8s.V1Pod) @staticmethod def make_unique_pod_id(pod_id: str) -> str: diff --git a/airflow/kubernetes/pod_generator_deprecated.py b/airflow/kubernetes/pod_generator_deprecated.py index 79bdcb4406e98..c4d83d60a4e5a 100644 --- a/airflow/kubernetes/pod_generator_deprecated.py +++ b/airflow/kubernetes/pod_generator_deprecated.py @@ -143,7 +143,7 @@ class PodGenerator: :type priority_class_name: str """ - def __init__( # pylint: disable=too-many-arguments,too-many-locals + def __init__( self, image: Optional[str] = None, name: Optional[str] = None, diff --git a/airflow/kubernetes/pod_launcher.py b/airflow/kubernetes/pod_launcher.py index 72ebba4bc81eb..48a90605d442a 100644 --- a/airflow/kubernetes/pod_launcher.py +++ b/airflow/kubernetes/pod_launcher.py @@ -20,5 +20,5 @@ Please use :mod:`kubernetes.client.models` for V1ResourceRequirements and Port. 
""" # flake8: noqa -# pylint: disable=unused-import -from airflow.kubernetes.pod_launcher_deprecated import PodLauncher, PodStatus # pylint: disable=unused-import + +from airflow.kubernetes.pod_launcher_deprecated import PodLauncher, PodStatus diff --git a/airflow/kubernetes/pod_runtime_info_env.py b/airflow/kubernetes/pod_runtime_info_env.py index 9a7ad1951256f..4d7bd9f8773eb 100644 --- a/airflow/kubernetes/pod_runtime_info_env.py +++ b/airflow/kubernetes/pod_runtime_info_env.py @@ -17,7 +17,7 @@ # under the License. """This module is deprecated. Please use :mod:`kubernetes.client.models.V1EnvVar`.""" # flake8: noqa -# pylint: disable=unused-import + import warnings with warnings.catch_warnings(): diff --git a/airflow/kubernetes/refresh_config.py b/airflow/kubernetes/refresh_config.py index a039e7d2ff2fd..25649510ce0d2 100644 --- a/airflow/kubernetes/refresh_config.py +++ b/airflow/kubernetes/refresh_config.py @@ -62,12 +62,12 @@ def _load_from_exec_plugin(self): if 'token' not in status: logging.error('exec: missing token field in plugin output') return None - self.token = f"Bearer {status['token']}" # pylint: disable=W0201 + self.token = f"Bearer {status['token']}" ts_str = status.get('expirationTimestamp') if ts_str: self.api_key_expire_ts = _parse_timestamp(ts_str) return True - except Exception as e: # pylint: disable=W0703 + except Exception as e: logging.error(str(e)) return None @@ -93,7 +93,7 @@ def __init__(self, *args, **kwargs): def get_api_key_with_prefix(self, identifier): if self.refresh_api_key: - self.refresh_api_key(self) # pylint: disable=E1102 + self.refresh_api_key(self) return Configuration.get_api_key_with_prefix(self, identifier) diff --git a/airflow/kubernetes/volume.py b/airflow/kubernetes/volume.py index fa452df6f9c54..7fd58e22998cd 100644 --- a/airflow/kubernetes/volume.py +++ b/airflow/kubernetes/volume.py @@ -17,7 +17,7 @@ # under the License. """This module is deprecated. Please use :mod:`kubernetes.client.models.V1Volume`.""" # flake8: noqa -# pylint: disable=unused-import + import warnings diff --git a/airflow/kubernetes/volume_mount.py b/airflow/kubernetes/volume_mount.py index eee7b5ea6e8c7..08bc5d36782d2 100644 --- a/airflow/kubernetes/volume_mount.py +++ b/airflow/kubernetes/volume_mount.py @@ -17,7 +17,7 @@ # under the License. """This module is deprecated. 
Please use :mod:`kubernetes.client.models.V1VolumeMount`.""" # flake8: noqa -# pylint: disable=unused-import + import warnings diff --git a/airflow/lineage/__init__.py b/airflow/lineage/__init__.py index 905eb0059a2f9..ff2a028aaadde 100644 --- a/airflow/lineage/__init__.py +++ b/airflow/lineage/__init__.py @@ -92,7 +92,7 @@ def _to_dataset(obj: Any, source: str) -> Optional[Metadata]: return Metadata(type_name, source, data) -T = TypeVar("T", bound=Callable) # pylint: disable=invalid-name +T = TypeVar("T", bound=Callable) def apply_lineage(func: T) -> T: @@ -138,7 +138,7 @@ def prepare_lineage(func: T) -> T: * "list of datasets" -> manually defined list of data """ - # pylint: disable=protected-access + @wraps(func) def wrapper(self, context, *args, **kwargs): from airflow.models.base import Operator diff --git a/airflow/lineage/backend.py b/airflow/lineage/backend.py index edfbe0e2b8924..cde8d94d1f4c8 100644 --- a/airflow/lineage/backend.py +++ b/airflow/lineage/backend.py @@ -19,7 +19,7 @@ from typing import TYPE_CHECKING, Optional if TYPE_CHECKING: - from airflow.models.baseoperator import BaseOperator # pylint: disable=cyclic-import + from airflow.models.baseoperator import BaseOperator class LineageBackend: diff --git a/airflow/lineage/entities.py b/airflow/lineage/entities.py index f2bad75796197..87703edfbaf10 100644 --- a/airflow/lineage/entities.py +++ b/airflow/lineage/entities.py @@ -62,8 +62,9 @@ class Column: # this is a temporary hack to satisfy mypy. Once # https://github.com/python/mypy/issues/6136 is resolved, use # `attr.converters.default_if_none(default=False)` -# pylint: disable=missing-docstring -def default_if_none(arg: Optional[bool]) -> bool: # noqa: D103 + + +def default_if_none(arg: Optional[bool]) -> bool: return arg or False diff --git a/airflow/logging_config.py b/airflow/logging_config.py index bfb1a112893e3..05d6090a18e0d 100644 --- a/airflow/logging_config.py +++ b/airflow/logging_config.py @@ -77,7 +77,7 @@ def configure_logging(): return logging_class_path -def validate_logging_config(logging_config): # pylint: disable=unused-argument +def validate_logging_config(logging_config): """Validate the provided Logging Config""" # Now lets validate the other logging-related settings task_log_reader = conf.get('logging', 'task_log_reader') diff --git a/airflow/migrations/env.py b/airflow/migrations/env.py index 9b12f6c4cbebe..e0c558c7ef8b9 100644 --- a/airflow/migrations/env.py +++ b/airflow/migrations/env.py @@ -21,7 +21,6 @@ from alembic import context from airflow import models, settings -from airflow.models.serialized_dag import SerializedDagModel # pylint: disable=unused-import # noqa def include_object(_, name, type_, *args): @@ -102,7 +101,7 @@ def run_migrations_online(): if connection.dialect.name == 'mysql' and connection.dialect.server_version_info >= (5, 6): connection.execute("select GET_LOCK('alembic',1800);") if connection.dialect.name == 'postgresql': - context.get_context()._ensure_version_table() # pylint: disable=protected-access + context.get_context()._ensure_version_table() connection.execute("LOCK TABLE alembic_version IN ACCESS EXCLUSIVE MODE") context.run_migrations() if connection.dialect.name == 'mysql' and connection.dialect.server_version_info >= (5, 6): diff --git a/airflow/migrations/versions/03bc53e68815_add_sm_dag_index.py b/airflow/migrations/versions/03bc53e68815_add_sm_dag_index.py index d66bf5c96e3d5..fc8468155c235 100644 --- a/airflow/migrations/versions/03bc53e68815_add_sm_dag_index.py +++ 
b/airflow/migrations/versions/03bc53e68815_add_sm_dag_index.py @@ -32,9 +32,9 @@ depends_on = None -def upgrade(): # noqa: D103 +def upgrade(): op.create_index('sm_dag', 'sla_miss', ['dag_id'], unique=False) -def downgrade(): # noqa: D103 +def downgrade(): op.drop_index('sm_dag', table_name='sla_miss') diff --git a/airflow/migrations/versions/05f30312d566_merge_heads.py b/airflow/migrations/versions/05f30312d566_merge_heads.py index ffe2330196270..36940c67d8605 100644 --- a/airflow/migrations/versions/05f30312d566_merge_heads.py +++ b/airflow/migrations/versions/05f30312d566_merge_heads.py @@ -30,9 +30,9 @@ depends_on = None -def upgrade(): # noqa: D103 +def upgrade(): pass -def downgrade(): # noqa: D103 +def downgrade(): pass diff --git a/airflow/migrations/versions/0a2a5b66e19d_add_task_reschedule_table.py b/airflow/migrations/versions/0a2a5b66e19d_add_task_reschedule_table.py index 4c572f44ec5a2..33e89b3cb86af 100644 --- a/airflow/migrations/versions/0a2a5b66e19d_add_task_reschedule_table.py +++ b/airflow/migrations/versions/0a2a5b66e19d_add_task_reschedule_table.py @@ -41,19 +41,19 @@ # For Microsoft SQL Server, TIMESTAMP is a row-id type, # having nothing to do with date-time. DateTime() will # be sufficient. -def mssql_timestamp(): # noqa: D103 +def mssql_timestamp(): return sa.DateTime() -def mysql_timestamp(): # noqa: D103 +def mysql_timestamp(): return mysql.TIMESTAMP(fsp=6) -def sa_timestamp(): # noqa: D103 +def sa_timestamp(): return sa.TIMESTAMP(timezone=True) -def upgrade(): # noqa: D103 +def upgrade(): # See 0e2a74e0fc9f_add_time_zone_awareness conn = op.get_bind() if conn.dialect.name == 'mysql': @@ -85,6 +85,6 @@ def upgrade(): # noqa: D103 op.create_index(INDEX_NAME, TABLE_NAME, ['dag_id', 'task_id', 'execution_date'], unique=False) -def downgrade(): # noqa: D103 +def downgrade(): op.drop_index(INDEX_NAME, table_name=TABLE_NAME) op.drop_table(TABLE_NAME) diff --git a/airflow/migrations/versions/0e2a74e0fc9f_add_time_zone_awareness.py b/airflow/migrations/versions/0e2a74e0fc9f_add_time_zone_awareness.py index f18809c994b10..e4d35c047ca7e 100644 --- a/airflow/migrations/versions/0e2a74e0fc9f_add_time_zone_awareness.py +++ b/airflow/migrations/versions/0e2a74e0fc9f_add_time_zone_awareness.py @@ -34,7 +34,7 @@ depends_on = None -def upgrade(): # noqa: D103 +def upgrade(): conn = op.get_bind() if conn.dialect.name == "mysql": conn.execute("SET time_zone = '+00:00'") @@ -277,7 +277,7 @@ def upgrade(): # noqa: D103 ) -def downgrade(): # noqa: D103 +def downgrade(): conn = op.get_bind() if conn.dialect.name == "mysql": conn.execute("SET time_zone = '+00:00'") diff --git a/airflow/migrations/versions/127d2bf2dfa7_add_dag_id_state_index_on_dag_run_table.py b/airflow/migrations/versions/127d2bf2dfa7_add_dag_id_state_index_on_dag_run_table.py index 855e55c5ee15d..288a0b60aa821 100644 --- a/airflow/migrations/versions/127d2bf2dfa7_add_dag_id_state_index_on_dag_run_table.py +++ b/airflow/migrations/versions/127d2bf2dfa7_add_dag_id_state_index_on_dag_run_table.py @@ -32,9 +32,9 @@ depends_on = None -def upgrade(): # noqa: D103 +def upgrade(): op.create_index('dag_id_state', 'dag_run', ['dag_id', 'state'], unique=False) -def downgrade(): # noqa: D103 +def downgrade(): op.drop_index('dag_id_state', table_name='dag_run') diff --git a/airflow/migrations/versions/13eb55f81627_for_compatibility.py b/airflow/migrations/versions/13eb55f81627_for_compatibility.py index 538db1a49ffba..a15b20c6c6ad8 100644 --- a/airflow/migrations/versions/13eb55f81627_for_compatibility.py +++ 
b/airflow/migrations/versions/13eb55f81627_for_compatibility.py @@ -31,9 +31,9 @@ depends_on = None -def upgrade(): # noqa: D103 +def upgrade(): pass -def downgrade(): # noqa: D103 +def downgrade(): pass diff --git a/airflow/migrations/versions/1507a7289a2f_create_is_encrypted.py b/airflow/migrations/versions/1507a7289a2f_create_is_encrypted.py index 7afdeb2c1831b..1a539c603e5a1 100644 --- a/airflow/migrations/versions/1507a7289a2f_create_is_encrypted.py +++ b/airflow/migrations/versions/1507a7289a2f_create_is_encrypted.py @@ -38,7 +38,7 @@ ) -def upgrade(): # noqa: D103 +def upgrade(): # first check if the user already has this done. This should only be # true for users who are upgrading from a previous version of Airflow # that predates Alembic integration @@ -58,5 +58,5 @@ def upgrade(): # noqa: D103 conn.execute(connectionhelper.update().values(is_encrypted=False)) -def downgrade(): # noqa: D103 +def downgrade(): op.drop_column('connection', 'is_encrypted') diff --git a/airflow/migrations/versions/1968acfc09e3_add_is_encrypted_column_to_variable_.py b/airflow/migrations/versions/1968acfc09e3_add_is_encrypted_column_to_variable_.py index e880d77fee220..3e7c694eb018a 100644 --- a/airflow/migrations/versions/1968acfc09e3_add_is_encrypted_column_to_variable_.py +++ b/airflow/migrations/versions/1968acfc09e3_add_is_encrypted_column_to_variable_.py @@ -33,9 +33,9 @@ depends_on = None -def upgrade(): # noqa: D103 +def upgrade(): op.add_column('variable', sa.Column('is_encrypted', sa.Boolean, default=False)) -def downgrade(): # noqa: D103 +def downgrade(): op.drop_column('variable', 'is_encrypted') diff --git a/airflow/migrations/versions/1b38cef5b76e_add_dagrun.py b/airflow/migrations/versions/1b38cef5b76e_add_dagrun.py index 7edebfc4cf9d2..6ef4d91df0ce3 100644 --- a/airflow/migrations/versions/1b38cef5b76e_add_dagrun.py +++ b/airflow/migrations/versions/1b38cef5b76e_add_dagrun.py @@ -34,7 +34,7 @@ depends_on = None -def upgrade(): # noqa: D103 +def upgrade(): op.create_table( 'dag_run', sa.Column('id', sa.Integer(), nullable=False), @@ -49,5 +49,5 @@ def upgrade(): # noqa: D103 ) -def downgrade(): # noqa: D103 +def downgrade(): op.drop_table('dag_run') diff --git a/airflow/migrations/versions/211e584da130_add_ti_state_index.py b/airflow/migrations/versions/211e584da130_add_ti_state_index.py index 7df1550733d7e..86c2e7de3ef75 100644 --- a/airflow/migrations/versions/211e584da130_add_ti_state_index.py +++ b/airflow/migrations/versions/211e584da130_add_ti_state_index.py @@ -32,9 +32,9 @@ depends_on = None -def upgrade(): # noqa: D103 +def upgrade(): op.create_index('ti_state', 'task_instance', ['state'], unique=False) -def downgrade(): # noqa: D103 +def downgrade(): op.drop_index('ti_state', table_name='task_instance') diff --git a/airflow/migrations/versions/27c6a30d7c24_add_executor_config_to_task_instance.py b/airflow/migrations/versions/27c6a30d7c24_add_executor_config_to_task_instance.py index d0853efb79a2f..c02a1f5242eac 100644 --- a/airflow/migrations/versions/27c6a30d7c24_add_executor_config_to_task_instance.py +++ b/airflow/migrations/versions/27c6a30d7c24_add_executor_config_to_task_instance.py @@ -38,9 +38,9 @@ NEW_COLUMN = "executor_config" -def upgrade(): # noqa: D103 +def upgrade(): op.add_column(TASK_INSTANCE_TABLE, sa.Column(NEW_COLUMN, sa.PickleType(pickler=dill))) -def downgrade(): # noqa: D103 +def downgrade(): op.drop_column(TASK_INSTANCE_TABLE, NEW_COLUMN) diff --git a/airflow/migrations/versions/2e541a1dcfed_task_duration.py 
b/airflow/migrations/versions/2e541a1dcfed_task_duration.py index 12d8e2e5a608d..e7535bdf254ed 100644 --- a/airflow/migrations/versions/2e541a1dcfed_task_duration.py +++ b/airflow/migrations/versions/2e541a1dcfed_task_duration.py @@ -35,7 +35,7 @@ depends_on = None -def upgrade(): # noqa: D103 +def upgrade(): # use batch_alter_table to support SQLite workaround with op.batch_alter_table("task_instance") as batch_op: batch_op.alter_column( @@ -46,5 +46,5 @@ def upgrade(): # noqa: D103 ) -def downgrade(): # noqa: D103 +def downgrade(): pass diff --git a/airflow/migrations/versions/2e82aab8ef20_rename_user_table.py b/airflow/migrations/versions/2e82aab8ef20_rename_user_table.py index 3dcbe47460efa..cf461c0323031 100644 --- a/airflow/migrations/versions/2e82aab8ef20_rename_user_table.py +++ b/airflow/migrations/versions/2e82aab8ef20_rename_user_table.py @@ -32,9 +32,9 @@ depends_on = None -def upgrade(): # noqa: D103 +def upgrade(): op.rename_table('user', 'users') -def downgrade(): # noqa: D103 +def downgrade(): op.rename_table('users', 'user') diff --git a/airflow/migrations/versions/338e90f54d61_more_logging_into_task_isntance.py b/airflow/migrations/versions/338e90f54d61_more_logging_into_task_isntance.py index 60ed6628a77dd..b8ab2908c404b 100644 --- a/airflow/migrations/versions/338e90f54d61_more_logging_into_task_isntance.py +++ b/airflow/migrations/versions/338e90f54d61_more_logging_into_task_isntance.py @@ -33,11 +33,11 @@ depends_on = None -def upgrade(): # noqa: D103 +def upgrade(): op.add_column('task_instance', sa.Column('operator', sa.String(length=1000), nullable=True)) op.add_column('task_instance', sa.Column('queued_dttm', sa.DateTime(), nullable=True)) -def downgrade(): # noqa: D103 +def downgrade(): op.drop_column('task_instance', 'queued_dttm') op.drop_column('task_instance', 'operator') diff --git a/airflow/migrations/versions/33ae817a1ff4_add_kubernetes_resource_checkpointing.py b/airflow/migrations/versions/33ae817a1ff4_add_kubernetes_resource_checkpointing.py index 56542ebbbae24..d8d600be0ccee 100644 --- a/airflow/migrations/versions/33ae817a1ff4_add_kubernetes_resource_checkpointing.py +++ b/airflow/migrations/versions/33ae817a1ff4_add_kubernetes_resource_checkpointing.py @@ -36,7 +36,7 @@ RESOURCE_TABLE = "kube_resource_version" -def upgrade(): # noqa: D103 +def upgrade(): conn = op.get_bind() inspector = Inspector.from_engine(conn) @@ -60,7 +60,7 @@ def upgrade(): # noqa: D103 op.bulk_insert(table, [{"resource_version": ""}]) -def downgrade(): # noqa: D103 +def downgrade(): conn = op.get_bind() inspector = Inspector.from_engine(conn) diff --git a/airflow/migrations/versions/40e67319e3a9_dagrun_config.py b/airflow/migrations/versions/40e67319e3a9_dagrun_config.py index 96c211eebfba4..7ce183827fbe8 100644 --- a/airflow/migrations/versions/40e67319e3a9_dagrun_config.py +++ b/airflow/migrations/versions/40e67319e3a9_dagrun_config.py @@ -33,9 +33,9 @@ depends_on = None -def upgrade(): # noqa: D103 +def upgrade(): op.add_column('dag_run', sa.Column('conf', sa.PickleType(), nullable=True)) -def downgrade(): # noqa: D103 +def downgrade(): op.drop_column('dag_run', 'conf') diff --git a/airflow/migrations/versions/41f5f12752f8_add_superuser_field.py b/airflow/migrations/versions/41f5f12752f8_add_superuser_field.py index 572845b4d04d5..8b3e9b4a41de5 100644 --- a/airflow/migrations/versions/41f5f12752f8_add_superuser_field.py +++ b/airflow/migrations/versions/41f5f12752f8_add_superuser_field.py @@ -33,9 +33,9 @@ depends_on = None -def upgrade(): # noqa: D103 +def upgrade(): 
op.add_column('users', sa.Column('superuser', sa.Boolean(), default=False)) -def downgrade(): # noqa: D103 +def downgrade(): op.drop_column('users', 'superuser') diff --git a/airflow/migrations/versions/4446e08588_dagrun_start_end.py b/airflow/migrations/versions/4446e08588_dagrun_start_end.py index 2ee527361d415..48c85d5c94b70 100644 --- a/airflow/migrations/versions/4446e08588_dagrun_start_end.py +++ b/airflow/migrations/versions/4446e08588_dagrun_start_end.py @@ -34,11 +34,11 @@ depends_on = None -def upgrade(): # noqa: D103 +def upgrade(): op.add_column('dag_run', sa.Column('end_date', sa.DateTime(), nullable=True)) op.add_column('dag_run', sa.Column('start_date', sa.DateTime(), nullable=True)) -def downgrade(): # noqa: D103 +def downgrade(): op.drop_column('dag_run', 'start_date') op.drop_column('dag_run', 'end_date') diff --git a/airflow/migrations/versions/4addfa1236f1_add_fractional_seconds_to_mysql_tables.py b/airflow/migrations/versions/4addfa1236f1_add_fractional_seconds_to_mysql_tables.py index 1f32448d7848e..00ec5df75b86a 100644 --- a/airflow/migrations/versions/4addfa1236f1_add_fractional_seconds_to_mysql_tables.py +++ b/airflow/migrations/versions/4addfa1236f1_add_fractional_seconds_to_mysql_tables.py @@ -34,8 +34,8 @@ depends_on = None -def upgrade(): # noqa: D103 - conn = op.get_bind() # pylint: disable=no-member +def upgrade(): + conn = op.get_bind() if conn.dialect.name == "mysql": op.alter_column(table_name='dag', column_name='last_scheduler_run', type_=mysql.DATETIME(fsp=6)) op.alter_column(table_name='dag', column_name='last_pickled', type_=mysql.DATETIME(fsp=6)) @@ -79,8 +79,8 @@ def upgrade(): # noqa: D103 op.alter_column(table_name='xcom', column_name='execution_date', type_=mysql.DATETIME(fsp=6)) -def downgrade(): # noqa: D103 - conn = op.get_bind() # pylint: disable=no-member +def downgrade(): + conn = op.get_bind() if conn.dialect.name == "mysql": op.alter_column(table_name='dag', column_name='last_scheduler_run', type_=mysql.DATETIME()) op.alter_column(table_name='dag', column_name='last_pickled', type_=mysql.DATETIME()) diff --git a/airflow/migrations/versions/502898887f84_adding_extra_to_log.py b/airflow/migrations/versions/502898887f84_adding_extra_to_log.py index 0f00e110c1d5a..1bf5f0d3924ae 100644 --- a/airflow/migrations/versions/502898887f84_adding_extra_to_log.py +++ b/airflow/migrations/versions/502898887f84_adding_extra_to_log.py @@ -33,9 +33,9 @@ depends_on = None -def upgrade(): # noqa: D103 +def upgrade(): op.add_column('log', sa.Column('extra', sa.Text(), nullable=True)) -def downgrade(): # noqa: D103 +def downgrade(): op.drop_column('log', 'extra') diff --git a/airflow/migrations/versions/52d53670a240_fix_mssql_exec_date_rendered_task_instance.py b/airflow/migrations/versions/52d53670a240_fix_mssql_exec_date_rendered_task_instance.py index 84daac5013518..122821b8d896b 100644 --- a/airflow/migrations/versions/52d53670a240_fix_mssql_exec_date_rendered_task_instance.py +++ b/airflow/migrations/versions/52d53670a240_fix_mssql_exec_date_rendered_task_instance.py @@ -44,10 +44,10 @@ def upgrade(): conn = op.get_bind() if conn.dialect.name == "mssql": json_type = sa.Text - op.drop_table(TABLE_NAME) # pylint: disable=no-member + op.drop_table(TABLE_NAME) op.create_table( - TABLE_NAME, # pylint: disable=no-member + TABLE_NAME, sa.Column('dag_id', sa.String(length=250), nullable=False), sa.Column('task_id', sa.String(length=250), nullable=False), sa.Column('execution_date', mssql.DATETIME2, nullable=False), @@ -64,10 +64,10 @@ def downgrade(): conn = 
op.get_bind() if conn.dialect.name == "mssql": json_type = sa.Text - op.drop_table(TABLE_NAME) # pylint: disable=no-member + op.drop_table(TABLE_NAME) op.create_table( - TABLE_NAME, # pylint: disable=no-member + TABLE_NAME, sa.Column('dag_id', sa.String(length=250), nullable=False), sa.Column('task_id', sa.String(length=250), nullable=False), sa.Column('execution_date', sa.TIMESTAMP, nullable=False), diff --git a/airflow/migrations/versions/52d714495f0_job_id_indices.py b/airflow/migrations/versions/52d714495f0_job_id_indices.py index fc3ecad87f200..abf4e2d00c2ef 100644 --- a/airflow/migrations/versions/52d714495f0_job_id_indices.py +++ b/airflow/migrations/versions/52d714495f0_job_id_indices.py @@ -32,9 +32,9 @@ depends_on = None -def upgrade(): # noqa: D103 +def upgrade(): op.create_index('idx_job_state_heartbeat', 'job', ['state', 'latest_heartbeat'], unique=False) -def downgrade(): # noqa: D103 +def downgrade(): op.drop_index('idx_job_state_heartbeat', table_name='job') diff --git a/airflow/migrations/versions/561833c1c74b_add_password_column_to_user.py b/airflow/migrations/versions/561833c1c74b_add_password_column_to_user.py index 144259ef06f9c..5a83b113b8ea8 100644 --- a/airflow/migrations/versions/561833c1c74b_add_password_column_to_user.py +++ b/airflow/migrations/versions/561833c1c74b_add_password_column_to_user.py @@ -33,9 +33,9 @@ depends_on = None -def upgrade(): # noqa: D103 +def upgrade(): op.add_column('user', sa.Column('password', sa.String(255))) -def downgrade(): # noqa: D103 +def downgrade(): op.drop_column('user', 'password') diff --git a/airflow/migrations/versions/61ec73d9401f_add_description_field_to_connection.py b/airflow/migrations/versions/61ec73d9401f_add_description_field_to_connection.py index 1b5963b39e7d6..ff480c7d5e0e1 100644 --- a/airflow/migrations/versions/61ec73d9401f_add_description_field_to_connection.py +++ b/airflow/migrations/versions/61ec73d9401f_add_description_field_to_connection.py @@ -36,7 +36,7 @@ def upgrade(): """Apply Add description field to connection""" - conn = op.get_bind() # pylint: disable=no-member + conn = op.get_bind() with op.batch_alter_table('connection') as batch_op: if conn.dialect.name == "mysql": diff --git a/airflow/migrations/versions/64a7d6477aae_fix_description_field_in_connection_to_.py b/airflow/migrations/versions/64a7d6477aae_fix_description_field_in_connection_to_.py index 09261d51ceaec..fa7b2b21e2c22 100644 --- a/airflow/migrations/versions/64a7d6477aae_fix_description_field_in_connection_to_.py +++ b/airflow/migrations/versions/64a7d6477aae_fix_description_field_in_connection_to_.py @@ -24,8 +24,8 @@ """ -import sqlalchemy as sa # noqa -from alembic import op # noqa +import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. 
revision = '64a7d6477aae' @@ -36,7 +36,7 @@ def upgrade(): """Apply fix description field in connection to be text""" - conn = op.get_bind() # pylint: disable=no-member + conn = op.get_bind() if conn.dialect.name == "sqlite": # in sqlite TEXT and STRING column types are the same return @@ -55,7 +55,7 @@ def upgrade(): def downgrade(): """Unapply fix description field in connection to be text""" - conn = op.get_bind() # pylint: disable=no-member + conn = op.get_bind() if conn.dialect.name == "sqlite": # in sqlite TEXT and STRING column types are the same return diff --git a/airflow/migrations/versions/64de9cddf6c9_add_task_fails_journal_table.py b/airflow/migrations/versions/64de9cddf6c9_add_task_fails_journal_table.py index 40dd9ddbfae19..9d93736eb2190 100644 --- a/airflow/migrations/versions/64de9cddf6c9_add_task_fails_journal_table.py +++ b/airflow/migrations/versions/64de9cddf6c9_add_task_fails_journal_table.py @@ -35,7 +35,7 @@ depends_on = None -def upgrade(): # noqa: D103 +def upgrade(): op.create_table( 'task_fail', sa.Column('id', sa.Integer(), nullable=False), @@ -49,5 +49,5 @@ def upgrade(): # noqa: D103 ) -def downgrade(): # noqa: D103 +def downgrade(): op.drop_table('task_fail') diff --git a/airflow/migrations/versions/849da589634d_prefix_dag_permissions.py b/airflow/migrations/versions/849da589634d_prefix_dag_permissions.py index 556c1a50f2b4f..eee627423838d 100644 --- a/airflow/migrations/versions/849da589634d_prefix_dag_permissions.py +++ b/airflow/migrations/versions/849da589634d_prefix_dag_permissions.py @@ -37,7 +37,7 @@ depends_on = None -def prefix_individual_dag_permissions(session): # noqa: D103 +def prefix_individual_dag_permissions(session): dag_perms = ['can_dag_read', 'can_dag_edit'] prefix = "DAG:" permission_view_menus = ( @@ -55,7 +55,7 @@ def prefix_individual_dag_permissions(session): # noqa: D103 session.commit() -def get_or_create_dag_resource(session): # noqa: D103 +def get_or_create_dag_resource(session): dag_resource = get_resource_query(session, permissions.RESOURCE_DAG).first() if dag_resource: return dag_resource @@ -68,7 +68,7 @@ def get_or_create_dag_resource(session): # noqa: D103 return dag_resource -def get_or_create_action(session, action_name): # noqa: D103 +def get_or_create_action(session, action_name): action = get_action_query(session, action_name).first() if action: return action @@ -81,28 +81,28 @@ def get_or_create_action(session, action_name): # noqa: D103 return action -def get_resource_query(session, resource_name): # noqa: D103 +def get_resource_query(session, resource_name): return session.query(ViewMenu).filter(ViewMenu.name == resource_name) -def get_action_query(session, action_name): # noqa: D103 +def get_action_query(session, action_name): return session.query(Permission).filter(Permission.name == action_name) -def get_pv_with_action_query(session, action): # noqa: D103 +def get_pv_with_action_query(session, action): return session.query(PermissionView).filter(PermissionView.permission == action) -def get_pv_with_resource_query(session, resource): # noqa: D103 +def get_pv_with_resource_query(session, resource): return session.query(PermissionView).filter(PermissionView.view_menu_id == resource.id) -def update_pv_action(session, pv_query, action): # noqa: D103 +def update_pv_action(session, pv_query, action): pv_query.update({PermissionView.permission_id: action.id}, synchronize_session=False) session.commit() -def get_pv(session, resource, action): # noqa: D103 +def get_pv(session, resource, action): return ( 
session.query(PermissionView) .filter(PermissionView.view_menu == resource) @@ -111,9 +111,9 @@ def get_pv(session, resource, action): # noqa: D103 ) -def update_pv_resource(session, pv_query, resource): # noqa: D103 - for pv in pv_query.all(): # noqa: D103 - if not get_pv(session, resource, pv.permission): # noqa: D103 +def update_pv_resource(session, pv_query, resource): + for pv in pv_query.all(): + if not get_pv(session, resource, pv.permission): pv.view_menu = resource else: session.delete(pv) @@ -121,7 +121,7 @@ def update_pv_resource(session, pv_query, resource): # noqa: D103 session.commit() -def migrate_to_new_dag_permissions(db): # noqa: D103 +def migrate_to_new_dag_permissions(db): # Prefix individual dag perms with `DAG:` prefix_individual_dag_permissions(db.session) @@ -158,7 +158,7 @@ def migrate_to_new_dag_permissions(db): # noqa: D103 db.session.commit() -def upgrade(): # noqa: D103 +def upgrade(): db = SQLA() db.session = settings.Session migrate_to_new_dag_permissions(db) @@ -166,5 +166,5 @@ def upgrade(): # noqa: D103 db.session.close() -def downgrade(): # noqa: D103 +def downgrade(): pass diff --git a/airflow/migrations/versions/852ae6c715af_add_rendered_task_instance_fields_table.py b/airflow/migrations/versions/852ae6c715af_add_rendered_task_instance_fields_table.py index 282286d24e745..59cbf1a90b672 100644 --- a/airflow/migrations/versions/852ae6c715af_add_rendered_task_instance_fields_table.py +++ b/airflow/migrations/versions/852ae6c715af_add_rendered_task_instance_fields_table.py @@ -39,7 +39,7 @@ def upgrade(): """Apply Add RenderedTaskInstanceFields table""" json_type = sa.JSON - conn = op.get_bind() # pylint: disable=no-member + conn = op.get_bind() if conn.dialect.name != "postgresql": # Mysql 5.7+/MariaDB 10.2.3 has JSON support. Rather than checking for @@ -50,7 +50,7 @@ def upgrade(): json_type = sa.Text op.create_table( - TABLE_NAME, # pylint: disable=no-member + TABLE_NAME, sa.Column('dag_id', sa.String(length=250), nullable=False), sa.Column('task_id', sa.String(length=250), nullable=False), sa.Column('execution_date', sa.TIMESTAMP(timezone=True), nullable=False), @@ -61,4 +61,4 @@ def upgrade(): def downgrade(): """Drop RenderedTaskInstanceFields table""" - op.drop_table(TABLE_NAME) # pylint: disable=no-member + op.drop_table(TABLE_NAME) diff --git a/airflow/migrations/versions/856955da8476_fix_sqlite_foreign_key.py b/airflow/migrations/versions/856955da8476_fix_sqlite_foreign_key.py index fd8936c4be71e..92a97fa8c2ae7 100644 --- a/airflow/migrations/versions/856955da8476_fix_sqlite_foreign_key.py +++ b/airflow/migrations/versions/856955da8476_fix_sqlite_foreign_key.py @@ -67,6 +67,6 @@ def upgrade(): batch_op.create_foreign_key('chart_user_id_fkey', 'users', ['user_id'], ['id']) -def downgrade(): # noqa: D103 +def downgrade(): # Downgrade would fail because the broken FK constraint can't be re-created. 
pass diff --git a/airflow/migrations/versions/8646922c8a04_change_default_pool_slots_to_1.py b/airflow/migrations/versions/8646922c8a04_change_default_pool_slots_to_1.py index bf498735b392d..c60049fc7e55c 100644 --- a/airflow/migrations/versions/8646922c8a04_change_default_pool_slots_to_1.py +++ b/airflow/migrations/versions/8646922c8a04_change_default_pool_slots_to_1.py @@ -45,7 +45,9 @@ ID_LEN = 250 -class TaskInstance(Base): # noqa: D101 # type: ignore +class TaskInstance(Base): # type: ignore + """Task instance class.""" + __tablename__ = "task_instance" task_id = Column(String(ID_LEN, **COLLATION_ARGS), primary_key=True) diff --git a/airflow/migrations/versions/86770d1215c0_add_kubernetes_scheduler_uniqueness.py b/airflow/migrations/versions/86770d1215c0_add_kubernetes_scheduler_uniqueness.py index db3ccdc043bf4..bad9f5ddf52df 100644 --- a/airflow/migrations/versions/86770d1215c0_add_kubernetes_scheduler_uniqueness.py +++ b/airflow/migrations/versions/86770d1215c0_add_kubernetes_scheduler_uniqueness.py @@ -35,7 +35,7 @@ RESOURCE_TABLE = "kube_worker_uuid" -def upgrade(): # noqa: D103 +def upgrade(): columns_and_constraints = [ sa.Column("one_row_id", sa.Boolean, server_default=sa.true(), primary_key=True), @@ -55,5 +55,5 @@ def upgrade(): # noqa: D103 op.bulk_insert(table, [{"worker_uuid": ""}]) -def downgrade(): # noqa: D103 +def downgrade(): op.drop_table(RESOURCE_TABLE) diff --git a/airflow/migrations/versions/939bb1e647c8_task_reschedule_fk_on_cascade_delete.py b/airflow/migrations/versions/939bb1e647c8_task_reschedule_fk_on_cascade_delete.py index ffb61a39eaaa8..7c51df4b5ffb1 100644 --- a/airflow/migrations/versions/939bb1e647c8_task_reschedule_fk_on_cascade_delete.py +++ b/airflow/migrations/versions/939bb1e647c8_task_reschedule_fk_on_cascade_delete.py @@ -32,7 +32,7 @@ depends_on = None -def upgrade(): # noqa: D103 +def upgrade(): with op.batch_alter_table('task_reschedule') as batch_op: batch_op.drop_constraint('task_reschedule_dag_task_date_fkey', type_='foreignkey') batch_op.create_foreign_key( @@ -44,7 +44,7 @@ def upgrade(): # noqa: D103 ) -def downgrade(): # noqa: D103 +def downgrade(): with op.batch_alter_table('task_reschedule') as batch_op: batch_op.drop_constraint('task_reschedule_dag_task_date_fkey', type_='foreignkey') batch_op.create_foreign_key( diff --git a/airflow/migrations/versions/947454bf1dff_add_ti_job_id_index.py b/airflow/migrations/versions/947454bf1dff_add_ti_job_id_index.py index a1d8b8f6099ba..e7c948e66afdc 100644 --- a/airflow/migrations/versions/947454bf1dff_add_ti_job_id_index.py +++ b/airflow/migrations/versions/947454bf1dff_add_ti_job_id_index.py @@ -32,9 +32,9 @@ depends_on = None -def upgrade(): # noqa: D103 +def upgrade(): op.create_index('ti_job_id', 'task_instance', ['job_id'], unique=False) -def downgrade(): # noqa: D103 +def downgrade(): op.drop_index('ti_job_id', table_name='task_instance') diff --git a/airflow/migrations/versions/952da73b5eff_add_dag_code_table.py b/airflow/migrations/versions/952da73b5eff_add_dag_code_table.py index 63fb68999e6ec..a580fc9b1b95b 100644 --- a/airflow/migrations/versions/952da73b5eff_add_dag_code_table.py +++ b/airflow/migrations/versions/952da73b5eff_add_dag_code_table.py @@ -52,7 +52,7 @@ class SerializedDagModel(Base): """Apply add source code table""" op.create_table( - 'dag_code', # pylint: disable=no-member + 'dag_code', sa.Column('fileloc_hash', sa.BigInteger(), nullable=False, primary_key=True, autoincrement=False), sa.Column('fileloc', sa.String(length=2000), nullable=False), 
sa.Column('source_code', sa.UnicodeText(), nullable=False), diff --git a/airflow/migrations/versions/9635ae0956e7_index_faskfail.py b/airflow/migrations/versions/9635ae0956e7_index_faskfail.py index c924b3a06af2a..663a069dcc9f3 100644 --- a/airflow/migrations/versions/9635ae0956e7_index_faskfail.py +++ b/airflow/migrations/versions/9635ae0956e7_index_faskfail.py @@ -31,11 +31,11 @@ depends_on = None -def upgrade(): # noqa: D103 +def upgrade(): op.create_index( 'idx_task_fail_dag_task_date', 'task_fail', ['dag_id', 'task_id', 'execution_date'], unique=False ) -def downgrade(): # noqa: D103 +def downgrade(): op.drop_index('idx_task_fail_dag_task_date', table_name='task_fail') diff --git a/airflow/migrations/versions/98271e7606e2_add_scheduling_decision_to_dagrun_and_.py b/airflow/migrations/versions/98271e7606e2_add_scheduling_decision_to_dagrun_and_.py index 8019aa2d91be5..f4e144df42e21 100644 --- a/airflow/migrations/versions/98271e7606e2_add_scheduling_decision_to_dagrun_and_.py +++ b/airflow/migrations/versions/98271e7606e2_add_scheduling_decision_to_dagrun_and_.py @@ -37,7 +37,7 @@ def upgrade(): """Apply Add scheduling_decision to DagRun and DAG""" - conn = op.get_bind() # pylint: disable=no-member + conn = op.get_bind() is_mysql = bool(conn.dialect.name == "mysql") is_sqlite = bool(conn.dialect.name == "sqlite") timestamp = sa.TIMESTAMP(timezone=True) if not is_mysql else mysql.TIMESTAMP(fsp=6, timezone=True) @@ -85,7 +85,7 @@ def upgrade(): def downgrade(): """Unapply Add scheduling_decision to DagRun and DAG""" - conn = op.get_bind() # pylint: disable=no-member + conn = op.get_bind() is_sqlite = bool(conn.dialect.name == "sqlite") if is_sqlite: diff --git a/airflow/migrations/versions/a4c2fd67d16b_add_pool_slots_field_to_task_instance.py b/airflow/migrations/versions/a4c2fd67d16b_add_pool_slots_field_to_task_instance.py index 121c7fa5e4fef..4dd825e8b7de4 100644 --- a/airflow/migrations/versions/a4c2fd67d16b_add_pool_slots_field_to_task_instance.py +++ b/airflow/migrations/versions/a4c2fd67d16b_add_pool_slots_field_to_task_instance.py @@ -34,9 +34,9 @@ depends_on = None -def upgrade(): # noqa: D103 +def upgrade(): op.add_column('task_instance', sa.Column('pool_slots', sa.Integer, default=1)) -def downgrade(): # noqa: D103 +def downgrade(): op.drop_column('task_instance', 'pool_slots') diff --git a/airflow/migrations/versions/b0125267960b_merge_heads.py b/airflow/migrations/versions/b0125267960b_merge_heads.py index 5c05dd78d3711..bba9e73cdf8c3 100644 --- a/airflow/migrations/versions/b0125267960b_merge_heads.py +++ b/airflow/migrations/versions/b0125267960b_merge_heads.py @@ -31,9 +31,9 @@ depends_on = None -def upgrade(): # noqa: D103 +def upgrade(): pass -def downgrade(): # noqa: D103 +def downgrade(): pass diff --git a/airflow/migrations/versions/bba5a7cfc896_add_a_column_to_track_the_encryption_.py b/airflow/migrations/versions/bba5a7cfc896_add_a_column_to_track_the_encryption_.py index 4b2cacd90f775..d559fafb1ec26 100644 --- a/airflow/migrations/versions/bba5a7cfc896_add_a_column_to_track_the_encryption_.py +++ b/airflow/migrations/versions/bba5a7cfc896_add_a_column_to_track_the_encryption_.py @@ -34,9 +34,9 @@ depends_on = None -def upgrade(): # noqa: D103 +def upgrade(): op.add_column('connection', sa.Column('is_extra_encrypted', sa.Boolean, default=False)) -def downgrade(): # noqa: D103 +def downgrade(): op.drop_column('connection', 'is_extra_encrypted') diff --git a/airflow/migrations/versions/bbc73705a13e_add_notification_sent_column_to_sla_miss.py 
b/airflow/migrations/versions/bbc73705a13e_add_notification_sent_column_to_sla_miss.py index 2e73d05890950..4c6393836bfff 100644 --- a/airflow/migrations/versions/bbc73705a13e_add_notification_sent_column_to_sla_miss.py +++ b/airflow/migrations/versions/bbc73705a13e_add_notification_sent_column_to_sla_miss.py @@ -33,9 +33,9 @@ depends_on = None -def upgrade(): # noqa: D103 +def upgrade(): op.add_column('sla_miss', sa.Column('notification_sent', sa.Boolean, default=False)) -def downgrade(): # noqa: D103 +def downgrade(): op.drop_column('sla_miss', 'notification_sent') diff --git a/airflow/migrations/versions/bdaa763e6c56_make_xcom_value_column_a_large_binary.py b/airflow/migrations/versions/bdaa763e6c56_make_xcom_value_column_a_large_binary.py index cd4fa0d51c04a..d69d24aed6e91 100644 --- a/airflow/migrations/versions/bdaa763e6c56_make_xcom_value_column_a_large_binary.py +++ b/airflow/migrations/versions/bdaa763e6c56_make_xcom_value_column_a_large_binary.py @@ -34,7 +34,7 @@ depends_on = None -def upgrade(): # noqa: D103 +def upgrade(): # There can be data truncation here as LargeBinary can be smaller than the pickle # type. # use batch_alter_table to support SQLite workaround @@ -42,7 +42,7 @@ def upgrade(): # noqa: D103 batch_op.alter_column('value', type_=sa.LargeBinary()) -def downgrade(): # noqa: D103 +def downgrade(): # use batch_alter_table to support SQLite workaround with op.batch_alter_table("xcom") as batch_op: batch_op.alter_column('value', type_=sa.PickleType(pickler=dill)) diff --git a/airflow/migrations/versions/bf00311e1990_add_index_to_taskinstance.py b/airflow/migrations/versions/bf00311e1990_add_index_to_taskinstance.py index 845ce35a026c1..7fa96ea276b7b 100644 --- a/airflow/migrations/versions/bf00311e1990_add_index_to_taskinstance.py +++ b/airflow/migrations/versions/bf00311e1990_add_index_to_taskinstance.py @@ -33,9 +33,9 @@ depends_on = None -def upgrade(): # noqa: D103 +def upgrade(): op.create_index('ti_dag_date', 'task_instance', ['dag_id', 'execution_date'], unique=False) -def downgrade(): # noqa: D103 +def downgrade(): op.drop_index('ti_dag_date', table_name='task_instance') diff --git a/airflow/migrations/versions/c8ffec048a3b_add_fields_to_dag.py b/airflow/migrations/versions/c8ffec048a3b_add_fields_to_dag.py index c620286f7f15e..ff02efbf9f1a0 100644 --- a/airflow/migrations/versions/c8ffec048a3b_add_fields_to_dag.py +++ b/airflow/migrations/versions/c8ffec048a3b_add_fields_to_dag.py @@ -34,11 +34,11 @@ depends_on = None -def upgrade(): # noqa: D103 +def upgrade(): op.add_column('dag', sa.Column('description', sa.Text(), nullable=True)) op.add_column('dag', sa.Column('default_view', sa.String(25), nullable=True)) -def downgrade(): # noqa: D103 +def downgrade(): op.drop_column('dag', 'description') op.drop_column('dag', 'default_view') diff --git a/airflow/migrations/versions/cc1e65623dc7_add_max_tries_column_to_task_instance.py b/airflow/migrations/versions/cc1e65623dc7_add_max_tries_column_to_task_instance.py index e6169b2728f28..9e3bf7b7176e7 100644 --- a/airflow/migrations/versions/cc1e65623dc7_add_max_tries_column_to_task_instance.py +++ b/airflow/migrations/versions/cc1e65623dc7_add_max_tries_column_to_task_instance.py @@ -45,7 +45,9 @@ ID_LEN = 250 -class TaskInstance(Base): # noqa: D101 # type: ignore +class TaskInstance(Base): # type: ignore + """Task Instance class.""" + __tablename__ = "task_instance" task_id = Column(String(ID_LEN, **COLLATION_ARGS), primary_key=True) @@ -55,7 +57,7 @@ class TaskInstance(Base): # noqa: D101 # type: ignore try_number 
= Column(Integer, default=0) -def upgrade(): # noqa: D103 +def upgrade(): op.add_column('task_instance', sa.Column('max_tries', sa.Integer, server_default="-1")) # Check if table task_instance exist before data migration. This check is # needed for database that does not create table until migration finishes. @@ -97,7 +99,7 @@ def upgrade(): # noqa: D103 session.commit() -def downgrade(): # noqa: D103 +def downgrade(): engine = settings.engine if engine.dialect.has_table(engine, 'task_instance'): connection = op.get_bind() diff --git a/airflow/migrations/versions/cf5dc11e79ad_drop_user_and_chart.py b/airflow/migrations/versions/cf5dc11e79ad_drop_user_and_chart.py index 8200850f10631..e7252520be2e8 100644 --- a/airflow/migrations/versions/cf5dc11e79ad_drop_user_and_chart.py +++ b/airflow/migrations/versions/cf5dc11e79ad_drop_user_and_chart.py @@ -34,7 +34,7 @@ depends_on = None -def upgrade(): # noqa: D103 +def upgrade(): # We previously had a KnownEvent's table, but we deleted the table without # a down migration to remove it (so we didn't delete anyone's data if they # were happening to use the feature. @@ -60,7 +60,7 @@ def upgrade(): # noqa: D103 op.drop_table("users") -def downgrade(): # noqa: D103 +def downgrade(): conn = op.get_bind() op.create_table( diff --git a/airflow/migrations/versions/d2ae31099d61_increase_text_size_for_mysql.py b/airflow/migrations/versions/d2ae31099d61_increase_text_size_for_mysql.py index e8d445d7cc5a5..ece0234ee55a9 100644 --- a/airflow/migrations/versions/d2ae31099d61_increase_text_size_for_mysql.py +++ b/airflow/migrations/versions/d2ae31099d61_increase_text_size_for_mysql.py @@ -33,13 +33,13 @@ depends_on = None -def upgrade(): # noqa: D103 - conn = op.get_bind() # pylint: disable=no-member +def upgrade(): + conn = op.get_bind() if conn.dialect.name == "mysql": op.alter_column(table_name='variable', column_name='val', type_=mysql.MEDIUMTEXT) -def downgrade(): # noqa: D103 - conn = op.get_bind() # pylint: disable=no-member +def downgrade(): + conn = op.get_bind() if conn.dialect.name == "mysql": op.alter_column(table_name='variable', column_name='val', type_=mysql.TEXT) diff --git a/airflow/migrations/versions/d38e04c12aa2_add_serialized_dag_table.py b/airflow/migrations/versions/d38e04c12aa2_add_serialized_dag_table.py index 2a446e6a169f8..d8022b4487787 100644 --- a/airflow/migrations/versions/d38e04c12aa2_add_serialized_dag_table.py +++ b/airflow/migrations/versions/d38e04c12aa2_add_serialized_dag_table.py @@ -37,7 +37,7 @@ def upgrade(): """Upgrade version.""" json_type = sa.JSON - conn = op.get_bind() # pylint: disable=no-member + conn = op.get_bind() if conn.dialect.name != "postgresql": # Mysql 5.7+/MariaDB 10.2.3 has JSON support. 
Rather than checking for @@ -48,7 +48,7 @@ def upgrade(): json_type = sa.Text op.create_table( - 'serialized_dag', # pylint: disable=no-member + 'serialized_dag', sa.Column('dag_id', sa.String(length=250), nullable=False), sa.Column('fileloc', sa.String(length=2000), nullable=False), sa.Column('fileloc_hash', sa.Integer(), nullable=False), @@ -56,7 +56,7 @@ def upgrade(): sa.Column('last_updated', sa.DateTime(), nullable=False), sa.PrimaryKeyConstraint('dag_id'), ) - op.create_index('idx_fileloc_hash', 'serialized_dag', ['fileloc_hash']) # pylint: disable=no-member + op.create_index('idx_fileloc_hash', 'serialized_dag', ['fileloc_hash']) if conn.dialect.name == "mysql": conn.execute("SET time_zone = '+00:00'") @@ -65,7 +65,7 @@ def upgrade(): if res[0][0] == 0: raise Exception("Global variable explicit_defaults_for_timestamp needs to be on (1) for mysql") - op.alter_column( # pylint: disable=no-member + op.alter_column( table_name="serialized_dag", column_name="last_updated", type_=mysql.TIMESTAMP(fsp=6), @@ -81,7 +81,7 @@ def upgrade(): if conn.dialect.name == "postgresql": conn.execute("set timezone=UTC") - op.alter_column( # pylint: disable=no-member + op.alter_column( table_name="serialized_dag", column_name="last_updated", type_=sa.TIMESTAMP(timezone=True), @@ -90,4 +90,4 @@ def upgrade(): def downgrade(): """Downgrade version.""" - op.drop_table('serialized_dag') # pylint: disable=no-member + op.drop_table('serialized_dag') diff --git a/airflow/migrations/versions/dd25f486b8ea_add_idx_log_dag.py b/airflow/migrations/versions/dd25f486b8ea_add_idx_log_dag.py index 220535ac7c5e8..560b763963dd6 100644 --- a/airflow/migrations/versions/dd25f486b8ea_add_idx_log_dag.py +++ b/airflow/migrations/versions/dd25f486b8ea_add_idx_log_dag.py @@ -31,9 +31,9 @@ depends_on = None -def upgrade(): # noqa: D103 +def upgrade(): op.create_index('idx_log_dag', 'log', ['dag_id'], unique=False) -def downgrade(): # noqa: D103 +def downgrade(): op.drop_index('idx_log_dag', table_name='log') diff --git a/airflow/migrations/versions/dd4ecb8fbee3_add_schedule_interval_to_dag.py b/airflow/migrations/versions/dd4ecb8fbee3_add_schedule_interval_to_dag.py index b5fdc29b9a13d..776527e16633b 100644 --- a/airflow/migrations/versions/dd4ecb8fbee3_add_schedule_interval_to_dag.py +++ b/airflow/migrations/versions/dd4ecb8fbee3_add_schedule_interval_to_dag.py @@ -34,9 +34,9 @@ depends_on = None -def upgrade(): # noqa: D103 +def upgrade(): op.add_column('dag', sa.Column('schedule_interval', sa.Text(), nullable=True)) -def downgrade(): # noqa: D103 +def downgrade(): op.drop_column('dag', 'schedule_interval') diff --git a/airflow/migrations/versions/e38be357a868_update_schema_for_smart_sensor.py b/airflow/migrations/versions/e38be357a868_update_schema_for_smart_sensor.py index e603199379d45..851b97fba8dcb 100644 --- a/airflow/migrations/versions/e38be357a868_update_schema_for_smart_sensor.py +++ b/airflow/migrations/versions/e38be357a868_update_schema_for_smart_sensor.py @@ -36,19 +36,19 @@ depends_on = None -def mssql_timestamp(): # noqa: D103 +def mssql_timestamp(): return sa.DateTime() -def mysql_timestamp(): # noqa: D103 +def mysql_timestamp(): return mysql.TIMESTAMP(fsp=6) -def sa_timestamp(): # noqa: D103 +def sa_timestamp(): return sa.TIMESTAMP(timezone=True) -def upgrade(): # noqa: D103 +def upgrade(): conn = op.get_bind() inspector = Inspector.from_engine(conn) @@ -89,7 +89,7 @@ def upgrade(): # noqa: D103 op.create_index('si_updated_at', 'sensor_instance', ['updated_at'], unique=False) -def downgrade(): # noqa: D103 
+def downgrade(): conn = op.get_bind() inspector = Inspector.from_engine(conn) tables = inspector.get_table_names() diff --git a/airflow/migrations/versions/e3a246e0dc1_current_schema.py b/airflow/migrations/versions/e3a246e0dc1_current_schema.py index 60e6cdf3b3c29..bd0a8a3b55a14 100644 --- a/airflow/migrations/versions/e3a246e0dc1_current_schema.py +++ b/airflow/migrations/versions/e3a246e0dc1_current_schema.py @@ -38,7 +38,7 @@ depends_on = None -def upgrade(): # noqa: D103 +def upgrade(): conn = op.get_bind() inspector = Inspector.from_engine(conn) tables = inspector.get_table_names() @@ -221,7 +221,7 @@ def upgrade(): # noqa: D103 ) -def downgrade(): # noqa: D103 +def downgrade(): op.drop_table('chart') op.drop_table('variable') op.drop_table('user') diff --git a/airflow/migrations/versions/e959f08ac86c_change_field_in_dagcode_to_mediumtext_.py b/airflow/migrations/versions/e959f08ac86c_change_field_in_dagcode_to_mediumtext_.py index ef1c0a51d4d9d..559aa467ecff9 100644 --- a/airflow/migrations/versions/e959f08ac86c_change_field_in_dagcode_to_mediumtext_.py +++ b/airflow/migrations/versions/e959f08ac86c_change_field_in_dagcode_to_mediumtext_.py @@ -33,15 +33,15 @@ depends_on = None -def upgrade(): # noqa: D103 - conn = op.get_bind() # pylint: disable=no-member +def upgrade(): + conn = op.get_bind() if conn.dialect.name == "mysql": op.alter_column( table_name='dag_code', column_name='source_code', type_=mysql.MEDIUMTEXT, nullable=False ) -def downgrade(): # noqa: D103 - conn = op.get_bind() # pylint: disable=no-member +def downgrade(): + conn = op.get_bind() if conn.dialect.name == "mysql": op.alter_column(table_name='dag_code', column_name='source_code', type_=mysql.TEXT, nullable=False) diff --git a/airflow/migrations/versions/f23433877c24_fix_mysql_not_null_constraint.py b/airflow/migrations/versions/f23433877c24_fix_mysql_not_null_constraint.py index 7a0a3c8cc1706..77424ec76505c 100644 --- a/airflow/migrations/versions/f23433877c24_fix_mysql_not_null_constraint.py +++ b/airflow/migrations/versions/f23433877c24_fix_mysql_not_null_constraint.py @@ -32,7 +32,7 @@ depends_on = None -def upgrade(): # noqa: D103 +def upgrade(): conn = op.get_bind() if conn.dialect.name == 'mysql': conn.execute("SET time_zone = '+00:00'") @@ -41,7 +41,7 @@ def upgrade(): # noqa: D103 op.alter_column('xcom', 'timestamp', existing_type=mysql.TIMESTAMP(fsp=6), nullable=False) -def downgrade(): # noqa: D103 +def downgrade(): conn = op.get_bind() if conn.dialect.name == 'mysql': conn.execute("SET time_zone = '+00:00'") diff --git a/airflow/migrations/versions/f2ca10b85618_add_dag_stats_table.py b/airflow/migrations/versions/f2ca10b85618_add_dag_stats_table.py index 1db0440cbd4a4..bfc191aba827c 100644 --- a/airflow/migrations/versions/f2ca10b85618_add_dag_stats_table.py +++ b/airflow/migrations/versions/f2ca10b85618_add_dag_stats_table.py @@ -33,7 +33,7 @@ depends_on = None -def upgrade(): # noqa: D103 +def upgrade(): op.create_table( 'dag_stats', sa.Column('dag_id', sa.String(length=250), nullable=False), @@ -44,5 +44,5 @@ def upgrade(): # noqa: D103 ) -def downgrade(): # noqa: D103 +def downgrade(): op.drop_table('dag_stats') diff --git a/airflow/models/__init__.py b/airflow/models/__init__.py index 61606ea86b601..cd6e7958ad11f 100644 --- a/airflow/models/__init__.py +++ b/airflow/models/__init__.py @@ -23,7 +23,7 @@ from airflow.models.dagbag import DagBag from airflow.models.dagpickle import DagPickle from airflow.models.dagrun import DagRun -from airflow.models.errors import ImportError # pylint: 
disable=redefined-builtin +from airflow.models.errors import ImportError from airflow.models.log import Log from airflow.models.pool import Pool from airflow.models.renderedtifields import RenderedTaskInstanceFields diff --git a/airflow/models/baseoperator.py b/airflow/models/baseoperator.py index 7af23d3ed26f2..10e8bfdd5c57e 100644 --- a/airflow/models/baseoperator.py +++ b/airflow/models/baseoperator.py @@ -74,13 +74,13 @@ from airflow.utils.weight_rule import WeightRule if TYPE_CHECKING: - from airflow.utils.task_group import TaskGroup # pylint: disable=cyclic-import + from airflow.utils.task_group import TaskGroup ScheduleInterval = Union[str, timedelta, relativedelta] TaskStateChangeCallback = Callable[[Context], None] -T = TypeVar('T', bound=Callable) # pylint: disable=invalid-name +T = TypeVar('T', bound=Callable) class BaseOperatorMeta(abc.ABCMeta): @@ -110,7 +110,6 @@ def _apply_defaults(cls, func: T) -> T: and param.kind not in (param.VAR_POSITIONAL, param.VAR_KEYWORD) } - # pylint: disable=invalid-name,missing-docstring class autostacklevel_warn: def __init__(self): self.warnings = __import__('warnings') @@ -124,8 +123,6 @@ def __dir__(self): def warn(self, message, category=None, stacklevel=1, source=None): self.warnings.warn(message, category, stacklevel + 2, source) - # pylint: enable=invalid-name,missing-docstring - if func.__globals__.get('warnings') is sys.modules['warnings']: # Yes, this is slightly hacky, but it _automatically_ sets the right # stacklevel parameter to `warnings.warn` to ignore the decorator. Now @@ -176,7 +173,7 @@ def apply_defaults(self, *args: Any, **kwargs: Any) -> Any: kwargs['default_args'] = default_args if hasattr(self, '_hook_apply_defaults'): - args, kwargs = self._hook_apply_defaults(*args, **kwargs) # pylint: disable=protected-access + args, kwargs = self._hook_apply_defaults(*args, **kwargs) result = func(self, *args, **kwargs) @@ -184,7 +181,7 @@ def apply_defaults(self, *args: Any, **kwargs: Any) -> Any: self.set_xcomargs_dependencies() # Mark instance as instantiated https://docs.python.org/3/tutorial/classes.html#private-variables - self._BaseOperator__instantiated = True # pylint: disable=protected-access + self._BaseOperator__instantiated = True return result return cast(T, apply_defaults) @@ -195,7 +192,6 @@ def __new__(cls, name, bases, namespace): return new_cls -# pylint: disable=too-many-instance-attributes,too-many-public-methods @functools.total_ordering class BaseOperator(Operator, LoggingMixin, TaskMixin, metaclass=BaseOperatorMeta): """ @@ -456,7 +452,6 @@ class derived from this one results in the creation of a task object, # Set to True before calling execute method _lock_for_execution = False - # pylint: disable=too-many-arguments,too-many-locals, too-many-statements def __init__( self, task_id: str, @@ -474,7 +469,7 @@ def __init__( wait_for_downstream: bool = False, dag=None, params: Optional[Dict] = None, - default_args: Optional[Dict] = None, # pylint: disable=unused-argument + default_args: Optional[Dict] = None, priority_weight: int = 1, weight_rule: str = WeightRule.DOWNSTREAM, queue: str = conf.get('operators', 'default_queue'), @@ -584,7 +579,7 @@ def __init__( self.retry_delay = retry_delay else: self.log.debug("Retry_delay isn't timedelta object, assuming secs") - self.retry_delay = timedelta(seconds=retry_delay) # noqa + self.retry_delay = timedelta(seconds=retry_delay) self.retry_exponential_backoff = retry_exponential_backoff self.max_retry_delay = max_retry_delay if max_retry_delay: @@ -592,7 +587,7 @@ def 
__init__( self.max_retry_delay = max_retry_delay else: self.log.debug("Max_retry_delay isn't timedelta object, assuming secs") - self.max_retry_delay = timedelta(seconds=max_retry_delay) # noqa + self.max_retry_delay = timedelta(seconds=max_retry_delay) self.params = params or {} # Available in templates! self.priority_weight = priority_weight @@ -819,7 +814,7 @@ def prepare_for_execution(self) -> "BaseOperator": returns a copy of the task """ other = copy.copy(self) - other._lock_for_execution = True # pylint: disable=protected-access + other._lock_for_execution = True return other def set_xcomargs_dependencies(self) -> None: @@ -845,7 +840,7 @@ def set_xcomargs_dependencies(self) -> None: """ from airflow.models.xcom_arg import XComArg - def apply_set_upstream(arg: Any): # noqa + def apply_set_upstream(arg: Any): if isinstance(arg, XComArg): self.set_upstream(arg.operator) elif isinstance(arg, (tuple, set, list)): @@ -963,13 +958,11 @@ def __deepcopy__(self, memo): result = cls.__new__(cls) memo[id(self)] = result - shallow_copy = ( - cls.shallow_copy_attrs + cls._base_operator_shallow_copy_attrs - ) # pylint: disable=protected-access + shallow_copy = cls.shallow_copy_attrs + cls._base_operator_shallow_copy_attrs for k, v in self.__dict__.items(): if k not in shallow_copy: - setattr(result, k, copy.deepcopy(v, memo)) # noqa + setattr(result, k, copy.deepcopy(v, memo)) else: setattr(result, k, copy.copy(v)) return result @@ -981,7 +974,7 @@ def __getstate__(self): return state def __setstate__(self, state): - self.__dict__ = state # pylint: disable=attribute-defined-outside-init + self.__dict__ = state self._log = logging.getLogger("airflow.task.operators") def render_template_fields(self, context: Dict, jinja_env: Optional[jinja2.Environment] = None) -> None: @@ -1012,7 +1005,7 @@ def _do_render_template_fields( rendered_content = self.render_template(content, context, jinja_env, seen_oids) setattr(parent, attr_name, rendered_content) - def render_template( # pylint: disable=too-many-return-statements + def render_template( self, content: Any, context: Dict, @@ -1051,10 +1044,10 @@ def render_template( # pylint: disable=too-many-return-statements return content.resolve(context) if isinstance(content, tuple): - if type(content) is not tuple: # pylint: disable=unidiomatic-typecheck + if type(content) is not tuple: # Special case for named tuples return content.__class__( - *(self.render_template(element, context, jinja_env) for element in content) # noqa + *(self.render_template(element, context, jinja_env) for element in content) ) else: return tuple(self.render_template(element, context, jinja_env) for element in content) @@ -1093,7 +1086,7 @@ def get_template_env(self) -> jinja2.Environment: self.dag.get_template_env() if self.has_dag() else airflow.templates.SandboxedEnvironment(cache_size=0) - ) # noqa + ) def prepare_template(self) -> None: """ @@ -1105,26 +1098,26 @@ def prepare_template(self) -> None: def resolve_template_files(self) -> None: """Getting the content of files for template_field / template_ext""" - if self.template_ext: # pylint: disable=too-many-nested-blocks + if self.template_ext: for field in self.template_fields: content = getattr(self, field, None) - if content is None: # pylint: disable=no-else-continue + if content is None: continue elif isinstance(content, str) and any(content.endswith(ext) for ext in self.template_ext): env = self.get_template_env() try: setattr(self, field, env.loader.get_source(env, content)[0]) - except Exception as e: # pylint: 
disable=broad-except + except Exception as e: self.log.exception(e) elif isinstance(content, list): env = self.dag.get_template_env() - for i in range(len(content)): # pylint: disable=consider-using-enumerate + for i in range(len(content)): if isinstance(content[i], str) and any( content[i].endswith(ext) for ext in self.template_ext ): try: content[i] = env.loader.get_source(env, content[i])[0] - except Exception as e: # pylint: disable=broad-except + except Exception as e: self.log.exception(e) self.prepare_template() @@ -1334,9 +1327,7 @@ def _set_relatives( # relationships can only be set if the tasks share a single DAG. Tasks # without a DAG are assigned to that DAG. dags = { - task._dag.dag_id: task._dag # type: ignore # pylint: disable=protected-access,no-member - for task in self.roots + task_list - if task.has_dag() # pylint: disable=no-member + task._dag.dag_id: task._dag for task in self.roots + task_list if task.has_dag() # type: ignore } if len(dags) > 1: @@ -1643,7 +1634,7 @@ def cross_downstream( class BaseOperatorLink(metaclass=ABCMeta): """Abstract base class that defines how we get an operator link.""" - operators: ClassVar[List[Type[BaseOperator]]] = [] # pylint: disable=invalid-name + operators: ClassVar[List[Type[BaseOperator]]] = [] """ This property will be used by Airflow Plugins to find the Operators to which you want to assign this Operator Link diff --git a/airflow/models/connection.py b/airflow/models/connection.py index 73d0d8d1502d5..ed80741c867f3 100644 --- a/airflow/models/connection.py +++ b/airflow/models/connection.py @@ -57,7 +57,7 @@ def _parse_netloc_to_hostname(uri_parts): return hostname -class Connection(Base, LoggingMixin): # pylint: disable=too-many-instance-attributes +class Connection(Base, LoggingMixin): """ Placeholder to store information about different database instances connection information. The idea here is that scripts use references to @@ -107,7 +107,7 @@ class Connection(Base, LoggingMixin): # pylint: disable=too-many-instance-attri is_extra_encrypted = Column(Boolean, unique=False, default=False) _extra = Column('extra', Text()) - def __init__( # pylint: disable=too-many-arguments + def __init__( self, conn_id: Optional[str] = None, conn_type: Optional[str] = None, @@ -125,9 +125,7 @@ def __init__( # pylint: disable=too-many-arguments self.description = description if extra and not isinstance(extra, str): extra = json.dumps(extra) - if uri and ( # pylint: disable=too-many-boolean-expressions - conn_type or host or login or password or schema or port or extra - ): + if uri and (conn_type or host or login or password or schema or port or extra): raise AirflowException( "You must create an object using the URI or individual values " "(conn_type, host, login, password, schema, port or extra)." @@ -148,7 +146,7 @@ def __init__( # pylint: disable=too-many-arguments mask_secret(self.password) @reconstructor - def on_db_load(self): # pylint: disable=missing-function-docstring + def on_db_load(self): if self.password: mask_secret(self.password) @@ -246,7 +244,7 @@ def set_password(self, value: Optional[str]): self.is_encrypted = fernet.is_encrypted @declared_attr - def password(cls): # pylint: disable=no-self-argument + def password(cls): """Password. 
The value is decrypted/encrypted when reading/setting the value.""" return synonym('_password', descriptor=property(cls.get_password, cls.set_password)) @@ -276,7 +274,7 @@ def set_extra(self, value: str): self.is_extra_encrypted = False @declared_attr - def extra(cls): # pylint: disable=no-self-argument + def extra(cls): """Extra data. The value is decrypted/encrypted when reading/setting the value.""" return synonym('_extra', descriptor=property(cls.get_extra, cls.set_extra)) diff --git a/airflow/models/crypto.py b/airflow/models/crypto.py index d6e0ee8341ef8..b57c53732df32 100644 --- a/airflow/models/crypto.py +++ b/airflow/models/crypto.py @@ -28,7 +28,7 @@ class FernetProtocol(Protocol): - """This class is only used for TypeChecking (for IDEs, mypy, pylint, etc)""" + """This class is only used for TypeChecking (for IDEs, mypy, etc)""" def decrypt(self, b): """Decrypt with Fernet""" @@ -71,7 +71,7 @@ def get_fernet(): :return: Fernet object :raises: airflow.exceptions.AirflowException if there's a problem trying to load Fernet """ - global _fernet # pylint: disable=global-statement + global _fernet if _fernet: return _fernet diff --git a/airflow/models/dag.py b/airflow/models/dag.py index 53c3feaf3920c..1861edaff193b 100644 --- a/airflow/models/dag.py +++ b/airflow/models/dag.py @@ -1872,7 +1872,7 @@ def bulk_write_to_db(cls, dags: Collection["DAG"], session=None): session.query(DagRun.dag_id, func.count('*')) .filter( DagRun.dag_id.in_(existing_dag_ids), - DagRun.state == State.RUNNING, # pylint: disable=comparison-with-callable + DagRun.state == State.RUNNING, DagRun.external_trigger.is_(False), ) .group_by(DagRun.dag_id) diff --git a/airflow/models/dagbag.py b/airflow/models/dagbag.py index ae21b90a080fb..5d84ba2a7f47b 100644 --- a/airflow/models/dagbag.py +++ b/airflow/models/dagbag.py @@ -286,7 +286,7 @@ def process_file(self, filepath, only_if_updated=True, safe_mode=True): and file_last_changed_on_disk == self.file_last_changed[filepath] ): return [] - except Exception as e: # pylint: disable=broad-except + except Exception as e: self.log.exception(e) return [] @@ -325,7 +325,7 @@ def _load_modules_from_file(self, filepath, safe_mode): sys.modules[spec.name] = new_module loader.exec_module(new_module) return [new_module] - except Exception as e: # pylint: disable=broad-except + except Exception as e: self.log.exception("Failed to import: %s", filepath) if self.dagbag_import_error_tracebacks: self.import_errors[filepath] = traceback.format_exc( @@ -368,7 +368,7 @@ def _load_modules_from_zip(self, filepath, safe_mode): sys.path.insert(0, filepath) current_module = importlib.import_module(mod_name) mods.append(current_module) - except Exception as e: # pylint: disable=broad-except + except Exception as e: self.log.exception("Failed to import: %s", filepath) if self.dagbag_import_error_tracebacks: self.import_errors[filepath] = traceback.format_exc( @@ -520,7 +520,7 @@ def collect_dags( dags=str([dag.dag_id for dag in found_dags]), ) ) - except Exception as e: # pylint: disable=broad-except + except Exception as e: self.log.exception(e) self.dagbag_stats = sorted(stats, key=lambda x: x.duration, reverse=True) @@ -594,7 +594,7 @@ def _serialize_dag_capturing_errors(dag, session): return [] except OperationalError: raise - except Exception: # pylint: disable=broad-except + except Exception: return [(dag.fileloc, traceback.format_exc(limit=-self.dagbag_import_error_traceback_depth))] # Retry 'DAG.bulk_write_to_db' & 'SerializedDagModel.bulk_sync_to_db' in case diff --git 
a/airflow/models/dagrun.py b/airflow/models/dagrun.py index 6bd96f3aa7eee..3bd86ee66f29f 100644 --- a/airflow/models/dagrun.py +++ b/airflow/models/dagrun.py @@ -701,7 +701,7 @@ def get_run(session: Session, dag_id: str, execution_date: datetime) -> Optional session.query(DagRun) .filter( DagRun.dag_id == dag_id, - DagRun.external_trigger == False, # noqa pylint: disable=singleton-comparison + DagRun.external_trigger == False, # noqa DagRun.execution_date == execution_date, ) .first() diff --git a/airflow/models/errors.py b/airflow/models/errors.py index 47f5bd74bf14c..9718c063a28f0 100644 --- a/airflow/models/errors.py +++ b/airflow/models/errors.py @@ -22,7 +22,7 @@ from airflow.utils.sqlalchemy import UtcDateTime -class ImportError(Base): # pylint: disable=redefined-builtin +class ImportError(Base): """ A table to store all Import Errors. The ImportErrors are recorded when parsing DAGs. This errors are displayed on the Webserver. diff --git a/airflow/models/pool.py b/airflow/models/pool.py index 3d152eee80fe3..6f217c4b025a2 100644 --- a/airflow/models/pool.py +++ b/airflow/models/pool.py @@ -53,7 +53,7 @@ class Pool(Base): DEFAULT_POOL_NAME = 'default_pool' def __repr__(self): - return str(self.pool) # pylint: disable=E0012 + return str(self.pool) @staticmethod @provide_session diff --git a/airflow/models/serialized_dag.py b/airflow/models/serialized_dag.py index bba58ad9a0d17..e7b5157227253 100644 --- a/airflow/models/serialized_dag.py +++ b/airflow/models/serialized_dag.py @@ -169,23 +169,21 @@ def read_all_dags(cls, session: Session = None) -> Dict[str, 'SerializedDAG']: @property def dag(self): """The DAG deserialized from the ``data`` column""" - SerializedDAG._load_operator_extra_links = self.load_op_links # pylint: disable=protected-access + SerializedDAG._load_operator_extra_links = self.load_op_links if isinstance(self.data, dict): dag = SerializedDAG.from_dict(self.data) # type: Any else: - dag = SerializedDAG.from_json(self.data) # noqa + dag = SerializedDAG.from_json(self.data) return dag @classmethod @provide_session def remove_dag(cls, dag_id: str, session: Session = None): """Deletes a DAG with given dag_id. - :param dag_id: dag_id to be deleted :param session: ORM Session """ - # pylint: disable=no-member session.execute(cls.__table__.delete().where(cls.dag_id == dag_id)) @classmethod @@ -202,7 +200,6 @@ def remove_deleted_dags(cls, alive_dag_filelocs: List[str], session=None): "Deleting Serialized DAGs (for which DAG files are deleted) from %s table ", cls.__tablename__ ) - # pylint: disable=no-member session.execute( cls.__table__.delete().where( and_(cls.fileloc_hash.notin_(alive_fileloc_hashes), cls.fileloc.notin_(alive_dag_filelocs)) diff --git a/airflow/models/skipmixin.py b/airflow/models/skipmixin.py index dc40329087f8c..489da524ebe04 100644 --- a/airflow/models/skipmixin.py +++ b/airflow/models/skipmixin.py @@ -96,7 +96,7 @@ def skip( # SkipMixin may not necessarily have a task_id attribute. Only store to XCom if one is available. 
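The guard described in the comment above can also be expressed with ``getattr``; a minimal, equivalent sketch (the helper name is hypothetical):

.. code-block:: python

    def _safe_task_id(obj):
        # A SkipMixin instance may not define task_id; XCom is only written
        # when one is available, so fall back to None instead of raising.
        return getattr(obj, "task_id", None)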
try: - task_id = self.task_id # noqa + task_id = self.task_id except AttributeError: task_id = None diff --git a/airflow/models/taskinstance.py b/airflow/models/taskinstance.py index 2def7076236e8..76cb079e86e55 100644 --- a/airflow/models/taskinstance.py +++ b/airflow/models/taskinstance.py @@ -116,7 +116,7 @@ def load_error_file(fd: IO[bytes]) -> Optional[Union[str, Exception]]: return None try: return pickle.loads(data) - except Exception: # pylint: disable=broad-except + except Exception: return "Failed to load task run error" @@ -125,7 +125,7 @@ def set_error_file(error_file: str, error: Union[str, Exception]) -> None: with open(error_file, "wb") as fd: try: pickle.dump(error, fd) - except Exception: # pylint: disable=broad-except + except Exception: # local class objects cannot be pickled, so we fallback # to store the string representation instead pickle.dump(str(error), fd) @@ -205,7 +205,7 @@ def clear_task_instances( if job_ids: from airflow.jobs.base_job import BaseJob - for job in session.query(BaseJob).filter(BaseJob.id.in_(job_ids)).all(): # noqa + for job in session.query(BaseJob).filter(BaseJob.id.in_(job_ids)).all(): job.state = State.SHUTDOWN if activate_dag_runs is not None: @@ -263,7 +263,7 @@ def with_try_number(self, try_number: int) -> 'TaskInstanceKey': return TaskInstanceKey(self.dag_id, self.task_id, self.execution_date, try_number) -class TaskInstance(Base, LoggingMixin): # pylint: disable=R0902,R0904 +class TaskInstance(Base, LoggingMixin): """ Task instances store the state of a task instance. This table is the authority and single source of truth around what tasks have run and the @@ -400,7 +400,7 @@ def next_try_number(self): """Setting Next Try Number""" return self._try_number + 1 - def command_as_list( # pylint: disable=too-many-arguments + def command_as_list( self, mark_success=False, ignore_all_deps=False, @@ -449,7 +449,7 @@ def command_as_list( # pylint: disable=too-many-arguments @staticmethod def generate_command( - dag_id: str, # pylint: disable=too-many-arguments + dag_id: str, task_id: str, execution_date: datetime, mark_success: bool = False, @@ -631,7 +631,7 @@ def refresh_from_db(self, session=None, lock_for_update=False) -> None: self.state = ti.state # Get the raw value of try_number column, don't read through the # accessor here otherwise it will be incremented by one already. - self.try_number = ti._try_number # noqa pylint: disable=protected-access + self.try_number = ti._try_number self.max_tries = ti.max_tries self.hostname = ti.hostname self.unixname = ti.unixname @@ -932,7 +932,7 @@ def next_retry_datetime(self): ti_hash = int( hashlib.sha1( "{}#{}#{}#{}".format( - self.dag_id, self.task_id, self.execution_date, self.try_number # noqa + self.dag_id, self.task_id, self.execution_date, self.try_number ).encode('utf-8') ).hexdigest(), 16, @@ -976,7 +976,7 @@ def get_dagrun(self, session: Session = None): return dr @provide_session - def check_and_change_state_before_execution( # pylint: disable=too-many-arguments + def check_and_change_state_before_execution( self, verbose: bool = True, ignore_all_deps: bool = False, @@ -1280,7 +1280,7 @@ def _prepare_and_execute_task_with_callbacks(self, context, task): task_copy = task.prepare_for_execution() self.task = task_copy - def signal_handler(signum, frame): # pylint: disable=unused-argument + def signal_handler(signum, frame): self.log.error("Received SIGTERM. 
Terminating subprocesses.") task_copy.on_kill() raise AirflowException("Task received SIGTERM signal") @@ -1315,7 +1315,7 @@ def signal_handler(signum, frame): # pylint: disable=unused-argument registered = False try: registered = task_copy.register_in_sensor_service(self, context) - except Exception: # pylint: disable=broad-except + except Exception: self.log.warning( "Failed to register in sensor service." " Continue to run task in non smart sensor mode.", @@ -1369,7 +1369,7 @@ def _run_execute_callback(self, context: Context, task): try: if task.on_execute_callback: task.on_execute_callback(context) - except Exception: # pylint: disable=broad-except + except Exception: self.log.exception("Failed when executing execute callback") def _run_finished_callback(self, error: Optional[Union[str, Exception]] = None) -> None: @@ -1398,7 +1398,7 @@ def _run_finished_callback(self, error: Optional[Union[str, Exception]] = None) task.on_retry_callback(context) @provide_session - def run( # pylint: disable=too-many-arguments + def run( self, verbose: bool = True, ignore_all_deps: bool = False, @@ -1553,7 +1553,7 @@ def handle_failure( if email_for_state and task.email: try: self.email_alert(error) - except Exception: # pylint: disable=broad-except + except Exception: self.log.exception('Failed to send email to: %s', task.email) if not test_mode: @@ -1582,7 +1582,7 @@ def _safe_date(self, date_attr, fmt): return '' @provide_session - def get_template_context(self, session=None) -> Context: # pylint: disable=too-many-locals + def get_template_context(self, session=None) -> Context: """Return TI Context""" task = self.task from airflow import macros @@ -1673,8 +1673,7 @@ def __repr__(self): @staticmethod def get( item: str, - # pylint: disable=protected-access - default_var: Any = Variable._Variable__NO_DEFAULT_SENTINEL, # noqa + default_var: Any = Variable._Variable__NO_DEFAULT_SENTINEL, ): """Get Airflow Variable value""" return Variable.get(item, default_var=default_var) @@ -1702,8 +1701,7 @@ def __repr__(self): @staticmethod def get( item: str, - # pylint: disable=protected-access - default_var: Any = Variable._Variable__NO_DEFAULT_SENTINEL, # noqa + default_var: Any = Variable._Variable__NO_DEFAULT_SENTINEL, ): """Get Airflow Variable after deserializing JSON value""" return Variable.get(item, default_var=default_var, deserialize_json=True) @@ -1946,7 +1944,7 @@ def xcom_push( ) @provide_session - def xcom_pull( # pylint: disable=inconsistent-return-statements + def xcom_pull( self, task_ids: Optional[Union[str, Iterable[str]]] = None, dag_id: Optional[str] = None, @@ -2099,7 +2097,6 @@ def __init__(self, ti: TaskInstance): self._queue: str = ti.queue self._key = ti.key - # pylint: disable=missing-docstring @property def dag_id(self) -> str: return self._dag_id diff --git a/airflow/models/variable.py b/airflow/models/variable.py index 44627c0fdefa2..7d4726966b845 100644 --- a/airflow/models/variable.py +++ b/airflow/models/variable.py @@ -58,7 +58,7 @@ def __init__(self, key=None, val=None, description=None): self.description = description @reconstructor - def on_db_load(self): # pylint: disable=missing-function-docstring + def on_db_load(self): if self._val: mask_secret(self.val, self.key) @@ -75,7 +75,7 @@ def get_val(self): except InvalidFernetToken: self.log.error("Can't decrypt _val for key=%s, invalid token or value", self.key) return None - except Exception: # pylint: disable=broad-except + except Exception: self.log.error("Can't decrypt _val for key=%s, FERNET_KEY configuration missing", 
self.key) return None else: @@ -89,7 +89,7 @@ def set_val(self, value): self.is_encrypted = fernet.is_encrypted @declared_attr - def val(cls): # pylint: disable=no-self-argument + def val(cls): """Get Airflow Variable from Metadata DB and decode it using the Fernet Key""" return synonym('_val', descriptor=property(cls.get_val, cls.set_val)) diff --git a/airflow/models/xcom_arg.py b/airflow/models/xcom_arg.py index eae583ece310a..dd08ab34fa6ad 100644 --- a/airflow/models/xcom_arg.py +++ b/airflow/models/xcom_arg.py @@ -18,7 +18,7 @@ from typing import Any, Dict, List, Optional, Sequence, Union from airflow.exceptions import AirflowException -from airflow.models.baseoperator import BaseOperator # pylint: disable=R0401 +from airflow.models.baseoperator import BaseOperator from airflow.models.taskmixin import TaskMixin from airflow.models.xcom import XCOM_RETURN_KEY from airflow.utils.edgemodifier import EdgeModifier diff --git a/airflow/mypy/plugin/decorators.py b/airflow/mypy/plugin/decorators.py index 3267ca110baa3..192dc37778cb0 100644 --- a/airflow/mypy/plugin/decorators.py +++ b/airflow/mypy/plugin/decorators.py @@ -20,9 +20,9 @@ import functools from typing import List -from mypy.nodes import ARG_NAMED_OPT # pylint: disable=no-name-in-module -from mypy.plugin import FunctionContext, Plugin # pylint: disable=no-name-in-module -from mypy.types import CallableType, NoneType, UnionType # pylint: disable=no-name-in-module +from mypy.nodes import ARG_NAMED_OPT +from mypy.plugin import FunctionContext, Plugin +from mypy.types import CallableType, NoneType, UnionType TYPED_DECORATORS = { "fallback_to_default_project_id of GoogleBaseHook": ["project_id"], @@ -75,6 +75,6 @@ def _change_decorator_function_type( return decorator -def plugin(version: str): # pylint: disable=unused-argument +def plugin(version: str): """Mypy plugin entrypoint.""" return TypedDecoratorPlugin diff --git a/airflow/operators/bash_operator.py b/airflow/operators/bash_operator.py index 8c389fe9c30ce..3b7764dfbd316 100644 --- a/airflow/operators/bash_operator.py +++ b/airflow/operators/bash_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.operators.bash import BashOperator # noqa warnings.warn( diff --git a/airflow/operators/branch_operator.py b/airflow/operators/branch_operator.py index 218e26e77e8d9..b4c71d5bc1f88 100644 --- a/airflow/operators/branch_operator.py +++ b/airflow/operators/branch_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.operators.branch import BaseBranchOperator # noqa warnings.warn( diff --git a/airflow/operators/dagrun_operator.py b/airflow/operators/dagrun_operator.py index 0f4990f3ccbc4..bdcc6671516af 100644 --- a/airflow/operators/dagrun_operator.py +++ b/airflow/operators/dagrun_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.operators.trigger_dagrun import TriggerDagRunLink, TriggerDagRunOperator # noqa warnings.warn( diff --git a/airflow/operators/docker_operator.py b/airflow/operators/docker_operator.py index e815d66a64910..88235b4382461 100644 --- a/airflow/operators/docker_operator.py +++ b/airflow/operators/docker_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.docker.operators.docker import DockerOperator # noqa warnings.warn( diff --git a/airflow/operators/druid_check_operator.py b/airflow/operators/druid_check_operator.py index 5e605e5ef60af..008a91750c91d 100644 --- 
a/airflow/operators/druid_check_operator.py +++ b/airflow/operators/druid_check_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.apache.druid.operators.druid_check import DruidCheckOperator # noqa warnings.warn( diff --git a/airflow/operators/dummy_operator.py b/airflow/operators/dummy_operator.py index c166c7f62b262..6b2fbeeb5bff4 100644 --- a/airflow/operators/dummy_operator.py +++ b/airflow/operators/dummy_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.operators.dummy import DummyOperator # noqa warnings.warn( diff --git a/airflow/operators/email.py b/airflow/operators/email.py index 69807f21052c4..324ec1a9a9984 100644 --- a/airflow/operators/email.py +++ b/airflow/operators/email.py @@ -50,7 +50,7 @@ class EmailOperator(BaseOperator): template_ext = ('.html',) ui_color = '#e6faf9' - def __init__( # pylint: disable=invalid-name + def __init__( self, *, to: Union[List[str], str], @@ -65,11 +65,11 @@ def __init__( # pylint: disable=invalid-name **kwargs, ) -> None: super().__init__(**kwargs) - self.to = to # pylint: disable=invalid-name + self.to = to self.subject = subject self.html_content = html_content self.files = files or [] - self.cc = cc # pylint: disable=invalid-name + self.cc = cc self.bcc = bcc self.mime_subtype = mime_subtype self.mime_charset = mime_charset diff --git a/airflow/operators/email_operator.py b/airflow/operators/email_operator.py index d1cf2ea3fc946..80901d010f669 100644 --- a/airflow/operators/email_operator.py +++ b/airflow/operators/email_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.operators.email import EmailOperator # noqa warnings.warn( diff --git a/airflow/operators/gcs_to_s3.py b/airflow/operators/gcs_to_s3.py index ed709b2f82c5f..d02bc7f224ea9 100644 --- a/airflow/operators/gcs_to_s3.py +++ b/airflow/operators/gcs_to_s3.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.transfers.gcs_to_s3 import GCSToS3Operator # noqa warnings.warn( diff --git a/airflow/operators/hive_operator.py b/airflow/operators/hive_operator.py index 545b66f852b70..b49cf097305ea 100644 --- a/airflow/operators/hive_operator.py +++ b/airflow/operators/hive_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.apache.hive.operators.hive import HiveOperator # noqa warnings.warn( diff --git a/airflow/operators/hive_stats_operator.py b/airflow/operators/hive_stats_operator.py index 96d7b52285fc9..af1e260a4a155 100644 --- a/airflow/operators/hive_stats_operator.py +++ b/airflow/operators/hive_stats_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.apache.hive.operators.hive_stats import HiveStatsCollectionOperator # noqa warnings.warn( diff --git a/airflow/operators/hive_to_druid.py b/airflow/operators/hive_to_druid.py index 6c40c907d1183..a6537a1337a56 100644 --- a/airflow/operators/hive_to_druid.py +++ b/airflow/operators/hive_to_druid.py @@ -22,7 +22,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.apache.druid.transfers.hive_to_druid import HiveToDruidOperator warnings.warn( diff --git a/airflow/operators/hive_to_mysql.py b/airflow/operators/hive_to_mysql.py index 4ce9c82c0d736..0a13c7666a4cb 100644 --- a/airflow/operators/hive_to_mysql.py +++ b/airflow/operators/hive_to_mysql.py @@ -22,7 +22,6 @@ import warnings -# pylint: disable=unused-import from 
airflow.providers.apache.hive.transfers.hive_to_mysql import HiveToMySqlOperator warnings.warn( diff --git a/airflow/operators/hive_to_samba_operator.py b/airflow/operators/hive_to_samba_operator.py index b696912c09d8e..ed3b180b3e7c4 100644 --- a/airflow/operators/hive_to_samba_operator.py +++ b/airflow/operators/hive_to_samba_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.apache.hive.transfers.hive_to_samba import HiveToSambaOperator # noqa warnings.warn( diff --git a/airflow/operators/http_operator.py b/airflow/operators/http_operator.py index 6c75520015adf..6e2ab56df4e58 100644 --- a/airflow/operators/http_operator.py +++ b/airflow/operators/http_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.http.operators.http import SimpleHttpOperator # noqa warnings.warn( diff --git a/airflow/operators/jdbc_operator.py b/airflow/operators/jdbc_operator.py index f739157f445cc..ff36f9f5d6467 100644 --- a/airflow/operators/jdbc_operator.py +++ b/airflow/operators/jdbc_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.jdbc.operators.jdbc import JdbcOperator # noqa warnings.warn( diff --git a/airflow/operators/latest_only_operator.py b/airflow/operators/latest_only_operator.py index 42e0f93c583f3..07644f4a82c10 100644 --- a/airflow/operators/latest_only_operator.py +++ b/airflow/operators/latest_only_operator.py @@ -18,7 +18,6 @@ """This module is deprecated. Please use :mod:`airflow.operators.latest_only`""" import warnings -# pylint: disable=unused-import from airflow.operators.latest_only import LatestOnlyOperator # noqa warnings.warn( diff --git a/airflow/operators/mssql_operator.py b/airflow/operators/mssql_operator.py index 1bdd2c50c8f2e..d1047b827a722 100644 --- a/airflow/operators/mssql_operator.py +++ b/airflow/operators/mssql_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.microsoft.mssql.operators.mssql import MsSqlOperator # noqa warnings.warn( diff --git a/airflow/operators/mysql_operator.py b/airflow/operators/mysql_operator.py index c3b4a5217668b..82a94edd66add 100644 --- a/airflow/operators/mysql_operator.py +++ b/airflow/operators/mysql_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.mysql.operators.mysql import MySqlOperator # noqa warnings.warn( diff --git a/airflow/operators/oracle_operator.py b/airflow/operators/oracle_operator.py index 059e7236c0dcf..8ad61db754dcb 100644 --- a/airflow/operators/oracle_operator.py +++ b/airflow/operators/oracle_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.oracle.operators.oracle import OracleOperator # noqa warnings.warn( diff --git a/airflow/operators/papermill_operator.py b/airflow/operators/papermill_operator.py index 61ee2552b2801..5d63e38e13721 100644 --- a/airflow/operators/papermill_operator.py +++ b/airflow/operators/papermill_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.papermill.operators.papermill import PapermillOperator # noqa warnings.warn( diff --git a/airflow/operators/pig_operator.py b/airflow/operators/pig_operator.py index ff2659e2cf005..3b2ea0e05ac99 100644 --- a/airflow/operators/pig_operator.py +++ b/airflow/operators/pig_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.apache.pig.operators.pig 
import PigOperator # noqa warnings.warn( diff --git a/airflow/operators/postgres_operator.py b/airflow/operators/postgres_operator.py index 949ac0670142b..e5dc53c82bde6 100644 --- a/airflow/operators/postgres_operator.py +++ b/airflow/operators/postgres_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.postgres.operators.postgres import Mapping, PostgresOperator # noqa warnings.warn( diff --git a/airflow/operators/presto_check_operator.py b/airflow/operators/presto_check_operator.py index b5731626c541d..693471f18ceb4 100644 --- a/airflow/operators/presto_check_operator.py +++ b/airflow/operators/presto_check_operator.py @@ -19,8 +19,7 @@ import warnings -# pylint: disable=unused-import -from airflow.operators.sql import SQLCheckOperator, SQLIntervalCheckOperator, SQLValueCheckOperator # noqa +from airflow.operators.sql import SQLCheckOperator, SQLIntervalCheckOperator, SQLValueCheckOperator warnings.warn( "This module is deprecated. Please use `airflow.operators.sql`.", DeprecationWarning, stacklevel=2 diff --git a/airflow/operators/presto_to_mysql.py b/airflow/operators/presto_to_mysql.py index fba566a4d6aca..bfc117327d672 100644 --- a/airflow/operators/presto_to_mysql.py +++ b/airflow/operators/presto_to_mysql.py @@ -22,7 +22,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.mysql.transfers.presto_to_mysql import PrestoToMySqlOperator warnings.warn( diff --git a/airflow/operators/python.py b/airflow/operators/python.py index fa8020c32c6ef..2aea470896d70 100644 --- a/airflow/operators/python.py +++ b/airflow/operators/python.py @@ -62,7 +62,7 @@ def my_task() """ # To maintain backwards compatibility, we import the task object into this file # This prevents breakages in dags that use `from airflow.operators.python import task` - from airflow.decorators.python import python_task # noqa # pylint: disable=unused-import + from airflow.decorators.python import python_task warnings.warn( """airflow.operators.python.task is deprecated. 
Please use the following instead @@ -295,7 +295,7 @@ class PythonVirtualenvOperator(PythonOperator): } AIRFLOW_SERIALIZABLE_CONTEXT_KEYS = {'macros', 'conf', 'dag', 'dag_run', 'task'} - def __init__( # pylint: disable=too-many-arguments + def __init__( self, *, python_callable: Callable, diff --git a/airflow/operators/python_operator.py b/airflow/operators/python_operator.py index 75d9dbbf48dd5..ac8c6448d241d 100644 --- a/airflow/operators/python_operator.py +++ b/airflow/operators/python_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.operators.python import ( # noqa BranchPythonOperator, PythonOperator, diff --git a/airflow/operators/s3_file_transform_operator.py b/airflow/operators/s3_file_transform_operator.py index a4221ded29402..828031d814102 100644 --- a/airflow/operators/s3_file_transform_operator.py +++ b/airflow/operators/s3_file_transform_operator.py @@ -22,7 +22,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.operators.s3_file_transform import S3FileTransformOperator # noqa warnings.warn( diff --git a/airflow/operators/slack_operator.py b/airflow/operators/slack_operator.py index 311a4ca751bb9..3af49e222218e 100644 --- a/airflow/operators/slack_operator.py +++ b/airflow/operators/slack_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.slack.operators.slack import SlackAPIOperator, SlackAPIPostOperator # noqa warnings.warn( diff --git a/airflow/operators/sqlite_operator.py b/airflow/operators/sqlite_operator.py index 79f7eff226d27..68791d69846c0 100644 --- a/airflow/operators/sqlite_operator.py +++ b/airflow/operators/sqlite_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.sqlite.operators.sqlite import SqliteOperator # noqa warnings.warn( diff --git a/airflow/operators/subdag_operator.py b/airflow/operators/subdag_operator.py index 1323faf3a42d4..bb5a088d23b6d 100644 --- a/airflow/operators/subdag_operator.py +++ b/airflow/operators/subdag_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.operators.subdag import SkippedStatePropagationOptions, SubDagOperator # noqa warnings.warn( diff --git a/airflow/plugins_manager.py b/airflow/plugins_manager.py index 2eeb985304d80..575a37ab68831 100644 --- a/airflow/plugins_manager.py +++ b/airflow/plugins_manager.py @@ -172,7 +172,7 @@ def is_valid_plugin(plugin_obj): :return: Whether or not the obj is a valid subclass of AirflowPlugin """ - global plugins # pylint: disable=global-statement + global plugins if ( inspect.isclass(plugin_obj) @@ -190,7 +190,7 @@ def register_plugin(plugin_instance): :param plugin_instance: subclass of AirflowPlugin """ - global plugins # pylint: disable=global-statement + global plugins plugin_instance.on_load() plugins.append(plugin_instance) @@ -200,7 +200,7 @@ def load_entrypoint_plugins(): Load and register plugins AirflowPlugin subclasses from the entrypoints. The entry_point group should be 'airflow.plugins'. 
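For orientation, a rough sketch of how that 'airflow.plugins' entry-point group can be enumerated (assuming Python 3.10+ ``importlib.metadata``; the helper name is illustrative):

.. code-block:: python

    from importlib import metadata

    def iter_airflow_plugin_classes():
        # Each entry point advertised under 'airflow.plugins' is expected to
        # load to an AirflowPlugin subclass, which is then registered.
        for entry_point in metadata.entry_points(group="airflow.plugins"):
            yield entry_point.load()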
""" - global import_errors # pylint: disable=global-statement + global import_errors log.debug("Loading plugins from entrypoints") @@ -214,14 +214,14 @@ def load_entrypoint_plugins(): plugin_instance = plugin_class() plugin_instance.source = EntryPointSource(entry_point, dist) register_plugin(plugin_instance) - except Exception as e: # pylint: disable=broad-except + except Exception as e: log.exception("Failed to import plugin %s", entry_point.name) import_errors[entry_point.module] = str(e) def load_plugins_from_plugin_directory(): """Load and register Airflow Plugins from plugins directory""" - global import_errors # pylint: disable=global-statement + global import_errors log.debug("Loading plugins from directory: %s", settings.PLUGINS_FOLDER) for file_path in find_path_from_directory(settings.PLUGINS_FOLDER, ".airflowignore"): @@ -243,12 +243,11 @@ def load_plugins_from_plugin_directory(): plugin_instance = mod_attr_value() plugin_instance.source = PluginsDirectorySource(file_path) register_plugin(plugin_instance) - except Exception as e: # pylint: disable=broad-except + except Exception as e: log.exception('Failed to import plugin %s', file_path) import_errors[file_path] = str(e) -# pylint: disable=protected-access def make_module(name: str, objects: List[Any]): """Creates new module.""" if not objects: @@ -262,9 +261,6 @@ def make_module(name: str, objects: List[Any]): return module -# pylint: enable=protected-access - - def ensure_plugins_loaded(): """ Load plugins from plugins directory and entrypoints. @@ -273,7 +269,7 @@ def ensure_plugins_loaded(): """ from airflow.stats import Stats - global plugins, registered_hooks # pylint: disable=global-statement + global plugins, registered_hooks if plugins is not None: log.debug("Plugins are already loaded. Skipping.") @@ -303,12 +299,10 @@ def ensure_plugins_loaded(): def initialize_web_ui_plugins(): """Collect extension points for WEB UI""" - # pylint: disable=global-statement global plugins global flask_blueprints global flask_appbuilder_views global flask_appbuilder_menu_links - # pylint: enable=global-statement if ( flask_blueprints is not None @@ -345,11 +339,9 @@ def initialize_web_ui_plugins(): def initialize_extra_operators_links_plugins(): """Creates modules for loaded extension from extra operators links plugins""" - # pylint: disable=global-statement global global_operator_extra_links global operator_extra_links global registered_operator_link_classes - # pylint: enable=global-statement if ( global_operator_extra_links is not None @@ -383,10 +375,8 @@ def initialize_extra_operators_links_plugins(): def integrate_executor_plugins() -> None: """Integrate executor plugins to the context.""" - # pylint: disable=global-statement global plugins global executors_modules - # pylint: enable=global-statement if executors_modules is not None: return @@ -407,15 +397,14 @@ def integrate_executor_plugins() -> None: executors_module = make_module('airflow.executors.' 
+ plugin_name, plugin.executors) if executors_module: executors_modules.append(executors_module) - sys.modules[executors_module.__name__] = executors_module # pylint: disable=no-member + sys.modules[executors_module.__name__] = executors_module def integrate_macros_plugins() -> None: """Integrates macro plugins.""" - # pylint: disable=global-statement global plugins global macros_modules - # pylint: enable=global-statement + from airflow import macros if macros_modules is not None: @@ -438,7 +427,7 @@ def integrate_macros_plugins() -> None: if macros_module: macros_modules.append(macros_module) - sys.modules[macros_module.__name__] = macros_module # pylint: disable=no-member + sys.modules[macros_module.__name__] = macros_module # Register the newly created module on airflow.macros such that it # can be accessed when rendering templates. setattr(macros, plugin.name, macros_module) diff --git a/airflow/providers/amazon/aws/example_dags/example_google_api_to_s3_transfer_advanced.py b/airflow/providers/amazon/aws/example_dags/example_google_api_to_s3_transfer_advanced.py index cfc4b2c7cb2d4..9b842c286bcd0 100644 --- a/airflow/providers/amazon/aws/example_dags/example_google_api_to_s3_transfer_advanced.py +++ b/airflow/providers/amazon/aws/example_dags/example_google_api_to_s3_transfer_advanced.py @@ -52,7 +52,6 @@ # [END howto_operator_google_api_to_s3_transfer_advanced_env_variables] -# pylint: disable=unused-argument # [START howto_operator_google_api_to_s3_transfer_advanced_task_1_2] def _check_and_transform_video_ids(xcom_key, task_ids, task_instance, **kwargs): video_ids_response = task_instance.xcom_pull(task_ids=task_ids, key=xcom_key) @@ -65,7 +64,7 @@ def _check_and_transform_video_ids(xcom_key, task_ids, task_instance, **kwargs): # [END howto_operator_google_api_to_s3_transfer_advanced_task_1_2] -# pylint: enable=unused-argument + s3_directory, s3_file = S3_DESTINATION_KEY.rsplit('/', 1) s3_file_name, _ = s3_file.rsplit('.', 1) diff --git a/airflow/providers/amazon/aws/hooks/athena.py b/airflow/providers/amazon/aws/hooks/athena.py index c7037fbbfc90c..2b30fd8b1fddd 100644 --- a/airflow/providers/amazon/aws/hooks/athena.py +++ b/airflow/providers/amazon/aws/hooks/athena.py @@ -100,12 +100,12 @@ def check_query_status(self, query_execution_id: str) -> Optional[str]: state = None try: state = response['QueryExecution']['Status']['State'] - except Exception as ex: # pylint: disable=broad-except + except Exception as ex: self.log.error('Exception while getting query state %s', ex) finally: # The error is being absorbed here and is being handled by the caller. # The error is being absorbed to implement retries. - return state # pylint: disable=lost-exception + return state def get_state_change_reason(self, query_execution_id: str) -> Optional[str]: """ @@ -119,12 +119,12 @@ def get_state_change_reason(self, query_execution_id: str) -> Optional[str]: reason = None try: reason = response['QueryExecution']['Status']['StateChangeReason'] - except Exception as ex: # pylint: disable=broad-except + except Exception as ex: self.log.error('Exception while getting query state change reason: %s', ex) finally: # The error is being absorbed here and is being handled by the caller. # The error is being absorbed to implement retries. 
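A caller-side view of the retries mentioned in that comment, as a hedged sketch (function and parameter names are hypothetical, not the hook's API):

.. code-block:: python

    import time

    def poll_query_state(hook, query_execution_id, max_polls=10, sleep_seconds=5):
        # check_query_status() absorbs errors and returns None, so a None state
        # is treated as transient and polling simply continues up to max_polls.
        for _ in range(max_polls):
            state = hook.check_query_status(query_execution_id)
            if state is not None:
                return state
            time.sleep(sleep_seconds)
        return None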
- return reason # pylint: disable=lost-exception + return reason def get_query_results( self, query_execution_id: str, next_token_id: Optional[str] = None, max_results: int = 1000 diff --git a/airflow/providers/amazon/aws/hooks/aws_dynamodb.py b/airflow/providers/amazon/aws/hooks/aws_dynamodb.py index 92153b9909b59..dedb80073e3e5 100644 --- a/airflow/providers/amazon/aws/hooks/aws_dynamodb.py +++ b/airflow/providers/amazon/aws/hooks/aws_dynamodb.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.hooks.dynamodb import AwsDynamoDBHook # noqa warnings.warn( diff --git a/airflow/providers/amazon/aws/hooks/base_aws.py b/airflow/providers/amazon/aws/hooks/base_aws.py index c1c5b1d02f2b5..5d6e22bf41319 100644 --- a/airflow/providers/amazon/aws/hooks/base_aws.py +++ b/airflow/providers/amazon/aws/hooks/base_aws.py @@ -115,7 +115,7 @@ def _impersonate_to_role( botocore_session = self._assume_role_with_web_identity( role_arn=role_arn, assume_role_kwargs=assume_role_kwargs, - base_session=session._session, # pylint: disable=protected-access + base_session=session._session, ) return boto3.session.Session( region_name=session.region_name, @@ -286,7 +286,7 @@ def _assume_role_with_web_identity(self, role_arn, assume_role_kwargs, base_sess time_fetcher=lambda: datetime.datetime.now(tz=tzlocal()), ) botocore_session = botocore.session.Session() - botocore_session._credentials = aws_creds # pylint: disable=protected-access + botocore_session._credentials = aws_creds return botocore_session def _get_google_identity_token_loader(self): diff --git a/airflow/providers/amazon/aws/hooks/batch_client.py b/airflow/providers/amazon/aws/hooks/batch_client.py index 6aa7156b5d635..ed392a9869d32 100644 --- a/airflow/providers/amazon/aws/hooks/batch_client.py +++ b/airflow/providers/amazon/aws/hooks/batch_client.py @@ -38,13 +38,6 @@ from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook from airflow.typing_compat import Protocol, runtime_checkable -# Add exceptions to pylint for the boto3 protocol only; ideally the boto3 library -# could provide -# protocols for all their dynamically generated classes (try to migrate this to a PR on botocore). -# Note that the use of invalid-name parameters should be restricted to the boto3 mappings only; -# all the Airflow wrappers of boto3 clients should not adopt invalid-names to match boto3. -# pylint: disable=invalid-name, unused-argument - @runtime_checkable class AwsBatchProtocol(Protocol): @@ -155,7 +148,6 @@ def terminate_job(self, jobId: str, reason: str) -> Dict: # Note that the use of invalid-name parameters should be restricted to the boto3 mappings only; # all the Airflow wrappers of boto3 clients should not adopt invalid-names to match boto3. -# pylint: enable=invalid-name, unused-argument class AwsBatchClientHook(AwsBaseHook): @@ -211,9 +203,9 @@ def __init__( self.status_retries = status_retries or self.STATUS_RETRIES @property - def client(self) -> Union[AwsBatchProtocol, botocore.client.BaseClient]: # noqa: D402 + def client(self) -> Union[AwsBatchProtocol, botocore.client.BaseClient]: """ - An AWS API client for batch services, like ``boto3.client('batch')`` + An AWS API client for batch services. 
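In plain boto3 terms, the object returned by this ``client`` property corresponds to something like the following (the region name is only an example value):

.. code-block:: python

    import boto3

    # Roughly what the hook's ``client`` property hands back: a low-level
    # boto3 'batch' client bound to a region.
    batch_client = boto3.client("batch", region_name="us-east-1")
    # e.g. batch_client.describe_jobs(jobs=["some-job-id"])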
:return: a boto3 'batch' client for the ``.region_name`` :rtype: Union[AwsBatchProtocol, botocore.client.BaseClient] diff --git a/airflow/providers/amazon/aws/hooks/glue.py b/airflow/providers/amazon/aws/hooks/glue.py index 8d1cba28bcf50..d24b968dc2876 100644 --- a/airflow/providers/amazon/aws/hooks/glue.py +++ b/airflow/providers/amazon/aws/hooks/glue.py @@ -64,7 +64,7 @@ def __init__( create_job_kwargs: Optional[dict] = None, *args, **kwargs, - ): # pylint: disable=too-many-arguments + ): self.job_name = job_name self.desc = desc self.concurrent_run_limit = concurrent_run_limit diff --git a/airflow/providers/amazon/aws/hooks/glue_crawler.py b/airflow/providers/amazon/aws/hooks/glue_crawler.py index 0ba35199f6535..0b1ef5b224216 100644 --- a/airflow/providers/amazon/aws/hooks/glue_crawler.py +++ b/airflow/providers/amazon/aws/hooks/glue_crawler.py @@ -141,9 +141,7 @@ def wait_for_crawler_completion(self, crawler_name: str, poll_interval: int = 5) self.log.info("crawler_config: %s", crawler) crawler_status = crawler['LastCrawl']['Status'] if crawler_status in failed_status: - raise AirflowException( - f"Status: {crawler_status}" - ) # pylint: disable=raising-format-tuple + raise AirflowException(f"Status: {crawler_status}") else: metrics = self.glue_client.get_crawler_metrics(CrawlerNameList=[crawler_name])[ 'CrawlerMetricsList' diff --git a/airflow/providers/amazon/aws/hooks/redshift.py b/airflow/providers/amazon/aws/hooks/redshift.py index 33e419cd0c0dc..d85834cd3b04d 100644 --- a/airflow/providers/amazon/aws/hooks/redshift.py +++ b/airflow/providers/amazon/aws/hooks/redshift.py @@ -58,7 +58,7 @@ def cluster_status(self, cluster_identifier: str) -> str: except self.get_conn().exceptions.ClusterNotFoundFault: return 'cluster_not_found' - def delete_cluster( # pylint: disable=invalid-name + def delete_cluster( self, cluster_identifier: str, skip_final_cluster_snapshot: bool = True, diff --git a/airflow/providers/amazon/aws/hooks/s3.py b/airflow/providers/amazon/aws/hooks/s3.py index 2e18754d51d53..11bd9f60739c2 100644 --- a/airflow/providers/amazon/aws/hooks/s3.py +++ b/airflow/providers/amazon/aws/hooks/s3.py @@ -16,7 +16,7 @@ # specific language governing permissions and limitations # under the License. -# pylint: disable=invalid-name + """Interact with AWS S3, using the boto3 library.""" import fnmatch import gzip as gz @@ -38,7 +38,7 @@ from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook from airflow.utils.helpers import chunks -T = TypeVar("T", bound=Callable) # pylint: disable=invalid-name +T = TypeVar("T", bound=Callable) def provide_bucket_name(func: T) -> T: diff --git a/airflow/providers/amazon/aws/hooks/sagemaker.py b/airflow/providers/amazon/aws/hooks/sagemaker.py index 756d888f8418b..dc4de1e50258e 100644 --- a/airflow/providers/amazon/aws/hooks/sagemaker.py +++ b/airflow/providers/amazon/aws/hooks/sagemaker.py @@ -131,7 +131,7 @@ def secondary_training_status_message( return '\n'.join(status_strs) -class SageMakerHook(AwsBaseHook): # pylint: disable=too-many-public-methods +class SageMakerHook(AwsBaseHook): """ Interact with Amazon SageMaker. @@ -853,9 +853,9 @@ def check_training_status_with_log( def list_training_jobs( self, name_contains: Optional[str] = None, max_results: Optional[int] = None, **kwargs - ) -> List[Dict]: # noqa: D402 + ) -> List[Dict]: """ - This method wraps boto3's list_training_jobs(). The training job name and max results are configurable + This method wraps boto3's `list_training_jobs`. 
The training job name and max results are configurable via arguments. Other arguments are not, and should be provided via kwargs. Note boto3 expects these in CamelCase format, for example: @@ -892,9 +892,9 @@ def list_training_jobs( ) return results - def list_processing_jobs(self, **kwargs) -> List[Dict]: # noqa: D402 + def list_processing_jobs(self, **kwargs) -> List[Dict]: """ - This method wraps boto3's list_processing_jobs(). All arguments should be provided via kwargs. + This method wraps boto3's `list_processing_jobs`. All arguments should be provided via kwargs. Note boto3 expects these in CamelCase format, for example: .. code-block:: python diff --git a/airflow/providers/amazon/aws/hooks/ses.py b/airflow/providers/amazon/aws/hooks/ses.py index 5c7423f48377e..0c7e39f74c6a7 100644 --- a/airflow/providers/amazon/aws/hooks/ses.py +++ b/airflow/providers/amazon/aws/hooks/ses.py @@ -36,7 +36,7 @@ def __init__(self, *args, **kwargs) -> None: kwargs['client_type'] = 'ses' super().__init__(*args, **kwargs) - def send_email( # pylint: disable=too-many-arguments + def send_email( self, mail_from: str, to: Union[str, Iterable[str]], diff --git a/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py b/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py index 8584c1c49606f..0b0a103105312 100644 --- a/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py +++ b/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py @@ -62,7 +62,7 @@ def hook(self): from airflow.providers.amazon.aws.hooks.logs import AwsLogsHook return AwsLogsHook(aws_conn_id=remote_conn_id, region_name=self.region_name) - except Exception as e: # pylint: disable=broad-except + except Exception as e: self.log.error( 'Could not create an AwsLogsHook with connection id "%s". ' 'Please make sure that apache-airflow[aws] is installed and ' @@ -122,7 +122,7 @@ def get_cloudwatch_logs(self, stream_name: str) -> str: ) return '\n'.join(self._event_to_str(event) for event in events) - except Exception: # pylint: disable=broad-except + except Exception: msg = 'Could not read remote logs from log_group: {} log_stream: {}.'.format( self.log_group, stream_name ) diff --git a/airflow/providers/amazon/aws/log/s3_task_handler.py b/airflow/providers/amazon/aws/log/s3_task_handler.py index f357fa37b6111..7a6b3d888a4ae 100644 --- a/airflow/providers/amazon/aws/log/s3_task_handler.py +++ b/airflow/providers/amazon/aws/log/s3_task_handler.py @@ -50,7 +50,7 @@ def hook(self): from airflow.providers.amazon.aws.hooks.s3 import S3Hook return S3Hook(remote_conn_id, transfer_config_args={"use_threads": False}) - except Exception as e: # pylint: disable=broad-except + except Exception as e: self.log.exception( 'Could not create an S3Hook with connection id "%s". 
' 'Please make sure that apache-airflow[aws] is installed and ' @@ -119,7 +119,7 @@ def _read(self, ti, try_number, metadata=None): try: log_exists = self.s3_log_exists(remote_loc) - except Exception as error: # pylint: disable=broad-except + except Exception as error: self.log.exception("Failed to verify remote log exists %s.", remote_loc) log = f'*** Failed to verify remote log exists {remote_loc}.\n{error}\n' @@ -159,7 +159,7 @@ def s3_read(self, remote_log_location: str, return_error: bool = False) -> str: """ try: return self.hook.read_key(remote_log_location) - except Exception as error: # pylint: disable=broad-except + except Exception as error: msg = f'Could not read logs from {remote_log_location} with error: {error}' self.log.exception(msg) # return error if needed @@ -184,7 +184,7 @@ def s3_write(self, log: str, remote_log_location: str, append: bool = True): if append and self.s3_log_exists(remote_log_location): old_log = self.s3_read(remote_log_location) log = '\n'.join([old_log, log]) if old_log else log - except Exception: # pylint: disable=broad-except + except Exception: self.log.exception('Could not verify previous log to append') try: @@ -194,5 +194,5 @@ def s3_write(self, log: str, remote_log_location: str, append: bool = True): replace=True, encrypt=conf.getboolean('logging', 'ENCRYPT_S3_LOGS'), ) - except Exception: # pylint: disable=broad-except + except Exception: self.log.exception('Could not write logs to %s', remote_log_location) diff --git a/airflow/providers/amazon/aws/operators/athena.py b/airflow/providers/amazon/aws/operators/athena.py index 0b7d77d8e4b6b..633b6a3e702a7 100644 --- a/airflow/providers/amazon/aws/operators/athena.py +++ b/airflow/providers/amazon/aws/operators/athena.py @@ -59,7 +59,7 @@ class AWSAthenaOperator(BaseOperator): template_ext = ('.sql',) template_fields_renderers = {"query": "sql"} - def __init__( # pylint: disable=too-many-arguments + def __init__( self, *, query: str, @@ -131,7 +131,7 @@ def on_kill(self) -> None: http_status_code = None try: http_status_code = response['ResponseMetadata']['HTTPStatusCode'] - except Exception as ex: # pylint: disable=broad-except + except Exception as ex: self.log.error('Exception while cancelling query: %s', ex) finally: if http_status_code is None or http_status_code != 200: diff --git a/airflow/providers/amazon/aws/operators/batch.py b/airflow/providers/amazon/aws/operators/batch.py index 296b81e41cb81..46c551530b5c4 100644 --- a/airflow/providers/amazon/aws/operators/batch.py +++ b/airflow/providers/amazon/aws/operators/batch.py @@ -118,7 +118,7 @@ def __init__( region_name: Optional[str] = None, tags: Optional[dict] = None, **kwargs, - ): # pylint: disable=too-many-arguments + ): BaseOperator.__init__(self, **kwargs) self.job_id = job_id @@ -150,7 +150,7 @@ def on_kill(self): response = self.hook.client.terminate_job(jobId=self.job_id, reason="Task killed by the user") self.log.info("AWS Batch job (%s) terminated: %s", self.job_id, response) - def submit_job(self, context: Dict): # pylint: disable=unused-argument + def submit_job(self, context: Dict): """ Submit an AWS Batch job @@ -181,7 +181,7 @@ def submit_job(self, context: Dict): # pylint: disable=unused-argument self.log.error("AWS Batch job (%s) failed submission", self.job_id) raise AirflowException(e) - def monitor_job(self, context: Dict): # pylint: disable=unused-argument + def monitor_job(self, context: Dict): """ Monitor an AWS Batch job diff --git a/airflow/providers/amazon/aws/operators/datasync.py 
b/airflow/providers/amazon/aws/operators/datasync.py index 6c88eb1c339cd..750479bc0a0e3 100644 --- a/airflow/providers/amazon/aws/operators/datasync.py +++ b/airflow/providers/amazon/aws/operators/datasync.py @@ -26,7 +26,6 @@ from airflow.providers.amazon.aws.hooks.datasync import AWSDataSyncHook -# pylint: disable=too-many-instance-attributes, too-many-arguments class AWSDataSyncOperator(BaseOperator): r"""Find, Create, Update, Execute and Delete AWS DataSync Tasks. diff --git a/airflow/providers/amazon/aws/operators/ecs.py b/airflow/providers/amazon/aws/operators/ecs.py index 01b130748bfa8..4c8e75b4baf38 100644 --- a/airflow/providers/amazon/aws/operators/ecs.py +++ b/airflow/providers/amazon/aws/operators/ecs.py @@ -54,7 +54,6 @@ class ECSProtocol(Protocol): - https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ecs.html """ - # pylint: disable=C0103, line-too-long def run_task(self, **kwargs) -> Dict: """https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ecs.html#ECS.Client.run_task""" # noqa: E501 ... @@ -79,10 +78,8 @@ def list_tasks(self, cluster: str, launchType: str, desiredStatus: str, family: """https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ecs.html#ECS.Client.list_tasks""" # noqa: E501 ... - # pylint: enable=C0103, line-too-long - -class ECSOperator(BaseOperator): # pylint: disable=too-many-instance-attributes +class ECSOperator(BaseOperator): """ Execute a task on AWS ECS (Elastic Container Service) @@ -149,7 +146,7 @@ def __init__( *, task_definition: str, cluster: str, - overrides: dict, # pylint: disable=too-many-arguments + overrides: dict, aws_conn_id: Optional[str] = None, region_name: Optional[str] = None, launch_type: str = 'EC2', diff --git a/airflow/providers/amazon/aws/operators/glue.py b/airflow/providers/amazon/aws/operators/glue.py index 254457932d4a2..248c96bb2c69f 100644 --- a/airflow/providers/amazon/aws/operators/glue.py +++ b/airflow/providers/amazon/aws/operators/glue.py @@ -75,7 +75,7 @@ def __init__( iam_role_name: Optional[str] = None, create_job_kwargs: Optional[dict] = None, **kwargs, - ): # pylint: disable=too-many-arguments + ): super().__init__(**kwargs) self.job_name = job_name self.job_desc = job_desc diff --git a/airflow/providers/amazon/aws/operators/s3_file_transform.py b/airflow/providers/amazon/aws/operators/s3_file_transform.py index 3ab861f7ec6ce..b6911a737d618 100644 --- a/airflow/providers/amazon/aws/operators/s3_file_transform.py +++ b/airflow/providers/amazon/aws/operators/s3_file_transform.py @@ -96,7 +96,7 @@ def __init__( replace: bool = False, **kwargs, ) -> None: - # pylint: disable=too-many-arguments + super().__init__(**kwargs) self.source_s3_key = source_s3_key self.source_aws_conn_id = source_aws_conn_id diff --git a/airflow/providers/amazon/aws/operators/sagemaker_base.py b/airflow/providers/amazon/aws/operators/sagemaker_base.py index 8c414c1250373..4614f073d8dcf 100644 --- a/airflow/providers/amazon/aws/operators/sagemaker_base.py +++ b/airflow/providers/amazon/aws/operators/sagemaker_base.py @@ -81,8 +81,8 @@ def parse_config_integers(self): for field in self.integer_fields: self.parse_integer(self.config, field) - def expand_role(self): # noqa: D402 - """Placeholder for calling boto3's expand_role(), which expands an IAM role name into an ARN.""" + def expand_role(self): + """Placeholder for calling boto3's `expand_role`, which expands an IAM role name into an ARN.""" def preprocess_config(self): """Process the config into a usable form.""" diff 
--git a/airflow/providers/amazon/aws/sensors/sagemaker_base.py b/airflow/providers/amazon/aws/sensors/sagemaker_base.py index fe2af29aba49e..8a0956ecf8e08 100644 --- a/airflow/providers/amazon/aws/sensors/sagemaker_base.py +++ b/airflow/providers/amazon/aws/sensors/sagemaker_base.py @@ -76,7 +76,7 @@ def get_sagemaker_response(self) -> Optional[dict]: """Placeholder for checking status of a SageMaker task.""" raise NotImplementedError('Please implement get_sagemaker_response() in subclass') - def get_failed_reason_from_response(self, response: dict) -> str: # pylint: disable=unused-argument + def get_failed_reason_from_response(self, response: dict) -> str: """Placeholder for extracting the reason for failure from an AWS response.""" return 'Unknown' diff --git a/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py b/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py index 7e0e41062cafc..da25e8b1c2e19 100644 --- a/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py +++ b/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py @@ -88,7 +88,7 @@ class DynamoDBToS3Operator(BaseOperator): :type s3_bucket_name: str :param file_size: Flush file to s3 if file size >= file_size :type file_size: int - :param dynamodb_scan_kwargs: kwargs pass to # noqa: E501 pylint: disable=line-too-long + :param dynamodb_scan_kwargs: kwargs pass to # noqa: E501 :type dynamodb_scan_kwargs: Optional[Dict[str, Any]] :param s3_key_prefix: Prefix of s3 object key :type s3_key_prefix: Optional[str] @@ -147,6 +147,6 @@ def _scan_dynamodb_and_upload_to_s3(self, temp_file: IO, scan_kwargs: dict, tabl if getsize(temp_file.name) >= self.file_size: _upload_file_to_s3(temp_file, self.s3_bucket_name, self.s3_key_prefix) temp_file.close() - # pylint: disable=consider-using-with + temp_file = NamedTemporaryFile() return temp_file diff --git a/airflow/providers/amazon/aws/transfers/exasol_to_s3.py b/airflow/providers/amazon/aws/transfers/exasol_to_s3.py index 678429262a732..e5b53c6b1ff49 100644 --- a/airflow/providers/amazon/aws/transfers/exasol_to_s3.py +++ b/airflow/providers/amazon/aws/transfers/exasol_to_s3.py @@ -60,7 +60,7 @@ class ExasolToS3Operator(BaseOperator): template_ext = ('.sql',) ui_color = '#ededed' - def __init__( # pylint: disable=too-many-arguments + def __init__( self, *, query_or_table: str, diff --git a/airflow/providers/amazon/aws/transfers/gcs_to_s3.py b/airflow/providers/amazon/aws/transfers/gcs_to_s3.py index 0684305ba277e..d4159ba2f8cfa 100644 --- a/airflow/providers/amazon/aws/transfers/gcs_to_s3.py +++ b/airflow/providers/amazon/aws/transfers/gcs_to_s3.py @@ -95,7 +95,7 @@ class GCSToS3Operator(BaseOperator): def __init__( self, - *, # pylint: disable=too-many-arguments + *, bucket: str, prefix: Optional[str] = None, delimiter: Optional[str] = None, diff --git a/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py b/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py index 5f027b58f6586..52023ce26425d 100644 --- a/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py +++ b/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py @@ -59,7 +59,7 @@ class HiveToDynamoDBOperator(BaseOperator): template_ext = ('.sql',) ui_color = '#a0e08c' - def __init__( # pylint: disable=too-many-arguments + def __init__( self, *, sql: str, diff --git a/airflow/providers/amazon/aws/transfers/mongo_to_s3.py b/airflow/providers/amazon/aws/transfers/mongo_to_s3.py index b88036d6fb160..95b2bfc12d58e 100644 --- a/airflow/providers/amazon/aws/transfers/mongo_to_s3.py +++ 
b/airflow/providers/amazon/aws/transfers/mongo_to_s3.py @@ -59,7 +59,6 @@ class MongoToS3Operator(BaseOperator): template_fields = ('s3_bucket', 's3_key', 'mongo_query', 'mongo_collection') ui_color = '#589636' template_fields_renderers = {"mongo_query": "py"} - # pylint: disable=too-many-instance-attributes def __init__( self, diff --git a/airflow/providers/amazon/aws/transfers/redshift_to_s3.py b/airflow/providers/amazon/aws/transfers/redshift_to_s3.py index 838d22df4db8b..bbd2c4069ac11 100644 --- a/airflow/providers/amazon/aws/transfers/redshift_to_s3.py +++ b/airflow/providers/amazon/aws/transfers/redshift_to_s3.py @@ -75,7 +75,7 @@ class RedshiftToS3Operator(BaseOperator): template_ext = () ui_color = '#ededed' - def __init__( # pylint: disable=too-many-arguments + def __init__( self, *, s3_bucket: str, diff --git a/airflow/providers/apache/beam/hooks/beam.py b/airflow/providers/apache/beam/hooks/beam.py index 1cf7a8ba15a80..4bd27ebc4fdf4 100644 --- a/airflow/providers/apache/beam/hooks/beam.py +++ b/airflow/providers/apache/beam/hooks/beam.py @@ -94,7 +94,7 @@ def __init__( self.log.info("Running command: %s", " ".join(shlex.quote(c) for c in cmd)) self.process_line_callback = process_line_callback self.job_id: Optional[str] = None - # pylint: disable=consider-using-with + self._proc = subprocess.Popen( cmd, shell=False, @@ -185,7 +185,7 @@ def _start_pipeline( ) cmd_runner.wait_for_done() - def start_python_pipeline( # pylint: disable=too-many-arguments + def start_python_pipeline( self, variables: dict, py_file: str, diff --git a/airflow/providers/apache/beam/operators/beam.py b/airflow/providers/apache/beam/operators/beam.py index 7ff87bfa02b1a..da57feae61de9 100644 --- a/airflow/providers/apache/beam/operators/beam.py +++ b/airflow/providers/apache/beam/operators/beam.py @@ -234,9 +234,7 @@ def execute(self, context): with ExitStack() as exit_stack: if self.py_file.lower().startswith("gs://"): gcs_hook = GCSHook(self.gcp_conn_id, self.delegate_to) - tmp_gcs_file = exit_stack.enter_context( # pylint: disable=no-member - gcs_hook.provide_file(object_url=self.py_file) - ) + tmp_gcs_file = exit_stack.enter_context(gcs_hook.provide_file(object_url=self.py_file)) self.py_file = tmp_gcs_file.name self.beam_hook.start_python_pipeline( @@ -250,7 +248,7 @@ def execute(self, context): ) if is_dataflow: - self.dataflow_hook.wait_for_done( # pylint: disable=no-value-for-parameter + self.dataflow_hook.wait_for_done( job_name=dataflow_job_name, location=self.dataflow_config.location, job_id=self.dataflow_job_id, @@ -268,7 +266,6 @@ def on_kill(self) -> None: ) -# pylint: disable=too-many-instance-attributes class BeamRunJavaPipelineOperator(BaseOperator, BeamDataflowMixin): """ Launching Apache Beam pipelines written in Java. 
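The ``ExitStack``/``provide_file`` staging pattern used in both ``execute`` methods can be summarised with a small sketch (the helper below is illustrative, not part of the operator):

.. code-block:: python

    from contextlib import ExitStack

    def run_with_local_artifact(gcs_hook, path, run_pipeline):
        # A gs:// artifact is materialised as a local temporary file via
        # provide_file(); it stays available while the ExitStack is open,
        # i.e. for the duration of run_pipeline().
        with ExitStack() as stack:
            if path.lower().startswith("gs://"):
                path = stack.enter_context(gcs_hook.provide_file(object_url=path)).name
            return run_pipeline(path)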
@@ -391,9 +388,7 @@ def execute(self, context): with ExitStack() as exit_stack: if self.jar.lower().startswith("gs://"): gcs_hook = GCSHook(self.gcp_conn_id, self.delegate_to) - tmp_gcs_file = exit_stack.enter_context( # pylint: disable=no-member - gcs_hook.provide_file(object_url=self.jar) - ) + tmp_gcs_file = exit_stack.enter_context(gcs_hook.provide_file(object_url=self.jar)) self.jar = tmp_gcs_file.name if is_dataflow: @@ -405,7 +400,7 @@ def execute(self, context): # This method is wrapped by @_fallback_to_project_id_from_variables decorator which # fallback project_id value from variables and raise error if project_id is # defined both in variables and as parameter (here is already defined in variables) - self.dataflow_hook.is_job_dataflow_running( # pylint: disable=no-value-for-parameter + self.dataflow_hook.is_job_dataflow_running( name=self.dataflow_config.job_name, variables=pipeline_options, ) @@ -416,7 +411,7 @@ def execute(self, context): # This method is wrapped by @_fallback_to_project_id_from_variables decorator which # fallback project_id value from variables and raise error if project_id is # defined both in variables and as parameter (here is already defined in variables) - # pylint: disable=no-value-for-parameter + is_running = self.dataflow_hook.is_job_dataflow_running( name=self.dataflow_config.job_name, variables=pipeline_options, diff --git a/airflow/providers/apache/cassandra/hooks/cassandra.py b/airflow/providers/apache/cassandra/hooks/cassandra.py index 26265d3e0c067..461c857db40be 100644 --- a/airflow/providers/apache/cassandra/hooks/cassandra.py +++ b/airflow/providers/apache/cassandra/hooks/cassandra.py @@ -210,5 +210,5 @@ def record_exists(self, table: str, keys: Dict[str, str]) -> bool: try: result = self.get_conn().execute(query, keys) return result.one() is not None - except Exception: # pylint: disable=broad-except + except Exception: return False diff --git a/airflow/providers/apache/druid/transfers/hive_to_druid.py b/airflow/providers/apache/druid/transfers/hive_to_druid.py index 7c77ae582a417..dcd4363d78881 100644 --- a/airflow/providers/apache/druid/transfers/hive_to_druid.py +++ b/airflow/providers/apache/druid/transfers/hive_to_druid.py @@ -79,7 +79,7 @@ class HiveToDruidOperator(BaseOperator): template_fields = ('sql', 'intervals') template_ext = ('.sql',) - def __init__( # pylint: disable=too-many-arguments + def __init__( self, *, sql: str, diff --git a/airflow/providers/apache/hdfs/hooks/hdfs.py b/airflow/providers/apache/hdfs/hooks/hdfs.py index 5cd352370ce5e..034490d3db000 100644 --- a/airflow/providers/apache/hdfs/hooks/hdfs.py +++ b/airflow/providers/apache/hdfs/hooks/hdfs.py @@ -23,7 +23,7 @@ from airflow.hooks.base import BaseHook try: - from snakebite.client import AutoConfigClient, Client, HAClient, Namenode # pylint: disable=syntax-error + from snakebite.client import AutoConfigClient, Client, HAClient, Namenode snakebite_loaded = True except ImportError: diff --git a/airflow/providers/apache/hdfs/hooks/webhdfs.py b/airflow/providers/apache/hdfs/hooks/webhdfs.py index 81750f8dbc606..fce445f949382 100644 --- a/airflow/providers/apache/hdfs/hooks/webhdfs.py +++ b/airflow/providers/apache/hdfs/hooks/webhdfs.py @@ -32,7 +32,7 @@ _kerberos_security_mode = conf.get("core", "security") == "kerberos" if _kerberos_security_mode: try: - from hdfs.ext.kerberos import KerberosClient # pylint: disable=ungrouped-imports + from hdfs.ext.kerberos import KerberosClient except ImportError: log.error("Could not load the Kerberos extension for the 
WebHDFSHook.") raise diff --git a/airflow/providers/apache/hdfs/sensors/hdfs.py b/airflow/providers/apache/hdfs/sensors/hdfs.py index fb296eb2fd614..a37877683ceca 100644 --- a/airflow/providers/apache/hdfs/sensors/hdfs.py +++ b/airflow/providers/apache/hdfs/sensors/hdfs.py @@ -130,7 +130,7 @@ def poke(self, context: Dict[Any, Any]) -> bool: result = self.filter_for_ignored_ext(result, self.ignored_ext, self.ignore_copying) result = self.filter_for_filesize(result, self.file_size) return bool(result) - except Exception: # pylint: disable=broad-except + except Exception: e = sys.exc_info() self.log.debug("Caught an exception !: %s", str(e)) return False diff --git a/airflow/providers/apache/hive/hooks/hive.py b/airflow/providers/apache/hive/hooks/hive.py index e2f3030d53bfb..bcb31f97349d5 100644 --- a/airflow/providers/apache/hive/hooks/hive.py +++ b/airflow/providers/apache/hive/hooks/hive.py @@ -777,7 +777,7 @@ def table_exists(self, table_name: str, db: str = 'default') -> bool: try: self.get_table(table_name, db) return True - except Exception: # pylint: disable=broad-except + except Exception: return False def drop_partitions(self, table_name, part_vals, delete_data=False, db='default'): @@ -838,7 +838,7 @@ def get_conn(self, schema: Optional[str] = None) -> Any: """Returns a Hive connection object.""" username: Optional[str] = None password: Optional[str] = None - # pylint: disable=no-member + db = self.get_connection(self.hiveserver2_conn_id) # type: ignore auth_mechanism = db.extra_dejson.get('authMechanism', 'NONE') @@ -874,8 +874,6 @@ def get_conn(self, schema: Optional[str] = None) -> Any: database=schema or db.schema or 'default', ) - # pylint: enable=no-member - def _get_results( self, hql: Union[str, str, List[str]], @@ -893,9 +891,9 @@ def _get_results( cur.arraysize = fetch_size or 1000 # not all query services (e.g. impala AIRFLOW-4434) support the set command - # pylint: disable=no-member + db = self.get_connection(self.hiveserver2_conn_id) # type: ignore - # pylint: enable=no-member + if db.extra_dejson.get('run_set_variable_statements', True): env_context = get_context_from_env_var() if hive_conf: diff --git a/airflow/providers/apache/hive/operators/hive.py b/airflow/providers/apache/hive/operators/hive.py index a0d8a663daca5..7017ea1929938 100644 --- a/airflow/providers/apache/hive/operators/hive.py +++ b/airflow/providers/apache/hive/operators/hive.py @@ -77,7 +77,6 @@ class HiveOperator(BaseOperator): ) ui_color = '#f0e4ec' - # pylint: disable=too-many-arguments def __init__( self, *, diff --git a/airflow/providers/apache/hive/transfers/mssql_to_hive.py b/airflow/providers/apache/hive/transfers/mssql_to_hive.py index 1c404edecd0b7..01c8b19fc936b 100644 --- a/airflow/providers/apache/hive/transfers/mssql_to_hive.py +++ b/airflow/providers/apache/hive/transfers/mssql_to_hive.py @@ -15,7 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
-# pylint: disable=no-member + """This module contains operator to move data from MSSQL to Hive.""" from collections import OrderedDict @@ -100,7 +100,6 @@ def __init__( self.tblproperties = tblproperties @classmethod - # pylint: disable=c-extension-no-member,no-member def type_map(cls, mssql_type: int) -> str: """Maps MsSQL type to Hive type.""" map_dict = { diff --git a/airflow/providers/apache/hive/transfers/mysql_to_hive.py b/airflow/providers/apache/hive/transfers/mysql_to_hive.py index 9c828aa32f94b..381ea2e2161d1 100644 --- a/airflow/providers/apache/hive/transfers/mysql_to_hive.py +++ b/airflow/providers/apache/hive/transfers/mysql_to_hive.py @@ -81,7 +81,7 @@ class MySqlToHiveOperator(BaseOperator): template_ext = ('.sql',) ui_color = '#a0e08c' - def __init__( # pylint: disable=too-many-arguments + def __init__( self, *, sql: str, diff --git a/airflow/providers/apache/hive/transfers/s3_to_hive.py b/airflow/providers/apache/hive/transfers/s3_to_hive.py index 4e833d73f5c5b..04b0fb5539585 100644 --- a/airflow/providers/apache/hive/transfers/s3_to_hive.py +++ b/airflow/providers/apache/hive/transfers/s3_to_hive.py @@ -32,7 +32,7 @@ from airflow.utils.compression import uncompress_file -class S3ToHiveOperator(BaseOperator): # pylint: disable=too-many-instance-attributes +class S3ToHiveOperator(BaseOperator): """ Moves data from S3 to Hive. The operator downloads a file from S3, stores the file locally before loading it into a Hive table. @@ -103,7 +103,7 @@ class S3ToHiveOperator(BaseOperator): # pylint: disable=too-many-instance-attri template_ext = () ui_color = '#a0e08c' - def __init__( # pylint: disable=too-many-arguments + def __init__( self, *, s3_key: str, diff --git a/airflow/providers/apache/kylin/operators/kylin_cube.py b/airflow/providers/apache/kylin/operators/kylin_cube.py index f5730d8a09c5b..76b57089b62a5 100644 --- a/airflow/providers/apache/kylin/operators/kylin_cube.py +++ b/airflow/providers/apache/kylin/operators/kylin_cube.py @@ -108,7 +108,6 @@ class KylinCubeOperator(BaseOperator): } jobs_end_status = {"FINISHED", "ERROR", "DISCARDED", "KILLED", "SUICIDAL", "STOPPED"} - # pylint: disable=too-many-arguments,inconsistent-return-statements def __init__( self, *, diff --git a/airflow/providers/apache/livy/hooks/livy.py b/airflow/providers/apache/livy/hooks/livy.py index 75d08af56883f..2aab543450569 100644 --- a/airflow/providers/apache/livy/hooks/livy.py +++ b/airflow/providers/apache/livy/hooks/livy.py @@ -281,7 +281,6 @@ def build_post_batch_body( Build the post batch request body. For more information about the format refer to .. seealso:: https://livy.apache.org/docs/latest/rest-api.html - :param file: Path of the file containing the application to execute (required). :type file: str :param proxy_user: User to impersonate when running the job. 
@@ -317,8 +316,6 @@ def build_post_batch_body( :return: request body :rtype: dict """ - # pylint: disable-msg=too-many-arguments - body: Dict[str, Any] = {'file': file} if proxy_user: diff --git a/airflow/providers/apache/livy/operators/livy.py b/airflow/providers/apache/livy/operators/livy.py index 6b06754f2b3d3..9936cf7f02ebd 100644 --- a/airflow/providers/apache/livy/operators/livy.py +++ b/airflow/providers/apache/livy/operators/livy.py @@ -95,7 +95,6 @@ def __init__( extra_options: Optional[Dict[str, Any]] = None, **kwargs: Any, ) -> None: - # pylint: disable-msg=too-many-arguments super().__init__(**kwargs) diff --git a/airflow/providers/apache/pinot/hooks/pinot.py b/airflow/providers/apache/pinot/hooks/pinot.py index b20d0262446f0..f48fd1b68f248 100644 --- a/airflow/providers/apache/pinot/hooks/pinot.py +++ b/airflow/providers/apache/pinot/hooks/pinot.py @@ -107,7 +107,6 @@ def add_table(self, file_path: str, with_exec: bool = True) -> Any: cmd += ["-exec"] self.run_cli(cmd) - # pylint: disable=too-many-arguments def create_segment( self, generator_config_file: Optional[str] = None, @@ -262,9 +261,8 @@ class PinotDbApiHook(DbApiHook): def get_conn(self) -> Any: """Establish a connection to pinot broker through pinot dbapi.""" - # pylint: disable=no-member conn = self.get_connection(self.pinot_broker_conn_id) # type: ignore - # pylint: enable=no-member + pinot_broker_conn = connect( host=conn.host, port=conn.port, diff --git a/airflow/providers/apache/spark/hooks/spark_jdbc.py b/airflow/providers/apache/spark/hooks/spark_jdbc.py index 9638f8c40c9a1..d5dfcdcddb8b8 100644 --- a/airflow/providers/apache/spark/hooks/spark_jdbc.py +++ b/airflow/providers/apache/spark/hooks/spark_jdbc.py @@ -23,7 +23,6 @@ from airflow.providers.apache.spark.hooks.spark_submit import SparkSubmitHook -# pylint: disable=too-many-instance-attributes class SparkJDBCHook(SparkSubmitHook): """ This hook extends the SparkSubmitHook specifically for performing data @@ -118,7 +117,6 @@ class SparkJDBCHook(SparkSubmitHook): conn_type = 'spark_jdbc' hook_name = 'Spark JDBC' - # pylint: disable=too-many-arguments,too-many-locals def __init__( self, spark_app_name: str = 'airflow-spark-jdbc', diff --git a/airflow/providers/apache/spark/hooks/spark_jdbc_script.py b/airflow/providers/apache/spark/hooks/spark_jdbc_script.py index 0a0a720752fe1..c354de6ab0ca4 100644 --- a/airflow/providers/apache/spark/hooks/spark_jdbc_script.py +++ b/airflow/providers/apache/spark/hooks/spark_jdbc_script.py @@ -54,7 +54,6 @@ def set_common_options( return spark_source -# pylint: disable=too-many-arguments def spark_write_to_jdbc( spark_session: SparkSession, url: str, @@ -87,7 +86,6 @@ def spark_write_to_jdbc( writer.save(mode=save_mode) -# pylint: disable=too-many-arguments def spark_read_from_jdbc( spark_session: SparkSession, url: str, diff --git a/airflow/providers/apache/spark/hooks/spark_sql.py b/airflow/providers/apache/spark/hooks/spark_sql.py index b690f2cf78637..945de8f24eabf 100644 --- a/airflow/providers/apache/spark/hooks/spark_sql.py +++ b/airflow/providers/apache/spark/hooks/spark_sql.py @@ -61,7 +61,6 @@ class SparkSqlHook(BaseHook): conn_type = 'spark_sql' hook_name = 'Spark SQL' - # pylint: disable=too-many-arguments def __init__( self, sql: str, @@ -158,7 +157,7 @@ def run_query(self, cmd: str = "", **kwargs: Any) -> None: :type kwargs: dict """ spark_sql_cmd = self._prepare_command(cmd) - # pylint: disable=consider-using-with + self._sp = subprocess.Popen(spark_sql_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, 
**kwargs) for line in iter(self._sp.stdout): # type: ignore diff --git a/airflow/providers/apache/spark/hooks/spark_submit.py b/airflow/providers/apache/spark/hooks/spark_submit.py index d27fa640e751f..821604b92efb2 100644 --- a/airflow/providers/apache/spark/hooks/spark_submit.py +++ b/airflow/providers/apache/spark/hooks/spark_submit.py @@ -34,7 +34,6 @@ pass -# pylint: disable=too-many-instance-attributes class SparkSubmitHook(BaseHook, LoggingMixin): """ This hook is a wrapper around the spark-submit binary to kick off a spark-submit job. @@ -118,7 +117,6 @@ def get_ui_field_behaviour() -> Dict: "relabeling": {}, } - # pylint: disable=too-many-arguments,too-many-locals,too-many-branches def __init__( self, conf: Optional[Dict[str, Any]] = None, @@ -427,7 +425,6 @@ def submit(self, application: str = "", **kwargs: Any) -> None: env.update(self._env) kwargs["env"] = env - # pylint: disable=consider-using-with self._submit_sp = subprocess.Popen( spark_submit_cmd, stdout=subprocess.PIPE, diff --git a/airflow/providers/apache/spark/operators/spark_jdbc.py b/airflow/providers/apache/spark/operators/spark_jdbc.py index b15ffca1429f5..280d428e25a54 100644 --- a/airflow/providers/apache/spark/operators/spark_jdbc.py +++ b/airflow/providers/apache/spark/operators/spark_jdbc.py @@ -22,7 +22,6 @@ from airflow.providers.apache.spark.operators.spark_submit import SparkSubmitOperator -# pylint: disable=too-many-instance-attributes class SparkJDBCOperator(SparkSubmitOperator): """ This operator extends the SparkSubmitOperator specifically for performing data @@ -118,7 +117,6 @@ class SparkJDBCOperator(SparkSubmitOperator): types. """ - # pylint: disable=too-many-arguments,too-many-locals def __init__( self, *, diff --git a/airflow/providers/apache/spark/operators/spark_sql.py b/airflow/providers/apache/spark/operators/spark_sql.py index 6c52fa2d58c03..536d830b70cb9 100644 --- a/airflow/providers/apache/spark/operators/spark_sql.py +++ b/airflow/providers/apache/spark/operators/spark_sql.py @@ -61,7 +61,6 @@ class SparkSqlOperator(BaseOperator): template_fields = ["_sql"] template_ext = [".sql", ".hql"] - # pylint: disable=too-many-arguments def __init__( self, *, diff --git a/airflow/providers/apache/spark/operators/spark_submit.py b/airflow/providers/apache/spark/operators/spark_submit.py index 090f1046e0d4d..f1e933cb8f6e3 100644 --- a/airflow/providers/apache/spark/operators/spark_submit.py +++ b/airflow/providers/apache/spark/operators/spark_submit.py @@ -23,7 +23,6 @@ from airflow.settings import WEB_COLORS -# pylint: disable=too-many-instance-attributes class SparkSubmitOperator(BaseOperator): """ This hook is a wrapper around the spark-submit binary to kick off a spark-submit job. 
@@ -113,7 +112,6 @@ class SparkSubmitOperator(BaseOperator): ) ui_color = WEB_COLORS['LIGHTORANGE'] - # pylint: disable=too-many-arguments,too-many-locals def __init__( self, *, diff --git a/airflow/providers/apache/sqoop/hooks/sqoop.py b/airflow/providers/apache/sqoop/hooks/sqoop.py index ed5378af20a07..21684a5a52c54 100644 --- a/airflow/providers/apache/sqoop/hooks/sqoop.py +++ b/airflow/providers/apache/sqoop/hooks/sqoop.py @@ -206,7 +206,6 @@ def _import_cmd( return cmd - # pylint: disable=too-many-arguments def import_table( self, table: str, @@ -280,7 +279,6 @@ def import_query( self.popen(cmd) - # pylint: disable=too-many-arguments def _export_cmd( self, table: str, @@ -348,7 +346,6 @@ def _export_cmd( return cmd - # pylint: disable=too-many-arguments def export_table( self, table: str, diff --git a/airflow/providers/apache/sqoop/operators/sqoop.py b/airflow/providers/apache/sqoop/operators/sqoop.py index a790e4995cdd0..242ed25a2fa55 100644 --- a/airflow/providers/apache/sqoop/operators/sqoop.py +++ b/airflow/providers/apache/sqoop/operators/sqoop.py @@ -26,7 +26,6 @@ from airflow.providers.apache.sqoop.hooks.sqoop import SqoopHook -# pylint: disable=too-many-instance-attributes class SqoopOperator(BaseOperator): """ Execute a Sqoop job. @@ -108,7 +107,6 @@ class SqoopOperator(BaseOperator): ) ui_color = '#7D8CA4' - # pylint: disable=too-many-arguments,too-many-locals def __init__( self, *, diff --git a/airflow/providers/asana/hooks/asana.py b/airflow/providers/asana/hooks/asana.py index ca8c4c3d3f16b..b1623f8a42272 100644 --- a/airflow/providers/asana/hooks/asana.py +++ b/airflow/providers/asana/hooks/asana.py @@ -91,7 +91,7 @@ def create_task(self, task_name: str, params: dict) -> dict: """ merged_params = self._merge_create_task_parameters(task_name, params) self._validate_create_task_parameters(merged_params) - response = self.client.tasks.create(params=merged_params) # pylint: disable=no-member + response = self.client.tasks.create(params=merged_params) return response def _merge_create_task_parameters(self, task_name: str, task_params: dict) -> dict: @@ -134,7 +134,7 @@ def delete_task(self, task_id: str) -> dict: :return: A dict containing the response from Asana """ try: - response = self.client.tasks.delete_task(task_id) # pylint: disable=no-member + response = self.client.tasks.delete_task(task_id) return response except NotFoundError: self.log.info("Asana task %s not found for deletion.", task_id) @@ -150,7 +150,7 @@ def find_task(self, params: dict) -> list: """ merged_params = self._merge_find_task_parameters(params) self._validate_find_task_parameters(merged_params) - response = self.client.tasks.find_all(params=merged_params) # pylint: disable=no-member + response = self.client.tasks.find_all(params=merged_params) return list(response) def _merge_find_task_parameters(self, search_parameters: dict) -> dict: @@ -198,7 +198,7 @@ def update_task(self, task_id: str, params: dict) -> dict: https://developers.asana.com/docs/update-a-task :return: A dict containing the updated task's attributes """ - response = self.client.tasks.update(task_id, params) # pylint: disable=no-member + response = self.client.tasks.update(task_id, params) return response def create_project(self, params: dict) -> dict: @@ -212,7 +212,7 @@ def create_project(self, params: dict) -> dict: """ merged_params = self._merge_project_parameters(params) self._validate_create_project_parameters(merged_params) - response = self.client.projects.create(merged_params) # pylint: disable=no-member + response = 
self.client.projects.create(merged_params) return response @staticmethod @@ -251,7 +251,7 @@ def find_project(self, params: dict) -> list: :return: A list of dicts containing attributes of matching Asana projects """ merged_params = self._merge_project_parameters(params) - response = self.client.projects.find_all(merged_params) # pylint: disable=no-member + response = self.client.projects.find_all(merged_params) return list(response) def update_project(self, project_id: str, params: dict) -> dict: @@ -264,7 +264,7 @@ def update_project(self, project_id: str, params: dict) -> dict: for a list of possible parameters :return: A dict containing the updated project's attributes """ - response = self.client.projects.update(project_id, params) # pylint: disable=no-member + response = self.client.projects.update(project_id, params) return response def delete_project(self, project_id: str) -> dict: @@ -275,7 +275,7 @@ def delete_project(self, project_id: str) -> dict: :return: A dict containing the response from Asana """ try: - response = self.client.projects.delete(project_id) # pylint: disable=no-member + response = self.client.projects.delete(project_id) return response except NotFoundError: self.log.info("Asana project %s not found for deletion.", project_id) diff --git a/airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py b/airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py index dae4c8a5077f3..7c28105fe6564 100644 --- a/airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py +++ b/airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py @@ -42,7 +42,7 @@ def _convert_from_dict(obj, new_class): return obj elif isinstance(obj, dict): api_client = ApiClient() - return api_client._ApiClient__deserialize_model(obj, new_class) # pylint: disable=W0212 + return api_client._ApiClient__deserialize_model(obj, new_class) else: raise AirflowException(f"Expected dict or {new_class}, got {type(obj)}") diff --git a/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py b/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py index 32cc9c92eef5a..af1828d479841 100644 --- a/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py +++ b/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py @@ -53,7 +53,7 @@ import jinja2 -class KubernetesPodOperator(BaseOperator): # pylint: disable=too-many-instance-attributes +class KubernetesPodOperator(BaseOperator): """ Execute a task in a Kubernetes Pod @@ -173,7 +173,7 @@ class KubernetesPodOperator(BaseOperator): # pylint: disable=too-many-instance- ) # fmt: off - def __init__( # pylint: disable=too-many-arguments,too-many-locals + def __init__( # fmt: on self, *, diff --git a/airflow/providers/databricks/hooks/databricks.py b/airflow/providers/databricks/hooks/databricks.py index 350e5b4c015d6..f83191ec834e8 100644 --- a/airflow/providers/databricks/hooks/databricks.py +++ b/airflow/providers/databricks/hooks/databricks.py @@ -86,7 +86,7 @@ def __repr__(self) -> str: return str(self.__dict__) -class DatabricksHook(BaseHook): # noqa +class DatabricksHook(BaseHook): """ Interact with Databricks. 
diff --git a/airflow/providers/databricks/operators/databricks.py b/airflow/providers/databricks/operators/databricks.py index ed37f753386ea..8956b43be1de7 100644 --- a/airflow/providers/databricks/operators/databricks.py +++ b/airflow/providers/databricks/operators/databricks.py @@ -247,7 +247,6 @@ class DatabricksSubmitRunOperator(BaseOperator): ui_color = '#1CB1C2' ui_fgcolor = '#fff' - # pylint: disable=too-many-arguments def __init__( self, *, @@ -457,7 +456,6 @@ class DatabricksRunNowOperator(BaseOperator): ui_color = '#1CB1C2' ui_fgcolor = '#fff' - # pylint: disable=too-many-arguments def __init__( self, *, diff --git a/airflow/providers/datadog/hooks/datadog.py b/airflow/providers/datadog/hooks/datadog.py index 2b993b09d5b0e..538a7757832b2 100644 --- a/airflow/providers/datadog/hooks/datadog.py +++ b/airflow/providers/datadog/hooks/datadog.py @@ -110,7 +110,6 @@ def query_metric(self, query: str, from_seconds_ago: int, to_seconds_ago: int) - self.validate_response(response) return response - # pylint: disable=too-many-arguments def post_event( self, title: str, diff --git a/airflow/providers/docker/example_dags/example_docker_copy_data.py b/airflow/providers/docker/example_dags/example_docker_copy_data.py index 7adb5ca840850..9dc8478041b1b 100644 --- a/airflow/providers/docker/example_dags/example_docker_copy_data.py +++ b/airflow/providers/docker/example_dags/example_docker_copy_data.py @@ -15,7 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -# pylint: disable=missing-function-docstring + """ This sample "listen to directory". move the new file and print it, using docker-containers. diff --git a/airflow/providers/docker/operators/docker.py b/airflow/providers/docker/operators/docker.py index 29a6f79298b8d..7b5d8c664d840 100644 --- a/airflow/providers/docker/operators/docker.py +++ b/airflow/providers/docker/operators/docker.py @@ -28,7 +28,6 @@ from airflow.providers.docker.hooks.docker import DockerHook -# pylint: disable=too-many-instance-attributes class DockerOperator(BaseOperator): """ Execute a command inside a docker container. 
@@ -135,7 +134,6 @@ class DockerOperator(BaseOperator): '.bash', ) - # pylint: disable=too-many-arguments,too-many-locals def __init__( self, *, @@ -296,7 +294,7 @@ def execute(self, context) -> Optional[str]: raise Exception("The 'cli' should be initialized before!") # Pull the docker image if `force_pull` is set or image does not exist locally - # pylint: disable=too-many-nested-blocks + if self.force_pull or not self.cli.images(name=self.image): self.log.info('Pulling docker image %s', self.image) latest_status = {} @@ -354,7 +352,7 @@ def __get_tls_config(self) -> Optional[tls.TLSConfig]: ca_cert=self.tls_ca_cert, client_cert=(self.tls_client_cert, self.tls_client_key), verify=True, - ssl_version=self.tls_ssl_version, # noqa + ssl_version=self.tls_ssl_version, assert_hostname=self.tls_hostname, ) self.docker_url = self.docker_url.replace('tcp://', 'https://') diff --git a/airflow/providers/elasticsearch/log/es_task_handler.py b/airflow/providers/elasticsearch/log/es_task_handler.py index ae08ecdb6a7e6..e20038c49d3bb 100644 --- a/airflow/providers/elasticsearch/log/es_task_handler.py +++ b/airflow/providers/elasticsearch/log/es_task_handler.py @@ -63,7 +63,7 @@ class ElasticsearchTaskHandler(FileTaskHandler, LoggingMixin): MAX_LINE_PER_PAGE = 1000 LOG_NAME = 'Elasticsearch' - def __init__( # pylint: disable=too-many-arguments + def __init__( self, base_log_folder: str, filename_template: str, @@ -210,9 +210,9 @@ def _format_msg(self, log_line): # if we change the formatter style from '%' to '{' or '$', this will still work if self.json_format: try: - # pylint: disable=protected-access + return self.formatter._style.format(_ESJsonLogFmt(self.json_fields, **log_line.to_dict())) - except Exception: # noqa pylint: disable=broad-except + except Exception: pass # Just a safe-guard to preserve backwards-compatibility @@ -243,7 +243,7 @@ def es_read(self, log_id: str, offset: str, metadata: dict) -> list: ) else: metadata['max_offset'] = 0 - except Exception: # pylint: disable=broad-except + except Exception: self.log.exception('Could not get current log size with log_id: %s', log_id) logs = [] @@ -251,7 +251,7 @@ def es_read(self, log_id: str, offset: str, metadata: dict) -> list: try: logs = search[self.MAX_LINE_PER_PAGE * self.PAGE : self.MAX_LINE_PER_PAGE].execute() - except Exception: # pylint: disable=broad-except + except Exception: self.log.exception('Could not read log with log_id: %s', log_id) return logs @@ -266,7 +266,7 @@ def set_context(self, ti: TaskInstance) -> None: if self.json_format: self.formatter = JSONFormatter( - fmt=self.formatter._fmt, # pylint: disable=protected-access + fmt=self.formatter._fmt, json_fields=self.json_fields, extras={ 'dag_id': str(ti.dag_id), @@ -311,7 +311,7 @@ def close(self) -> None: # Reopen the file stream, because FileHandler.close() would be called # first in logging.shutdown() and the stream in it would be set to None. if self.handler.stream is None or self.handler.stream.closed: - self.handler.stream = self.handler._open() # pylint: disable=protected-access + self.handler.stream = self.handler._open() # Mark the end of file using end of log mark, # so we know where to stop while auto-tailing. 
diff --git a/airflow/providers/ftp/hooks/ftp.py b/airflow/providers/ftp/hooks/ftp.py index a03e461a6169a..2db2a335c797d 100644 --- a/airflow/providers/ftp/hooks/ftp.py +++ b/airflow/providers/ftp/hooks/ftp.py @@ -174,7 +174,7 @@ def write_to_file_with_progress(data): # file-like buffer if not callback: if is_path: - # pylint: disable=consider-using-with + output_handle = open(local_full_path_or_buffer, 'wb') else: output_handle = local_full_path_or_buffer @@ -210,7 +210,7 @@ def store_file(self, remote_full_path: str, local_full_path_or_buffer: Any) -> N is_path = isinstance(local_full_path_or_buffer, str) if is_path: - # pylint: disable=consider-using-with + input_handle = open(local_full_path_or_buffer, 'rb') else: input_handle = local_full_path_or_buffer diff --git a/airflow/providers/google/cloud/example_dags/example_automl_tables.py b/airflow/providers/google/cloud/example_dags/example_automl_tables.py index 4f3555282f719..39f479b6a774b 100644 --- a/airflow/providers/google/cloud/example_dags/example_automl_tables.py +++ b/airflow/providers/google/cloud/example_dags/example_automl_tables.py @@ -176,14 +176,14 @@ def get_target_column_spec(columns_specs: List[Dict], column_name: str) -> str: ) ( - create_dataset_task # noqa - >> import_dataset_task # noqa - >> list_tables_spec_task # noqa - >> list_columns_spec_task # noqa - >> update_dataset_task # noqa - >> create_model_task # noqa - >> delete_model_task # noqa - >> delete_datasets_task # noqa + create_dataset_task + >> import_dataset_task + >> list_tables_spec_task + >> list_columns_spec_task + >> update_dataset_task + >> create_model_task + >> delete_model_task + >> delete_datasets_task ) @@ -243,12 +243,12 @@ def get_target_column_spec(columns_specs: List[Dict], column_name: str) -> str: # [END howto_operator_delete_dataset] ( - create_dataset_task # noqa - >> import_dataset_task # noqa - >> list_tables_spec_task # noqa - >> list_columns_spec_task # noqa - >> list_datasets_task # noqa - >> delete_datasets_task # noqa + create_dataset_task + >> import_dataset_task + >> list_tables_spec_task + >> list_columns_spec_task + >> list_datasets_task + >> delete_datasets_task ) with models.DAG( diff --git a/airflow/providers/google/cloud/example_dags/example_bigquery_dts.py b/airflow/providers/google/cloud/example_dags/example_bigquery_dts.py index b0e2b86a9f141..965f3f918e9b8 100644 --- a/airflow/providers/google/cloud/example_dags/example_bigquery_dts.py +++ b/airflow/providers/google/cloud/example_dags/example_bigquery_dts.py @@ -105,8 +105,8 @@ # [END howto_bigquery_delete_data_transfer] ( - gcp_bigquery_create_transfer # noqa - >> gcp_bigquery_start_transfer # noqa - >> gcp_run_sensor # noqa - >> gcp_bigquery_delete_transfer # noqa + gcp_bigquery_create_transfer + >> gcp_bigquery_start_transfer + >> gcp_run_sensor + >> gcp_bigquery_delete_transfer ) diff --git a/airflow/providers/google/cloud/example_dags/example_bigtable.py b/airflow/providers/google/cloud/example_dags/example_bigtable.py index fc62cdf5b5c82..2f3cfd0329655 100644 --- a/airflow/providers/google/cloud/example_dags/example_bigtable.py +++ b/airflow/providers/google/cloud/example_dags/example_bigtable.py @@ -30,7 +30,7 @@ * CBT_INSTANCE_ID - desired ID of a Cloud Bigtable instance * CBT_INSTANCE_DISPLAY_NAME - desired human-readable display name of the Instance * CBT_INSTANCE_TYPE - type of the Instance, e.g. 
1 for DEVELOPMENT - See https://googleapis.github.io/google-cloud-python/latest/bigtable/instance.html#google.cloud.bigtable.instance.Instance # noqa E501 # pylint: disable=line-too-long + See https://googleapis.github.io/google-cloud-python/latest/bigtable/instance.html#google.cloud.bigtable.instance.Instance # noqa E501 * CBT_INSTANCE_LABELS - labels to add for the Instance * CBT_CLUSTER_ID - desired ID of the main Cluster created for the Instance * CBT_CLUSTER_ZONE - zone in which main Cluster will be created. e.g. europe-west1-b @@ -38,7 +38,7 @@ * CBT_CLUSTER_NODES - initial amount of nodes of the Cluster * CBT_CLUSTER_NODES_UPDATED - amount of nodes for BigtableClusterUpdateOperator * CBT_CLUSTER_STORAGE_TYPE - storage for the Cluster, e.g. 1 for SSD - See https://googleapis.github.io/google-cloud-python/latest/bigtable/instance.html#google.cloud.bigtable.instance.Instance.cluster # noqa E501 # pylint: disable=line-too-long + See https://googleapis.github.io/google-cloud-python/latest/bigtable/instance.html#google.cloud.bigtable.instance.Instance.cluster # noqa E501 * CBT_TABLE_ID - desired ID of the Table * CBT_POKE_INTERVAL - number of seconds between every attempt of Sensor check diff --git a/airflow/providers/google/cloud/example_dags/example_cloud_build.py b/airflow/providers/google/cloud/example_dags/example_cloud_build.py index cfaf364eb4323..1591fe0a64621 100644 --- a/airflow/providers/google/cloud/example_dags/example_cloud_build.py +++ b/airflow/providers/google/cloud/example_dags/example_cloud_build.py @@ -110,6 +110,6 @@ params={'name': 'Airflow'}, ) # [END howto_operator_gcp_create_build_from_yaml_body] - create_build_from_storage >> create_build_from_storage_result # pylint: disable=pointless-statement + create_build_from_storage >> create_build_from_storage_result - create_build_from_repo >> create_build_from_repo_result # pylint: disable=pointless-statement + create_build_from_repo >> create_build_from_repo_result diff --git a/airflow/providers/google/cloud/example_dags/example_cloud_sql.py b/airflow/providers/google/cloud/example_dags/example_cloud_sql.py index 286ecf38f0799..d1d9fd45022e3 100644 --- a/airflow/providers/google/cloud/example_dags/example_cloud_sql.py +++ b/airflow/providers/google/cloud/example_dags/example_cloud_sql.py @@ -309,26 +309,26 @@ ) ( - sql_instance_create_task # noqa - >> sql_instance_create_2_task # noqa - >> sql_instance_read_replica_create # noqa - >> sql_instance_patch_task # noqa - >> sql_instance_patch_task2 # noqa - >> sql_db_create_task # noqa - >> sql_db_create_task2 # noqa - >> sql_db_patch_task # noqa - >> sql_db_patch_task2 # noqa - >> sql_gcp_add_bucket_permission_task # noqa - >> sql_export_task # noqa - >> sql_export_task2 # noqa - >> sql_gcp_add_object_permission_task # noqa - >> sql_gcp_add_bucket_permission_2_task # noqa - >> sql_import_task # noqa - >> sql_import_task2 # noqa - >> sql_db_delete_task # noqa - >> sql_db_delete_task2 # noqa - >> sql_instance_failover_replica_delete_task # noqa - >> sql_instance_read_replica_delete_task # noqa - >> sql_instance_delete_task # noqa - >> sql_instance_delete_2_task # noqa + sql_instance_create_task + >> sql_instance_create_2_task + >> sql_instance_read_replica_create + >> sql_instance_patch_task + >> sql_instance_patch_task2 + >> sql_db_create_task + >> sql_db_create_task2 + >> sql_db_patch_task + >> sql_db_patch_task2 + >> sql_gcp_add_bucket_permission_task + >> sql_export_task + >> sql_export_task2 + >> sql_gcp_add_object_permission_task + >> 
sql_gcp_add_bucket_permission_2_task + >> sql_import_task + >> sql_import_task2 + >> sql_db_delete_task + >> sql_db_delete_task2 + >> sql_instance_failover_replica_delete_task + >> sql_instance_read_replica_delete_task + >> sql_instance_delete_task + >> sql_instance_delete_2_task ) diff --git a/airflow/providers/google/cloud/example_dags/example_tasks.py b/airflow/providers/google/cloud/example_dags/example_tasks.py index 82335de613251..dc2759fd2c2dc 100644 --- a/airflow/providers/google/cloud/example_dags/example_tasks.py +++ b/airflow/providers/google/cloud/example_dags/example_tasks.py @@ -49,7 +49,7 @@ from airflow.utils.dates import days_ago timestamp = timestamp_pb2.Timestamp() -timestamp.FromDatetime(datetime.now() + timedelta(hours=12)) # pylint: disable=no-member +timestamp.FromDatetime(datetime.now() + timedelta(hours=12)) LOCATION = "europe-west1" QUEUE_ID = os.environ.get('GCP_TASKS_QUEUE_ID', "cloud-tasks-queue") diff --git a/airflow/providers/google/cloud/example_dags/example_vision.py b/airflow/providers/google/cloud/example_dags/example_vision.py index 516a70dfaf53d..9e38a314dc5c5 100644 --- a/airflow/providers/google/cloud/example_dags/example_vision.py +++ b/airflow/providers/google/cloud/example_dags/example_vision.py @@ -57,23 +57,23 @@ from airflow.utils.dates import days_ago # [START howto_operator_vision_retry_import] -from google.api_core.retry import Retry # isort:skip pylint: disable=wrong-import-order +from google.api_core.retry import Retry # isort:skip # [END howto_operator_vision_retry_import] # [START howto_operator_vision_product_set_import] -from google.cloud.vision_v1.types import ProductSet # isort:skip pylint: disable=wrong-import-order +from google.cloud.vision_v1.types import ProductSet # isort:skip # [END howto_operator_vision_product_set_import] # [START howto_operator_vision_product_import] -from google.cloud.vision_v1.types import Product # isort:skip pylint: disable=wrong-import-order +from google.cloud.vision_v1.types import Product # isort:skip # [END howto_operator_vision_product_import] # [START howto_operator_vision_reference_image_import] -from google.cloud.vision_v1.types import ReferenceImage # isort:skip pylint: disable=wrong-import-order +from google.cloud.vision_v1.types import ReferenceImage # isort:skip # [END howto_operator_vision_reference_image_import] # [START howto_operator_vision_enums_import] -from google.cloud.vision import enums # isort:skip pylint: disable=wrong-import-order +from google.cloud.vision import enums # isort:skip # [END howto_operator_vision_enums_import] diff --git a/airflow/providers/google/cloud/hooks/automl.py b/airflow/providers/google/cloud/hooks/automl.py index a21fa0a2f085e..63a002a6ae7e3 100644 --- a/airflow/providers/google/cloud/hooks/automl.py +++ b/airflow/providers/google/cloud/hooks/automl.py @@ -332,7 +332,7 @@ def import_data( return result @GoogleBaseHook.fallback_to_default_project_id - def list_column_specs( # pylint: disable=too-many-arguments + def list_column_specs( self, dataset_id: str, table_spec_id: str, diff --git a/airflow/providers/google/cloud/hooks/bigquery.py b/airflow/providers/google/cloud/hooks/bigquery.py index e36baf548c9fd..20afbc086d58c 100644 --- a/airflow/providers/google/cloud/hooks/bigquery.py +++ b/airflow/providers/google/cloud/hooks/bigquery.py @@ -63,7 +63,6 @@ BigQueryJob = Union[CopyJob, QueryJob, LoadJob, ExtractJob] -# pylint: disable=too-many-public-methods class BigQueryHook(GoogleBaseHook, DbApiHook): """ Interact with BigQuery. 
This hook uses the Google Cloud connection. @@ -285,7 +284,7 @@ def table_partition_exists( return False @GoogleBaseHook.fallback_to_default_project_id - def create_empty_table( # pylint: disable=too-many-arguments + def create_empty_table( self, project_id: Optional[str] = None, dataset_id: Optional[str] = None, @@ -533,7 +532,7 @@ def delete_dataset( ) @GoogleBaseHook.fallback_to_default_project_id - def create_external_table( # pylint: disable=too-many-locals,too-many-arguments + def create_external_table( self, external_project_dataset_table: str, schema_fields: List, @@ -753,7 +752,7 @@ def update_table( return table_object.to_api_repr() @GoogleBaseHook.fallback_to_default_project_id - def patch_table( # pylint: disable=too-many-arguments + def patch_table( self, dataset_id: str, table_id: str, @@ -1012,7 +1011,7 @@ def patch_dataset( self.log.info('Start patching dataset: %s:%s', dataset_project_id, dataset_id) dataset = ( - service.datasets() # pylint: disable=no-member + service.datasets() .patch( datasetId=dataset_id, projectId=dataset_project_id, @@ -1619,14 +1618,14 @@ def insert_job( "configuration": configuration, "jobReference": {"jobId": job_id, "projectId": project_id, "location": location}, } - # pylint: disable=protected-access + supported_jobs = { LoadJob._JOB_TYPE: LoadJob, CopyJob._JOB_TYPE: CopyJob, ExtractJob._JOB_TYPE: ExtractJob, QueryJob._JOB_TYPE: QueryJob, } - # pylint: enable=protected-access + job = None for job_type, job_object in supported_jobs.items(): if job_type in configuration: @@ -1659,7 +1658,7 @@ def run_with_configuration(self, configuration: dict) -> str: self.running_job_id = job.job_id return job.job_id - def run_load( # pylint: disable=too-many-locals,too-many-arguments,invalid-name + def run_load( self, destination_project_dataset_table: str, source_uris: List, @@ -1784,7 +1783,7 @@ def run_load( # pylint: disable=too-many-locals,too-many-arguments,invalid-name # we check to make sure the passed source format is valid # if it's not, we raise a ValueError # Refer to this link for more details: - # https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions.(key).sourceFormat # noqa # pylint: disable=line-too-long + # https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions.(key).sourceFormat # noqa if schema_fields is None and not autodetect: raise ValueError('You must either pass a schema or autodetect=True.') @@ -1922,7 +1921,7 @@ def run_load( # pylint: disable=too-many-locals,too-many-arguments,invalid-name self.running_job_id = job.job_id return job.job_id - def run_copy( # pylint: disable=invalid-name + def run_copy( self, source_project_dataset_tables: Union[List, str], destination_project_dataset_table: str, @@ -2090,7 +2089,6 @@ def run_extract( self.running_job_id = job.job_id return job.job_id - # pylint: disable=too-many-locals,too-many-arguments, too-many-branches def run_query( self, sql: str, @@ -2224,7 +2222,7 @@ def run_query( # BigQuery also allows you to define how you want a table's schema to change # as a side effect of a query job # for more details: - # https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.schemaUpdateOptions # noqa # pylint: disable=line-too-long + # https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.schemaUpdateOptions # noqa allowed_schema_update_options = ['ALLOW_FIELD_ADDITION', "ALLOW_FIELD_RELAXATION"] @@ -2366,18 +2364,18 @@ def __init__(self, *args, **kwargs) 
-> None: self._args = args self._kwargs = kwargs - def close(self) -> None: # noqa: D403 - """BigQueryConnection does not have anything to close""" + def close(self) -> None: + """The BigQueryConnection does not have anything to close""" - def commit(self) -> None: # noqa: D403 - """BigQueryConnection does not support transactions""" + def commit(self) -> None: + """The BigQueryConnection does not support transactions""" - def cursor(self) -> "BigQueryCursor": # noqa: D403 + def cursor(self) -> "BigQueryCursor": """Return a new :py:class:`Cursor` object using the connection""" return BigQueryCursor(*self._args, **self._kwargs) - def rollback(self) -> NoReturn: # noqa: D403 - """BigQueryConnection does not have transactions""" + def rollback(self) -> NoReturn: + """The BigQueryConnection does not have transactions""" raise NotImplementedError("BigQueryConnection does not have transactions") @@ -2659,7 +2657,7 @@ def cancel_query(self, *args, **kwargs) -> None: DeprecationWarning, stacklevel=3, ) - return self.hook.cancel_query(*args, **kwargs) # type: ignore # noqa + return self.hook.cancel_query(*args, **kwargs) # type: ignore def run_with_configuration(self, *args, **kwargs) -> str: """ @@ -2807,7 +2805,6 @@ def flush_results(self) -> None: def fetchone(self) -> Union[List, None]: """Fetch the next row of a query result set""" - # pylint: disable=not-callable return self.next() def next(self) -> Union[List, None]: diff --git a/airflow/providers/google/cloud/hooks/bigtable.py b/airflow/providers/google/cloud/hooks/bigtable.py index 60e309d84aa73..148e5e42b0e9a 100644 --- a/airflow/providers/google/cloud/hooks/bigtable.py +++ b/airflow/providers/google/cloud/hooks/bigtable.py @@ -38,7 +38,6 @@ class BigtableHook(GoogleBaseHook): keyword arguments rather than positional. """ - # pylint: disable=too-many-arguments def __init__( self, gcp_conn_id: str = "google_cloud_default", diff --git a/airflow/providers/google/cloud/hooks/cloud_build.py b/airflow/providers/google/cloud/hooks/cloud_build.py index 07669d3d030d8..c228dfaca576b 100644 --- a/airflow/providers/google/cloud/hooks/cloud_build.py +++ b/airflow/providers/google/cloud/hooks/cloud_build.py @@ -100,7 +100,7 @@ def create_build(self, body: dict, project_id: str) -> dict: # Create build response = ( - service.projects() # pylint: disable=no-member + service.projects() .builds() .create(projectId=project_id, body=body) .execute(num_retries=self.num_retries) @@ -114,7 +114,7 @@ def create_build(self, body: dict, project_id: str) -> dict: build_id = response["metadata"]["build"]["id"] result = ( - service.projects() # pylint: disable=no-member + service.projects() .builds() .get(projectId=project_id, id=build_id) .execute(num_retries=self.num_retries) @@ -136,10 +136,7 @@ def _wait_for_operation_to_complete(self, operation_name: str) -> None: service = self.get_conn() while True: operation_response = ( - # pylint: disable=no-member - service.operations() - .get(name=operation_name) - .execute(num_retries=self.num_retries) + service.operations().get(name=operation_name).execute(num_retries=self.num_retries) ) if operation_response.get("done"): response = operation_response.get("response") diff --git a/airflow/providers/google/cloud/hooks/cloud_sql.py b/airflow/providers/google/cloud/hooks/cloud_sql.py index 3fa00c57a3eae..f16c097d4d3bf 100644 --- a/airflow/providers/google/cloud/hooks/cloud_sql.py +++ b/airflow/providers/google/cloud/hooks/cloud_sql.py @@ -15,7 +15,7 @@ # KIND, either express or implied. 
See the License for the # specific language governing permissions and limitations # under the License. -# pylint: disable=too-many-lines + """This module contains a Google Cloud SQL Hook.""" import errno @@ -132,7 +132,7 @@ def get_instance(self, instance: str, project_id: str) -> dict: :rtype: dict """ return ( - self.get_conn() # noqa # pylint: disable=no-member + self.get_conn() .instances() .get(project=project_id, instance=instance) .execute(num_retries=self.num_retries) @@ -153,7 +153,7 @@ def create_instance(self, body: Dict, project_id: str) -> None: :return: None """ response = ( - self.get_conn() # noqa # pylint: disable=no-member + self.get_conn() .instances() .insert(project=project_id, body=body) .execute(num_retries=self.num_retries) @@ -181,7 +181,7 @@ def patch_instance(self, body: dict, instance: str, project_id: str) -> None: :return: None """ response = ( - self.get_conn() # noqa # pylint: disable=no-member + self.get_conn() .instances() .patch(project=project_id, instance=instance, body=body) .execute(num_retries=self.num_retries) @@ -203,7 +203,7 @@ def delete_instance(self, instance: str, project_id: str) -> None: :return: None """ response = ( - self.get_conn() # noqa # pylint: disable=no-member + self.get_conn() .instances() .delete(project=project_id, instance=instance) .execute(num_retries=self.num_retries) @@ -228,7 +228,7 @@ def get_database(self, instance: str, database: str, project_id: str) -> dict: :rtype: dict """ return ( - self.get_conn() # noqa # pylint: disable=no-member + self.get_conn() .databases() .get(project=project_id, instance=instance, database=database) .execute(num_retries=self.num_retries) @@ -251,7 +251,7 @@ def create_database(self, instance: str, body: Dict, project_id: str) -> None: :return: None """ response = ( - self.get_conn() # noqa # pylint: disable=no-member + self.get_conn() .databases() .insert(project=project_id, instance=instance, body=body) .execute(num_retries=self.num_retries) @@ -287,7 +287,7 @@ def patch_database( :return: None """ response = ( - self.get_conn() # noqa # pylint: disable=no-member + self.get_conn() .databases() .patch(project=project_id, instance=instance, database=database, body=body) .execute(num_retries=self.num_retries) @@ -311,7 +311,7 @@ def delete_database(self, instance: str, database: str, project_id: str) -> None :return: None """ response = ( - self.get_conn() # noqa # pylint: disable=no-member + self.get_conn() .databases() .delete(project=project_id, instance=instance, database=database) .execute(num_retries=self.num_retries) @@ -338,7 +338,7 @@ def export_instance(self, instance: str, body: Dict, project_id: str) -> None: :return: None """ response = ( - self.get_conn() # noqa # pylint: disable=no-member + self.get_conn() .instances() .export(project=project_id, instance=instance, body=body) .execute(num_retries=self.num_retries) @@ -365,7 +365,7 @@ def import_instance(self, instance: str, body: Dict, project_id: str) -> None: """ try: response = ( - self.get_conn() # noqa # pylint: disable=no-member + self.get_conn() .instances() .import_(project=project_id, instance=instance, body=body) .execute(num_retries=self.num_retries) @@ -389,7 +389,7 @@ def _wait_for_operation_to_complete(self, project_id: str, operation_name: str) service = self.get_conn() while True: operation_response = ( - service.operations() # noqa # pylint: disable=no-member + service.operations() .get(project=project_id, operation=operation_name) .execute(num_retries=self.num_retries) ) @@ -565,9 +565,9 @@ def 
start_proxy(self) -> None: command_to_run.extend(self.command_line_parameters) self.log.info("Creating directory %s", self.cloud_sql_proxy_socket_directory) Path(self.cloud_sql_proxy_socket_directory).mkdir(parents=True, exist_ok=True) - command_to_run.extend(self._get_credential_parameters()) # pylint: disable=no-value-for-parameter + command_to_run.extend(self._get_credential_parameters()) self.log.info("Running the command: `%s`", " ".join(command_to_run)) - # pylint: disable=consider-using-with + self.sql_proxy_process = Popen(command_to_run, stdin=PIPE, stdout=PIPE, stderr=PIPE) self.log.info("The pid of cloud_sql_proxy: %s", self.sql_proxy_process.pid) while True: @@ -625,7 +625,7 @@ def get_proxy_version(self) -> Optional[str]: self._download_sql_proxy_if_needed() command_to_run = [self.sql_proxy_path] command_to_run.extend(['--version']) - command_to_run.extend(self._get_credential_parameters()) # pylint: disable=no-value-for-parameter + command_to_run.extend(self._get_credential_parameters()) result = subprocess.check_output(command_to_run).decode('utf-8') pattern = re.compile("^.*[V|v]ersion ([^;]*);.*$") matched = pattern.match(result) @@ -674,10 +674,8 @@ def get_socket_path(self) -> str: CLOUD_SQL_VALID_DATABASE_TYPES = ['postgres', 'mysql'] -class CloudSQLDatabaseHook(BaseHook): # noqa - # pylint: disable=too-many-instance-attributes - """ - Serves DB connection configuration for Google Cloud SQL (Connections +class CloudSQLDatabaseHook(BaseHook): + """Serves DB connection configuration for Google Cloud SQL (Connections of *gcpcloudsqldb://* type). The hook is a "meta" one. It does not perform an actual connection. @@ -725,6 +723,7 @@ class CloudSQLDatabaseHook(BaseHook): # noqa in the connection URL :type default_gcp_project_id: str """ + conn_name_attr = 'gcp_cloudsql_conn_id' default_conn_name = 'google_cloud_sql_default' conn_type = 'gcpcloudsqldb' diff --git a/airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py b/airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py index c42bd24dcfbaa..36905fb3bf10b 100644 --- a/airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py +++ b/airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py @@ -166,7 +166,7 @@ def create_transfer_job(self, body: dict) -> dict: """ body = self._inject_project_id(body, BODY, PROJECT_ID) try: - # pylint: disable=no-member + transfer_job = ( self.get_conn().transferJobs().create(body=body).execute(num_retries=self.num_retries) ) @@ -186,7 +186,7 @@ def create_transfer_job(self, body: dict) -> dict: job_name, {body[JOB_NAME]}, ) - # pylint: disable=no-member + return ( self.get_conn().transferJobs().create(body=body).execute(num_retries=self.num_retries) ) @@ -213,7 +213,7 @@ def get_transfer_job(self, job_name: str, project_id: str) -> dict: :rtype: dict """ return ( - self.get_conn() # pylint: disable=no-member + self.get_conn() .transferJobs() .get(jobName=job_name, projectId=project_id) .execute(num_retries=self.num_retries) @@ -243,14 +243,13 @@ def list_transfer_job(self, request_filter: Optional[dict] = None, **kwargs) -> conn = self.get_conn() request_filter = self._inject_project_id(request_filter, FILTER, FILTER_PROJECT_ID) - request = conn.transferJobs().list(filter=json.dumps(request_filter)) # pylint: disable=no-member + request = conn.transferJobs().list(filter=json.dumps(request_filter)) jobs: List[dict] = [] while request is not None: response = request.execute(num_retries=self.num_retries) 
jobs.extend(response[TRANSFER_JOBS]) - # pylint: disable=no-member request = conn.transferJobs().list_next(previous_request=request, previous_response=response) return jobs @@ -270,7 +269,7 @@ def enable_transfer_job(self, job_name: str, project_id: str) -> dict: :rtype: dict """ return ( - self.get_conn() # pylint: disable=no-member + self.get_conn() .transferJobs() .patch( jobName=job_name, @@ -297,7 +296,7 @@ def update_transfer_job(self, job_name: str, body: dict) -> dict: """ body = self._inject_project_id(body, BODY, PROJECT_ID) return ( - self.get_conn() # pylint: disable=no-member + self.get_conn() .transferJobs() .patch(jobName=job_name, body=body) .execute(num_retries=self.num_retries) @@ -320,7 +319,7 @@ def delete_transfer_job(self, job_name: str, project_id: str) -> None: :rtype: None """ ( - self.get_conn() # pylint: disable=no-member + self.get_conn() .transferJobs() .patch( jobName=job_name, @@ -341,9 +340,7 @@ def cancel_transfer_operation(self, operation_name: str) -> None: :type operation_name: str :rtype: None """ - self.get_conn().transferOperations().cancel(name=operation_name).execute( # pylint: disable=no-member - num_retries=self.num_retries - ) + self.get_conn().transferOperations().cancel(name=operation_name).execute(num_retries=self.num_retries) def get_transfer_operation(self, operation_name: str) -> dict: """ @@ -357,7 +354,7 @@ def get_transfer_operation(self, operation_name: str) -> dict: :rtype: dict """ return ( - self.get_conn() # pylint: disable=no-member + self.get_conn() .transferOperations() .get(name=operation_name) .execute(num_retries=self.num_retries) @@ -398,16 +395,14 @@ def list_transfer_operations(self, request_filter: Optional[dict] = None, **kwar operations: List[dict] = [] - request = conn.transferOperations().list( # pylint: disable=no-member - name=TRANSFER_OPERATIONS, filter=json.dumps(request_filter) - ) + request = conn.transferOperations().list(name=TRANSFER_OPERATIONS, filter=json.dumps(request_filter)) while request is not None: response = request.execute(num_retries=self.num_retries) if OPERATIONS in response: operations.extend(response[OPERATIONS]) - request = conn.transferOperations().list_next( # pylint: disable=no-member + request = conn.transferOperations().list_next( previous_request=request, previous_response=response ) @@ -421,9 +416,7 @@ def pause_transfer_operation(self, operation_name: str) -> None: :type operation_name: str :rtype: None """ - self.get_conn().transferOperations().pause(name=operation_name).execute( # pylint: disable=no-member - num_retries=self.num_retries - ) + self.get_conn().transferOperations().pause(name=operation_name).execute(num_retries=self.num_retries) def resume_transfer_operation(self, operation_name: str) -> None: """ @@ -433,9 +426,7 @@ def resume_transfer_operation(self, operation_name: str) -> None: :type operation_name: str :rtype: None """ - self.get_conn().transferOperations().resume(name=operation_name).execute( # pylint: disable=no-member - num_retries=self.num_retries - ) + self.get_conn().transferOperations().resume(name=operation_name).execute(num_retries=self.num_retries) def wait_for_transfer_job( self, diff --git a/airflow/providers/google/cloud/hooks/compute.py b/airflow/providers/google/cloud/hooks/compute.py index c4da00a6e562c..dd5d76134cd01 100644 --- a/airflow/providers/google/cloud/hooks/compute.py +++ b/airflow/providers/google/cloud/hooks/compute.py @@ -45,8 +45,6 @@ class ComputeEngineHook(GoogleBaseHook): keyword arguments rather than positional. 
""" - _conn = None # type: Optional[Any] - def __init__( self, api_version: str = 'v1', @@ -61,10 +59,11 @@ def __init__( ) self.api_version = api_version + _conn: Optional[Any] = None + def get_conn(self): """ Retrieves connection to Google Compute Engine. - :return: Google Compute Engine services object :rtype: dict """ @@ -89,7 +88,6 @@ def start_instance(self, zone: str, resource_id: str, project_id: str) -> None: :type project_id: str :return: None """ - # noqa pylint: disable=no-member response = ( self.get_conn() .instances() @@ -118,7 +116,6 @@ def stop_instance(self, zone: str, resource_id: str, project_id: str) -> None: :type project_id: str :return: None """ - # noqa pylint: disable=no-member response = ( self.get_conn() .instances() @@ -159,7 +156,7 @@ def set_machine_type(self, zone: str, resource_id: str, body: dict, project_id: self._wait_for_operation_to_complete(project_id=project_id, operation_name=operation_name, zone=zone) def _execute_set_machine_type(self, zone: str, resource_id: str, body: dict, project_id: str) -> dict: - # noqa pylint: disable=no-member + return ( self.get_conn() .instances() @@ -183,7 +180,6 @@ def get_instance_template(self, resource_id: str, project_id: str) -> dict: https://cloud.google.com/compute/docs/reference/rest/v1/instanceTemplates :rtype: dict """ - # noqa pylint: disable=no-member response = ( self.get_conn() .instanceTemplates() @@ -217,7 +213,6 @@ def insert_instance_template( :type project_id: str :return: None """ - # noqa pylint: disable=no-member response = ( self.get_conn() .instanceTemplates() @@ -253,7 +248,6 @@ def get_instance_group_manager( https://cloud.google.com/compute/docs/reference/rest/beta/instanceGroupManagers :rtype: dict """ - # noqa pylint: disable=no-member response = ( self.get_conn() .instanceGroupManagers() @@ -294,7 +288,6 @@ def patch_instance_group_manager( :type project_id: str :return: None """ - # noqa pylint: disable=no-member response = ( self.get_conn() .instanceGroupManagers() @@ -384,7 +377,7 @@ def get_instance_info(self, zone: str, resource_id: str, project_id: str) -> Dic :type project_id: str """ instance_info = ( - self.get_conn() # pylint: disable=no-member + self.get_conn() .instances() .get(project=project_id, instance=resource_id, zone=zone) .execute(num_retries=self.num_retries) @@ -437,11 +430,9 @@ def set_instance_metadata( :type project_id: str """ response = ( - self.get_conn() # pylint: disable=no-member + self.get_conn() .instances() - .setMetadata( # pylint: disable=no-member - project=project_id, zone=zone, instance=resource_id, body=metadata - ) + .setMetadata(project=project_id, zone=zone, instance=resource_id, body=metadata) .execute(num_retries=self.num_retries) ) operation_name = response["name"] diff --git a/airflow/providers/google/cloud/hooks/compute_ssh.py b/airflow/providers/google/cloud/hooks/compute_ssh.py index 04589316b6ab7..b0d24eb9fee68 100644 --- a/airflow/providers/google/cloud/hooks/compute_ssh.py +++ b/airflow/providers/google/cloud/hooks/compute_ssh.py @@ -42,7 +42,7 @@ def __init__(self, google_hook, *args, **kwargs): self.google_hook = google_hook self.decorator = None - def connect(self, *args, **kwargs): # pylint: disable=signature-differs + def connect(self, *args, **kwargs): self.decorator = self.google_hook.provide_authorized_gcloud() self.decorator.__enter__() return super().connect(*args, **kwargs) @@ -106,7 +106,7 @@ def get_ui_field_behaviour() -> Dict: "relabeling": {}, } - def __init__( # pylint: disable=too-many-arguments + def __init__( self, 
gcp_conn_id: str = 'google_cloud_default', instance_name: Optional[str] = None, @@ -121,7 +121,7 @@ def __init__( # pylint: disable=too-many-arguments delegate_to: Optional[str] = None, ) -> None: # Ignore original constructor - # super().__init__() # pylint: disable=super-init-not-called + # super().__init__() self.instance_name = instance_name self.zone = zone self.user = user @@ -170,25 +170,17 @@ def intify(key, value, default): conn = self.get_connection(self.gcp_conn_id) if conn and conn.conn_type == "gcpssh": - self.instance_name = self._compute_hook._get_field( # pylint: disable=protected-access - "instance_name", self.instance_name - ) - self.zone = self._compute_hook._get_field("zone", self.zone) # pylint: disable=protected-access + self.instance_name = self._compute_hook._get_field("instance_name", self.instance_name) + self.zone = self._compute_hook._get_field("zone", self.zone) self.user = conn.login if conn.login else self.user # self.project_id is skipped intentionally self.hostname = conn.host if conn.host else self.hostname - self.use_internal_ip = _boolify( - self._compute_hook._get_field("use_internal_ip") # pylint: disable=protected-access - ) - self.use_iap_tunnel = _boolify( - self._compute_hook._get_field("use_iap_tunnel") # pylint: disable=protected-access - ) - self.use_oslogin = _boolify( - self._compute_hook._get_field("use_oslogin") # pylint: disable=protected-access - ) + self.use_internal_ip = _boolify(self._compute_hook._get_field("use_internal_ip")) + self.use_iap_tunnel = _boolify(self._compute_hook._get_field("use_iap_tunnel")) + self.use_oslogin = _boolify(self._compute_hook._get_field("use_oslogin")) self.expire_time = intify( "expire_time", - self._compute_hook._get_field("expire_time"), # pylint: disable=protected-access + self._compute_hook._get_field("expire_time"), self.expire_time, ) @@ -300,7 +292,7 @@ def _authorize_compute_engine_instance_metadata(self, pubkey): ) def _authorize_os_login(self, pubkey): - username = self._oslogin_hook._get_credentials_email() # pylint: disable=protected-access + username = self._oslogin_hook._get_credentials_email() self.log.info("Importing SSH public key using OSLogin: user=%s", username) expiration = int((time.time() + self.expire_time) * 1000000) ssh_public_key = {"key": pubkey, "expiration_time_usec": expiration} diff --git a/airflow/providers/google/cloud/hooks/datacatalog.py b/airflow/providers/google/cloud/hooks/datacatalog.py index 0d6cc7505fd86..00db784d05a84 100644 --- a/airflow/providers/google/cloud/hooks/datacatalog.py +++ b/airflow/providers/google/cloud/hooks/datacatalog.py @@ -1093,7 +1093,7 @@ def update_entry( return result @GoogleBaseHook.fallback_to_default_project_id - def update_tag( # pylint: disable=too-many-arguments + def update_tag( self, tag: Union[Dict, Tag], update_mask: Union[Dict, FieldMask], @@ -1255,7 +1255,7 @@ def update_tag_template( return result @GoogleBaseHook.fallback_to_default_project_id - def update_tag_template_field( # pylint: disable=too-many-arguments + def update_tag_template_field( self, tag_template_field: Union[dict, TagTemplateField], update_mask: Union[dict, FieldMask], diff --git a/airflow/providers/google/cloud/hooks/dataflow.py b/airflow/providers/google/cloud/hooks/dataflow.py index ebdbfa33e30db..60b734239128f 100644 --- a/airflow/providers/google/cloud/hooks/dataflow.py +++ b/airflow/providers/google/cloud/hooks/dataflow.py @@ -44,7 +44,7 @@ r"Submitted job: (?P<job_id_java>.*)|Created job with id: \[(?P<job_id_python>.*)\]" ) -T = TypeVar("T", bound=Callable) # pylint:
disable=invalid-name +T = TypeVar("T", bound=Callable) def process_line_and_extract_dataflow_job_id_callback( @@ -184,7 +184,7 @@ class _DataflowJobsController(LoggingMixin): * for the batch pipeline, wait for the jobs to complete. """ - def __init__( # pylint: disable=too-many-arguments + def __init__( self, dataflow: Any, project_number: str, @@ -230,7 +230,6 @@ def is_job_running(self) -> bool: return True return False - # pylint: disable=too-many-nested-blocks def _get_current_jobs(self) -> List[dict]: """ Helper method to get list of jobs that start with job name or id @@ -605,7 +604,7 @@ def start_java_dataflow( job_class=job_class, process_line_callback=process_line_and_extract_dataflow_job_id_callback(on_new_job_id_callback), ) - self.wait_for_done( # pylint: disable=no-value-for-parameter + self.wait_for_done( job_name=name, location=location, job_id=self.job_id, @@ -696,7 +695,7 @@ def start_template_dataflow( environment.update({key: variables[key]}) service = self.get_conn() - # pylint: disable=no-member + request = ( service.projects() .locations() @@ -755,7 +754,7 @@ def start_flex_template( """ service = self.get_conn() request = ( - service.projects() # pylint: disable=no-member + service.projects() .locations() .flexTemplates() .launch(projectId=project_id, body=body, location=location) @@ -782,7 +781,7 @@ def start_flex_template( @_fallback_to_location_from_variables @_fallback_to_project_id_from_variables @GoogleBaseHook.fallback_to_default_project_id - def start_python_dataflow( # pylint: disable=too-many-arguments + def start_python_dataflow( self, job_name: str, variables: dict, @@ -860,7 +859,7 @@ def start_python_dataflow( # pylint: disable=too-many-arguments process_line_callback=process_line_and_extract_dataflow_job_id_callback(on_new_job_id_callback), ) - self.wait_for_done( # pylint: disable=no-value-for-parameter + self.wait_for_done( job_name=name, location=location, job_id=self.job_id, @@ -1008,9 +1007,7 @@ def start_sql_job( ] self.log.info("Executing command: %s", " ".join(shlex.quote(c) for c in cmd)) with self.provide_authorized_gcloud(): - proc = subprocess.run( # pylint: disable=subprocess-run-check - cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE - ) + proc = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) self.log.info("Output: %s", proc.stdout.decode()) self.log.warning("Stderr: %s", proc.stderr.decode()) self.log.info("Exit code %d", proc.returncode) diff --git a/airflow/providers/google/cloud/hooks/datafusion.py b/airflow/providers/google/cloud/hooks/datafusion.py index 8e686e8499fb2..4cc8f50dbb728 100644 --- a/airflow/providers/google/cloud/hooks/datafusion.py +++ b/airflow/providers/google/cloud/hooks/datafusion.py @@ -74,12 +74,7 @@ def wait_for_operation(self, operation: Dict[str, Any]) -> Dict[str, Any]: for time_to_wait in exponential_sleep_generator(initial=10, maximum=120): sleep(time_to_wait) operation = ( - self.get_conn() # pylint: disable=no-member - .projects() - .locations() - .operations() - .get(name=operation.get("name")) - .execute() + self.get_conn().projects().locations().operations().get(name=operation.get("name")).execute() ) if operation.get("done"): break @@ -181,7 +176,7 @@ def restart_instance(self, instance_name: str, location: str, project_id: str) - :type project_id: str """ operation = ( - self.get_conn() # pylint: disable=no-member + self.get_conn() .projects() .locations() .instances() @@ -203,7 +198,7 @@ def delete_instance(self, instance_name: str, location: str, project_id: str) -> :type 
project_id: str """ operation = ( - self.get_conn() # pylint: disable=no-member + self.get_conn() .projects() .locations() .instances() @@ -234,7 +229,7 @@ def create_instance( :type project_id: str """ operation = ( - self.get_conn() # pylint: disable=no-member + self.get_conn() .projects() .locations() .instances() @@ -260,7 +255,7 @@ def get_instance(self, instance_name: str, location: str, project_id: str) -> Di :type project_id: str """ instance = ( - self.get_conn() # pylint: disable=no-member + self.get_conn() .projects() .locations() .instances() @@ -299,7 +294,7 @@ def patch_instance( :type project_id: str """ operation = ( - self.get_conn() # pylint: disable=no-member + self.get_conn() .projects() .locations() .instances() diff --git a/airflow/providers/google/cloud/hooks/dataproc.py b/airflow/providers/google/cloud/hooks/dataproc.py index 976d4ab7c8c91..a5ccd3a4e218e 100644 --- a/airflow/providers/google/cloud/hooks/dataproc.py +++ b/airflow/providers/google/cloud/hooks/dataproc.py @@ -25,7 +25,7 @@ from google.api_core.exceptions import ServerError from google.api_core.retry import Retry -from google.cloud.dataproc_v1beta2 import ( # pylint: disable=no-name-in-module +from google.cloud.dataproc_v1beta2 import ( Cluster, ClusterControllerClient, Job, @@ -477,7 +477,7 @@ def list_clusters( return result @GoogleBaseHook.fallback_to_default_project_id - def update_cluster( # pylint: disable=too-many-arguments + def update_cluster( self, location: str, cluster_name: str, @@ -820,7 +820,7 @@ def submit( project_id: str, job: dict, region: str = 'global', - job_error_states: Optional[Iterable[str]] = None, # pylint: disable=unused-argument + job_error_states: Optional[Iterable[str]] = None, ) -> None: """ Submits Google Cloud Dataproc job. diff --git a/airflow/providers/google/cloud/hooks/datastore.py b/airflow/providers/google/cloud/hooks/datastore.py index d36c295a3919d..2c8699ce842f1 100644 --- a/airflow/providers/google/cloud/hooks/datastore.py +++ b/airflow/providers/google/cloud/hooks/datastore.py @@ -96,7 +96,7 @@ def allocate_ids(self, partial_keys: list, project_id: str) -> list: conn = self.get_conn() # type: Any resp = ( - conn.projects() # pylint: disable=no-member + conn.projects() .allocateIds(projectId=project_id, body={'keys': partial_keys}) .execute(num_retries=self.num_retries) ) @@ -121,7 +121,7 @@ def begin_transaction(self, project_id: str, transaction_options: Dict[str, Any] conn = self.get_conn() # type: Any resp = ( - conn.projects() # pylint: disable=no-member + conn.projects() .beginTransaction(projectId=project_id, body={"transactionOptions": transaction_options}) .execute(num_retries=self.num_retries) ) @@ -145,11 +145,7 @@ def commit(self, body: dict, project_id: str) -> dict: """ conn = self.get_conn() # type: Any - resp = ( - conn.projects() # pylint: disable=no-member - .commit(projectId=project_id, body=body) - .execute(num_retries=self.num_retries) - ) + resp = conn.projects().commit(projectId=project_id, body=body).execute(num_retries=self.num_retries) return resp @@ -186,11 +182,7 @@ def lookup( body['readConsistency'] = read_consistency if transaction: body['transaction'] = transaction - resp = ( - conn.projects() # pylint: disable=no-member - .lookup(projectId=project_id, body=body) - .execute(num_retries=self.num_retries) - ) + resp = conn.projects().lookup(projectId=project_id, body=body).execute(num_retries=self.num_retries) return resp @@ -209,9 +201,9 @@ def rollback(self, transaction: str, project_id: str) -> None: """ conn: Any = 
self.get_conn() - conn.projects().rollback( # pylint: disable=no-member - projectId=project_id, body={'transaction': transaction} - ).execute(num_retries=self.num_retries) + conn.projects().rollback(projectId=project_id, body={'transaction': transaction}).execute( + num_retries=self.num_retries + ) @GoogleBaseHook.fallback_to_default_project_id def run_query(self, body: dict, project_id: str) -> dict: @@ -230,11 +222,7 @@ def run_query(self, body: dict, project_id: str) -> dict: """ conn = self.get_conn() # type: Any - resp = ( - conn.projects() # pylint: disable=no-member - .runQuery(projectId=project_id, body=body) - .execute(num_retries=self.num_retries) - ) + resp = conn.projects().runQuery(projectId=project_id, body=body).execute(num_retries=self.num_retries) return resp['batch'] @@ -252,12 +240,7 @@ def get_operation(self, name: str) -> dict: """ conn: Any = self.get_conn() - resp = ( - conn.projects() # pylint: disable=no-member - .operations() - .get(name=name) - .execute(num_retries=self.num_retries) - ) + resp = conn.projects().operations().get(name=name).execute(num_retries=self.num_retries) return resp @@ -275,12 +258,7 @@ def delete_operation(self, name: str) -> dict: """ conn = self.get_conn() # type: Any - resp = ( - conn.projects() # pylint: disable=no-member - .operations() - .delete(name=name) - .execute(num_retries=self.num_retries) - ) + resp = conn.projects().operations().delete(name=name).execute(num_retries=self.num_retries) return resp @@ -351,7 +329,7 @@ def export_to_storage_bucket( 'labels': labels, } # type: Dict resp = ( - admin_conn.projects() # pylint: disable=no-member + admin_conn.projects() .export(projectId=project_id, body=body) .execute(num_retries=self.num_retries) ) @@ -405,7 +383,7 @@ def import_from_storage_bucket( 'labels': labels, } # type: Dict resp = ( - admin_conn.projects() # pylint: disable=no-member + admin_conn.projects() .import_(projectId=project_id, body=body) .execute(num_retries=self.num_retries) ) diff --git a/airflow/providers/google/cloud/hooks/dlp.py b/airflow/providers/google/cloud/hooks/dlp.py index 7cf5da23586b3..a03f2f1495979 100644 --- a/airflow/providers/google/cloud/hooks/dlp.py +++ b/airflow/providers/google/cloud/hooks/dlp.py @@ -55,7 +55,6 @@ DLP_JOB_PATH_PATTERN = "^projects/[^/]+/dlpJobs/(?P.*?)$" -# pylint: disable=R0904, C0302 class CloudDLPHook(GoogleBaseHook): """ Hook for Google Cloud Data Loss Prevention (DLP) APIs. 
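Most of the hook hunks above only drop `# pylint: disable=no-member` comments. Those disables were needed because the googleapiclient `Resource` objects returned by `get_conn()` build methods such as `projects()` or `transferJobs()` at runtime from the service's discovery document, so pylint's static analysis reported them as missing members; flake8 and mypy, which remain in CI, do not flag these dynamic attributes. The sketch below is not part of the patch and only illustrates the pattern; the service name, filter value, and print call are assumptions for the example, and making the request requires Application Default Credentials.

from googleapiclient.discovery import build

# Build a discovery-based client; the concrete service and version are illustrative.
service = build("storagetransfer", "v1", cache_discovery=False)

# transferJobs() and list() are resolved at runtime from the discovery document,
# which is why pylint could not verify them and the no-member disables existed.
request = service.transferJobs().list(filter='{"projectId": "example-project"}')
response = request.execute()
print(response.get("transferJobs", []))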
diff --git a/airflow/providers/google/cloud/hooks/functions.py b/airflow/providers/google/cloud/hooks/functions.py index 49a6e2c696e09..2110a6a6efc83 100644 --- a/airflow/providers/google/cloud/hooks/functions.py +++ b/airflow/providers/google/cloud/hooks/functions.py @@ -91,7 +91,7 @@ def get_function(self, name: str) -> dict: :rtype: dict """ # fmt: off - return self.get_conn().projects().locations().functions().get( # pylint: disable=no-member + return self.get_conn().projects().locations().functions().get( name=name).execute(num_retries=self.num_retries) # fmt: on @@ -110,7 +110,7 @@ def create_new_function(self, location: str, body: dict, project_id: str) -> Non :return: None """ # fmt: off - response = self.get_conn().projects().locations().functions().create( # pylint: disable=no-member + response = self.get_conn().projects().locations().functions().create( location=self._full_location(project_id, location), body=body ).execute(num_retries=self.num_retries) @@ -131,7 +131,7 @@ def update_function(self, name: str, body: dict, update_mask: List[str]) -> None :return: None """ # fmt: off - response = self.get_conn().projects().locations().functions().patch( # pylint: disable=no-member + response = self.get_conn().projects().locations().functions().patch( updateMask=",".join(update_mask), name=name, body=body @@ -156,7 +156,7 @@ def upload_function_zip(self, location: str, zip_path: str, project_id: str) -> :rtype: str """ # fmt: off - # pylint: disable=no-member # noqa + response = \ self.get_conn().projects().locations().functions().generateUploadUrl( parent=self._full_location(project_id, location) @@ -187,7 +187,7 @@ def delete_function(self, name: str) -> None: :return: None """ # fmt: off - response = self.get_conn().projects().locations().functions().delete( # pylint: disable=no-member + response = self.get_conn().projects().locations().functions().delete( name=name).execute(num_retries=self.num_retries) # fmt: on operation_name = response["name"] @@ -218,7 +218,7 @@ def call_function( """ name = f"projects/{project_id}/locations/{location}/functions/{function_id}" # fmt: off - response = self.get_conn().projects().locations().functions().call( # pylint: disable=no-member + response = self.get_conn().projects().locations().functions().call( name=name, body=input_data ).execute(num_retries=self.num_retries) @@ -241,7 +241,7 @@ def _wait_for_operation_to_complete(self, operation_name: str) -> dict: service = self.get_conn() while True: # fmt: off - operation_response = service.operations().get( # pylint: disable=no-member + operation_response = service.operations().get( name=operation_name, ).execute(num_retries=self.num_retries) # fmt: on diff --git a/airflow/providers/google/cloud/hooks/gcs.py b/airflow/providers/google/cloud/hooks/gcs.py index 63e6081d78462..5aba40f135d30 100644 --- a/airflow/providers/google/cloud/hooks/gcs.py +++ b/airflow/providers/google/cloud/hooks/gcs.py @@ -41,8 +41,8 @@ from airflow.utils import timezone from airflow.version import version -RT = TypeVar('RT') # pylint: disable=invalid-name -T = TypeVar("T", bound=Callable) # pylint: disable=invalid-name +RT = TypeVar('RT') +T = TypeVar("T", bound=Callable) # Use default timeout from google-cloud-storage DEFAULT_TIMEOUT = 60 @@ -334,7 +334,7 @@ def provide_file( self, bucket_name: Optional[str] = None, object_name: Optional[str] = None, - object_url: Optional[str] = None, # pylint: disable=unused-argument + object_url: Optional[str] = None, ): """ Downloads the file to a temporary directory and returns a 
file handle @@ -364,7 +364,7 @@ def provide_file_and_upload( self, bucket_name: Optional[str] = None, object_name: Optional[str] = None, - object_url: Optional[str] = None, # pylint: disable=unused-argument + object_url: Optional[str] = None, ): """ Creates temporary file, returns a file handle and uploads the files content @@ -390,7 +390,7 @@ def provide_file_and_upload( tmp_file.flush() self.upload(bucket_name=bucket_name, object_name=object_name, filename=tmp_file.name) - def upload( # pylint: disable=too-many-arguments + def upload( self, bucket_name: str, object_name: str, @@ -896,9 +896,7 @@ def create_bucket( for item in bucket_resource: if item != "name": - bucket._patch_property( # pylint: disable=protected-access - name=item, value=resource[item] # type: ignore[index] - ) + bucket._patch_property(name=item, value=resource[item]) # type: ignore[index] bucket.storage_class = storage_class bucket.labels = labels diff --git a/airflow/providers/google/cloud/hooks/gdm.py b/airflow/providers/google/cloud/hooks/gdm.py index 6ff733838b0fe..9cda909dde03d 100644 --- a/airflow/providers/google/cloud/hooks/gdm.py +++ b/airflow/providers/google/cloud/hooks/gdm.py @@ -25,7 +25,7 @@ from airflow.providers.google.common.hooks.base_google import GoogleBaseHook -class GoogleDeploymentManagerHook(GoogleBaseHook): # pylint: disable=abstract-method +class GoogleDeploymentManagerHook(GoogleBaseHook): """ Interact with Google Cloud Deployment Manager using the Google Cloud connection. This allows for scheduled and programmatic inspection and deletion fo resources managed by GDM. @@ -55,7 +55,7 @@ def get_conn(self) -> Resource: @GoogleBaseHook.fallback_to_default_project_id def list_deployments( self, - project_id: Optional[str] = None, # pylint: disable=too-many-arguments + project_id: Optional[str] = None, deployment_filter: Optional[str] = None, order_by: Optional[str] = None, ) -> List[Dict[str, Any]]: @@ -72,15 +72,13 @@ def list_deployments( """ deployments = [] # type: List[Dict] conn = self.get_conn() - # pylint: disable=no-member + request = conn.deployments().list(project=project_id, filter=deployment_filter, orderBy=order_by) while request is not None: response = request.execute(num_retries=self.num_retries) deployments.extend(response.get("deployments", [])) - request = conn.deployments().list_next( # pylint: disable=no-member - previous_request=request, previous_response=response - ) + request = conn.deployments().list_next(previous_request=request, previous_response=response) return deployments @@ -101,7 +99,7 @@ def delete_deployment( :rtype: None """ conn = self.get_conn() - # pylint: disable=no-member + request = conn.deployments().delete( project=project_id, deployment=deployment, deletePolicy=delete_policy ) diff --git a/airflow/providers/google/cloud/hooks/kubernetes_engine.py b/airflow/providers/google/cloud/hooks/kubernetes_engine.py index 604815d01feb8..d4fb38584516b 100644 --- a/airflow/providers/google/cloud/hooks/kubernetes_engine.py +++ b/airflow/providers/google/cloud/hooks/kubernetes_engine.py @@ -75,7 +75,7 @@ def get_conn(self) -> container_v1.ClusterManagerClient: # To preserve backward compatibility # TODO: remove one day - def get_client(self) -> container_v1.ClusterManagerClient: # pylint: disable=missing-docstring + def get_client(self) -> container_v1.ClusterManagerClient: warnings.warn( "The get_client method has been deprecated. 
" "You should use the get_conn method.", DeprecationWarning, diff --git a/airflow/providers/google/cloud/hooks/life_sciences.py b/airflow/providers/google/cloud/hooks/life_sciences.py index 0b550a0f41806..c87f0b2ad1398 100644 --- a/airflow/providers/google/cloud/hooks/life_sciences.py +++ b/airflow/providers/google/cloud/hooks/life_sciences.py @@ -100,12 +100,7 @@ def run_pipeline(self, body: dict, location: str, project_id: str) -> dict: parent = self._location_path(project_id=project_id, location=location) service = self.get_conn() - request = ( - service.projects() # pylint: disable=no-member - .locations() - .pipelines() - .run(parent=parent, body=body) - ) + request = service.projects().locations().pipelines().run(parent=parent, body=body) response = request.execute(num_retries=self.num_retries) @@ -147,7 +142,7 @@ def _wait_for_operation_to_complete(self, operation_name: str) -> None: service = self.get_conn() while True: operation_response = ( - service.projects() # pylint: disable=no-member + service.projects() .locations() .operations() .get(name=operation_name) diff --git a/airflow/providers/google/cloud/hooks/mlengine.py b/airflow/providers/google/cloud/hooks/mlengine.py index c1c42351d97d6..2e00312f77377 100644 --- a/airflow/providers/google/cloud/hooks/mlengine.py +++ b/airflow/providers/google/cloud/hooks/mlengine.py @@ -127,7 +127,7 @@ def create_job(self, job: dict, project_id: str, use_existing_job_fn: Optional[C self._append_label(job) self.log.info("Creating job.") - # pylint: disable=no-member + request = hook.projects().jobs().create(parent=f'projects/{project_id}', body=job) job_id = job['jobId'] @@ -173,7 +173,7 @@ def cancel_job( :raises: googleapiclient.errors.HttpError """ hook = self.get_conn() - # pylint: disable=no-member + request = hook.projects().jobs().cancel(name=f'projects/{project_id}/jobs/{job_id}') try: @@ -204,7 +204,7 @@ def _get_job(self, project_id: str, job_id: str) -> dict: """ hook = self.get_conn() job_name = f'projects/{project_id}/jobs/{job_id}' - request = hook.projects().jobs().get(name=job_name) # pylint: disable=no-member + request = hook.projects().jobs().get(name=job_name) while True: try: return request.execute(num_retries=self.num_retries) @@ -270,10 +270,9 @@ def create_version( self._append_label(version_spec) - # pylint: disable=no-member create_request = hook.projects().models().versions().create(parent=parent_name, body=version_spec) response = create_request.execute(num_retries=self.num_retries) - get_request = hook.projects().operations().get(name=response['name']) # pylint: disable=no-member + get_request = hook.projects().operations().get(name=response['name']) return _poll_with_exponential_delay( request=get_request, @@ -308,7 +307,7 @@ def set_default_version( """ hook = self.get_conn() full_version_name = f'projects/{project_id}/models/{model_name}/versions/{version_name}' - # pylint: disable=no-member + request = hook.projects().models().versions().setDefault(name=full_version_name, body={}) try: @@ -341,13 +340,13 @@ def list_versions( hook = self.get_conn() result = [] # type: List[Dict] full_parent_name = f'projects/{project_id}/models/{model_name}' - # pylint: disable=no-member + request = hook.projects().models().versions().list(parent=full_parent_name, pageSize=100) while request is not None: response = request.execute(num_retries=self.num_retries) result.extend(response.get('versions', [])) - # pylint: disable=no-member + request = ( hook.projects() .models() @@ -379,11 +378,9 @@ def delete_version( """ hook = 
self.get_conn() full_name = f'projects/{project_id}/models/{model_name}/versions/{version_name}' - delete_request = ( - hook.projects().models().versions().delete(name=full_name) # pylint: disable=no-member - ) + delete_request = hook.projects().models().versions().delete(name=full_name) response = delete_request.execute(num_retries=self.num_retries) - get_request = hook.projects().operations().get(name=response['name']) # pylint: disable=no-member + get_request = hook.projects().operations().get(name=response['name']) return _poll_with_exponential_delay( request=get_request, @@ -419,7 +416,7 @@ def create_model( self._append_label(model) try: - request = hook.projects().models().create(parent=project, body=model) # pylint: disable=no-member + request = hook.projects().models().create(parent=project, body=model) response = request.execute(num_retries=self.num_retries) except HttpError as e: if e.resp.status != 409: @@ -468,7 +465,7 @@ def get_model( if not model_name: raise ValueError("Model name must be provided and " "it could not be an empty string") full_model_name = f'projects/{project_id}/models/{model_name}' - request = hook.projects().models().get(name=full_model_name) # pylint: disable=no-member + request = hook.projects().models().get(name=full_model_name) try: return request.execute(num_retries=self.num_retries) except HttpError as e: @@ -505,7 +502,7 @@ def delete_model( model_path = f'projects/{project_id}/models/{model_name}' if delete_contents: self._delete_all_versions(model_name, project_id) - request = hook.projects().models().delete(name=model_path) # pylint: disable=no-member + request = hook.projects().models().delete(name=model_path) try: request.execute(num_retries=self.num_retries) except HttpError as e: diff --git a/airflow/providers/google/cloud/hooks/pubsub.py b/airflow/providers/google/cloud/hooks/pubsub.py index ed5eaadfb54a2..8d33578a3e216 100644 --- a/airflow/providers/google/cloud/hooks/pubsub.py +++ b/airflow/providers/google/cloud/hooks/pubsub.py @@ -158,7 +158,6 @@ def _validate_messages(messages) -> None: "Wrong message. If 'data' is not provided 'attributes' must be a non empty dictionary." 
) - # pylint: disable=too-many-arguments @GoogleBaseHook.fallback_to_default_project_id def create_topic( self, @@ -217,7 +216,7 @@ def create_topic( self.log.info("Creating topic (path) %s", topic_path) try: - # pylint: disable=no-member + publisher.create_topic( request={ "name": topic_path, @@ -275,7 +274,7 @@ def delete_topic( self.log.info("Deleting topic (path) %s", topic_path) try: - # pylint: disable=no-member + publisher.delete_topic( request={"topic": topic_path}, retry=retry, timeout=timeout, metadata=metadata or () ) @@ -287,7 +286,6 @@ def delete_topic( raise PubSubException(f'Error deleting topic {topic}', e) self.log.info("Deleted topic (path) %s", topic_path) - # pylint: disable=too-many-arguments @GoogleBaseHook.fallback_to_default_project_id def create_subscription( self, @@ -402,7 +400,6 @@ def create_subscription( labels = labels or {} labels['airflow-version'] = 'v' + version.replace('.', '-').replace('+', '-') - # pylint: disable=no-member subscription_path = f"projects/{subscription_project_id}/subscriptions/{subscription}" topic_path = f"projects/{project_id}/topics/{topic}" @@ -469,12 +466,12 @@ def delete_subscription( :type metadata: Sequence[Tuple[str, str]]] """ subscriber = self.subscriber_client - # noqa E501 # pylint: disable=no-member + # E501 subscription_path = f"projects/{project_id}/subscriptions/{subscription}" self.log.info("Deleting subscription (path) %s", subscription_path) try: - # pylint: disable=no-member + subscriber.delete_subscription( request={"subscription": subscription_path}, retry=retry, @@ -533,12 +530,12 @@ def pull( https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/pull#ReceivedMessage """ subscriber = self.subscriber_client - # noqa E501 # pylint: disable=no-member,line-too-long + # E501 subscription_path = f"projects/{project_id}/subscriptions/{subscription}" self.log.info("Pulling max %d messages from subscription (path) %s", max_messages, subscription_path) try: - # pylint: disable=no-member + response = subscriber.pull( request={ "subscription": subscription_path, @@ -599,12 +596,12 @@ def acknowledge( raise ValueError("One and only one of 'ack_ids' and 'messages' arguments have to be provided") subscriber = self.subscriber_client - # noqa E501 # pylint: disable=no-member + # E501 subscription_path = f"projects/{project_id}/subscriptions/{subscription}" self.log.info("Acknowledging %d ack_ids from subscription (path) %s", len(ack_ids), subscription_path) try: - # pylint: disable=no-member + subscriber.acknowledge( request={"subscription": subscription_path, "ack_ids": ack_ids}, retry=retry, diff --git a/airflow/providers/google/cloud/hooks/secret_manager.py b/airflow/providers/google/cloud/hooks/secret_manager.py index ecd70c40d36cc..0561e6ade58c5 100644 --- a/airflow/providers/google/cloud/hooks/secret_manager.py +++ b/airflow/providers/google/cloud/hooks/secret_manager.py @@ -18,7 +18,7 @@ """Hook for Secrets Manager service""" from typing import Optional, Sequence, Union -from airflow.providers.google.cloud._internal_client.secret_manager_client import _SecretManagerClient # noqa +from airflow.providers.google.cloud._internal_client.secret_manager_client import _SecretManagerClient from airflow.providers.google.common.hooks.base_google import GoogleBaseHook diff --git a/airflow/providers/google/cloud/hooks/spanner.py b/airflow/providers/google/cloud/hooks/spanner.py index 90cdb3255e099..06830a752d8c3 100644 --- a/airflow/providers/google/cloud/hooks/spanner.py +++ 
b/airflow/providers/google/cloud/hooks/spanner.py @@ -23,7 +23,7 @@ from google.cloud.spanner_v1.database import Database from google.cloud.spanner_v1.instance import Instance from google.cloud.spanner_v1.transaction import Transaction -from google.longrunning.operations_grpc_pb2 import Operation # noqa: F401 +from google.longrunning.operations_grpc_pb2 import Operation from airflow.exceptions import AirflowException from airflow.providers.google.common.hooks.base_google import GoogleBaseHook @@ -365,7 +365,7 @@ def delete_database(self, instance_id: str, database_id, project_id: str) -> boo ) return False try: - database.drop() # pylint: disable=E1111 + database.drop() except GoogleAPICallError as e: self.log.error('An error occurred: %s. Exiting.', e.message) raise e diff --git a/airflow/providers/google/cloud/hooks/text_to_speech.py b/airflow/providers/google/cloud/hooks/text_to_speech.py index ca5904e54eb1e..25e39ca1f5007 100644 --- a/airflow/providers/google/cloud/hooks/text_to_speech.py +++ b/airflow/providers/google/cloud/hooks/text_to_speech.py @@ -75,11 +75,10 @@ def get_conn(self) -> TextToSpeechClient: :rtype: google.cloud.texttospeech_v1.TextToSpeechClient """ if not self._client: - # pylint: disable=unexpected-keyword-arg + self._client = TextToSpeechClient( credentials=self._get_credentials(), client_info=self.client_info ) - # pylint: enable=unexpected-keyword-arg return self._client @@ -116,8 +115,7 @@ def synthesize_speech( """ client = self.get_conn() self.log.info("Synthesizing input: %s", input_data) - # pylint: disable=unexpected-keyword-arg + return client.synthesize_speech( input_=input_data, voice=voice, audio_config=audio_config, retry=retry, timeout=timeout ) - # pylint: enable=unexpected-keyword-arg diff --git a/airflow/providers/google/cloud/hooks/vision.py b/airflow/providers/google/cloud/hooks/vision.py index e5d94c9306591..52c3a9a09824a 100644 --- a/airflow/providers/google/cloud/hooks/vision.py +++ b/airflow/providers/google/cloud/hooks/vision.py @@ -440,7 +440,7 @@ def delete_reference_image( name = ProductSearchClient.reference_image_path( project=project_id, location=location, product=product_id, reference_image=reference_image_id ) - # pylint: disable=assignment-from-no-return + response = client.delete_reference_image( name=name, retry=retry, @@ -492,7 +492,7 @@ def remove_product_from_product_set( ) -> None: """ For the documentation see: - :py:class:`~airflow.providers.google.cloud.operators.vision.CloudVisionRemoveProductFromProductSetOperator` # pylint: disable=line-too-long # noqa + :py:class:`~airflow.providers.google.cloud.operators.vision.CloudVisionRemoveProductFromProductSetOperator` """ client = self.get_conn() @@ -521,7 +521,6 @@ def annotate_image( self.log.info('Annotating image') - # pylint: disable=no-member response = client.annotate_image(request=request, retry=retry, timeout=timeout) self.log.info('Image annotated') @@ -543,9 +542,7 @@ def batch_annotate_images( self.log.info('Annotating images') - response = client.batch_annotate_images( - requests=requests, retry=retry, timeout=timeout # pylint: disable=no-member - ) + response = client.batch_annotate_images(requests=requests, retry=retry, timeout=timeout) self.log.info('Images annotated') @@ -571,7 +568,7 @@ def text_detection( if additional_properties is None: additional_properties = {} - response = client.text_detection( # pylint: disable=no-member + response = client.text_detection( image=image, max_results=max_results, retry=retry, timeout=timeout, **additional_properties ) 
response = MessageToDict(response) @@ -601,7 +598,7 @@ def document_text_detection( if additional_properties is None: additional_properties = {} - response = client.document_text_detection( # pylint: disable=no-member + response = client.document_text_detection( image=image, max_results=max_results, retry=retry, timeout=timeout, **additional_properties ) response = MessageToDict(response) @@ -631,7 +628,7 @@ def label_detection( if additional_properties is None: additional_properties = {} - response = client.label_detection( # pylint: disable=no-member + response = client.label_detection( image=image, max_results=max_results, retry=retry, timeout=timeout, **additional_properties ) response = MessageToDict(response) @@ -661,7 +658,7 @@ def safe_search_detection( if additional_properties is None: additional_properties = {} - response = client.safe_search_detection( # pylint: disable=no-member + response = client.safe_search_detection( image=image, max_results=max_results, retry=retry, timeout=timeout, **additional_properties ) response = MessageToDict(response) diff --git a/airflow/providers/google/cloud/hooks/workflows.py b/airflow/providers/google/cloud/hooks/workflows.py index 6c7835032892e..87bc924f44e12 100644 --- a/airflow/providers/google/cloud/hooks/workflows.py +++ b/airflow/providers/google/cloud/hooks/workflows.py @@ -19,8 +19,6 @@ from google.api_core.operation import Operation from google.api_core.retry import Retry - -# pylint: disable=no-name-in-module from google.cloud.workflows.executions_v1beta import Execution, ExecutionsClient from google.cloud.workflows.executions_v1beta.services.executions.pagers import ListExecutionsPager from google.cloud.workflows_v1beta import Workflow, WorkflowsClient @@ -29,8 +27,6 @@ from airflow.providers.google.common.hooks.base_google import GoogleBaseHook -# pylint: enable=no-name-in-module - class WorkflowsHook(GoogleBaseHook): """ diff --git a/airflow/providers/google/cloud/log/gcs_task_handler.py b/airflow/providers/google/cloud/log/gcs_task_handler.py index c483f782960aa..38c025a67ab96 100644 --- a/airflow/providers/google/cloud/log/gcs_task_handler.py +++ b/airflow/providers/google/cloud/log/gcs_task_handler.py @@ -74,8 +74,7 @@ def __init__( filename_template: str, gcp_key_path: Optional[str] = None, gcp_keyfile_dict: Optional[dict] = None, - # See: https://github.com/PyCQA/pylint/issues/2377 - gcp_scopes: Optional[Collection[str]] = _DEFAULT_SCOPESS, # pylint: disable=unsubscriptable-object + gcp_scopes: Optional[Collection[str]] = _DEFAULT_SCOPESS, project_id: Optional[str] = None, ): super().__init__(base_log_folder, filename_template) @@ -158,7 +157,7 @@ def _read(self, ti, try_number, metadata=None): remote_log = blob.download_as_bytes().decode() log = f'*** Reading remote log from {remote_loc}.\n{remote_log}\n' return log, {'end_of_log': True} - except Exception as e: # pylint: disable=broad-except + except Exception as e: log = f'*** Unable to read remote log from {remote_loc}\n*** {str(e)}\n\n' self.log.error(log) local_log, metadata = super()._read(ti, try_number) @@ -179,13 +178,13 @@ def gcs_write(self, log, remote_log_location): blob = storage.Blob.from_string(remote_log_location, self.client) old_log = blob.download_as_bytes().decode() log = '\n'.join([old_log, log]) if old_log else log - except Exception as e: # pylint: disable=broad-except - if not hasattr(e, 'resp') or e.resp.get('status') != '404': # pylint: disable=no-member + except Exception as e: + if not hasattr(e, 'resp') or e.resp.get('status') != '404': log = 
f'*** Previous log discarded: {str(e)}\n\n' + log self.log.info("Previous log discarded: %s", e) try: blob = storage.Blob.from_string(remote_log_location, self.client) blob.upload_from_string(log, content_type="text/plain") - except Exception as e: # pylint: disable=broad-except + except Exception as e: self.log.error('Could not write logs to %s: %s', remote_log_location, e) diff --git a/airflow/providers/google/cloud/log/stackdriver_task_handler.py b/airflow/providers/google/cloud/log/stackdriver_task_handler.py index c0fe85bd65c64..dd1ca9ffd77d9 100644 --- a/airflow/providers/google/cloud/log/stackdriver_task_handler.py +++ b/airflow/providers/google/cloud/log/stackdriver_task_handler.py @@ -89,8 +89,7 @@ class StackdriverTaskHandler(logging.Handler): def __init__( self, gcp_key_path: Optional[str] = None, - # See: https://github.com/PyCQA/pylint/issues/2377 - scopes: Optional[Collection[str]] = _DEFAULT_SCOPESS, # pylint: disable=unsubscriptable-object + scopes: Optional[Collection[str]] = _DEFAULT_SCOPESS, name: str = DEFAULT_LOGGER_NAME, transport: Type[Transport] = BackgroundThreadTransport, resource: Resource = _GLOBAL_RESOURCE, @@ -98,8 +97,7 @@ def __init__( ): super().__init__() self.gcp_key_path: Optional[str] = gcp_key_path - # See: https://github.com/PyCQA/pylint/issues/2377 - self.scopes: Optional[Collection[str]] = scopes # pylint: disable=unsubscriptable-object + self.scopes: Optional[Collection[str]] = scopes self.name: str = name self.transport_type: Type[Transport] = transport self.resource: Resource = resource diff --git a/airflow/providers/google/cloud/operators/automl.py b/airflow/providers/google/cloud/operators/automl.py index 2493e67befef8..bdd4bb5c79c61 100644 --- a/airflow/providers/google/cloud/operators/automl.py +++ b/airflow/providers/google/cloud/operators/automl.py @@ -16,7 +16,7 @@ # specific language governing permissions and limitations # under the License. # -# pylint: disable=too-many-lines + """This module contains Google AutoML operators.""" import ast from typing import Dict, List, Optional, Sequence, Tuple, Union @@ -280,7 +280,7 @@ class AutoMLBatchPredictOperator(BaseOperator): "impersonation_chain", ) - def __init__( # pylint: disable=too-many-arguments + def __init__( self, *, model_id: str, @@ -577,7 +577,7 @@ class AutoMLTablesListColumnSpecsOperator(BaseOperator): "impersonation_chain", ) - def __init__( # pylint: disable=too-many-arguments + def __init__( self, *, dataset_id: str, diff --git a/airflow/providers/google/cloud/operators/bigquery.py b/airflow/providers/google/cloud/operators/bigquery.py index 9d482fdf3632c..f8d95599f51ab 100644 --- a/airflow/providers/google/cloud/operators/bigquery.py +++ b/airflow/providers/google/cloud/operators/bigquery.py @@ -16,7 +16,7 @@ # specific language governing permissions and limitations # under the License. -# pylint: disable=too-many-lines + """This module contains Google BigQuery operators.""" import enum import hashlib @@ -484,7 +484,6 @@ def execute(self, context) -> list: return table_data -# pylint: disable=too-many-instance-attributes class BigQueryExecuteQueryOperator(BaseOperator): """ Executes BigQuery SQL queries in a specific BigQuery database. 
@@ -604,7 +603,6 @@ def operator_extra_links(self): return (BigQueryConsoleLink(),) return (BigQueryConsoleIndexableLink(i) for i, _ in enumerate(self.sql)) - # pylint: disable=too-many-arguments, too-many-locals def __init__( self, *, @@ -882,7 +880,6 @@ class BigQueryCreateEmptyTableOperator(BaseOperator): template_fields_renderers = {"table_resource": "json", "materialized_view": "json"} ui_color = BigQueryUIColors.TABLE.value - # pylint: disable=too-many-arguments def __init__( self, *, @@ -969,7 +966,6 @@ def execute(self, context) -> None: self.log.info('Table %s.%s already exists.', self.dataset_id, self.table_id) -# pylint: disable=too-many-instance-attributes class BigQueryCreateExternalTableOperator(BaseOperator): """ Creates a new external table in the dataset with the data from Google Cloud @@ -1081,7 +1077,6 @@ class BigQueryCreateExternalTableOperator(BaseOperator): template_fields_renderers = {"table_resource": "json"} ui_color = BigQueryUIColors.TABLE.value - # pylint: disable=too-many-arguments,too-many-locals def __init__( self, *, @@ -2133,7 +2128,6 @@ def execute(self, context): ) -# pylint: disable=too-many-arguments class BigQueryInsertJobOperator(BaseOperator): """ Executes a BigQuery job. Waits for the job to complete and returns job id. diff --git a/airflow/providers/google/cloud/operators/bigtable.py b/airflow/providers/google/cloud/operators/bigtable.py index 7c97187b430b8..90b9d20f1a8c3 100644 --- a/airflow/providers/google/cloud/operators/bigtable.py +++ b/airflow/providers/google/cloud/operators/bigtable.py @@ -111,7 +111,7 @@ class BigtableCreateInstanceOperator(BaseOperator, BigtableValidationMixin): def __init__( self, - *, # pylint: disable=too-many-arguments + *, instance_id: str, main_cluster_id: str, main_cluster_zone: str, diff --git a/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py b/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py index 6c803ef35a24f..ae920265134fd 100644 --- a/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +++ b/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py @@ -820,7 +820,7 @@ class CloudDataTransferServiceS3ToGCSOperator(BaseOperator): ) ui_color = '#e09411' - def __init__( # pylint: disable=too-many-arguments + def __init__( self, *, s3_bucket: str, @@ -989,7 +989,7 @@ class CloudDataTransferServiceGCSToGCSOperator(BaseOperator): ) ui_color = '#e09411' - def __init__( # pylint: disable=too-many-arguments + def __init__( self, *, source_bucket: str, diff --git a/airflow/providers/google/cloud/operators/datacatalog.py b/airflow/providers/google/cloud/operators/datacatalog.py index c33efa82eb9e7..59ae7c2b1d1f9 100644 --- a/airflow/providers/google/cloud/operators/datacatalog.py +++ b/airflow/providers/google/cloud/operators/datacatalog.py @@ -328,7 +328,7 @@ class CloudDataCatalogCreateTagOperator(BaseOperator): "impersonation_chain", ) - def __init__( # pylint: disable=too-many-arguments + def __init__( self, *, location: str, @@ -1857,7 +1857,7 @@ class CloudDataCatalogUpdateEntryOperator(BaseOperator): "impersonation_chain", ) - def __init__( # pylint: disable=too-many-arguments + def __init__( self, *, entry: Union[Dict, Entry], @@ -1968,7 +1968,7 @@ class CloudDataCatalogUpdateTagOperator(BaseOperator): "impersonation_chain", ) - def __init__( # pylint: disable=too-many-arguments + def __init__( self, *, tag: Union[Dict, Tag], @@ -2200,7 +2200,7 @@ class CloudDataCatalogUpdateTagTemplateFieldOperator(BaseOperator): 
"impersonation_chain", ) - def __init__( # pylint: disable=too-many-arguments + def __init__( self, *, tag_template_field: Union[Dict, TagTemplateField], diff --git a/airflow/providers/google/cloud/operators/dataflow.py b/airflow/providers/google/cloud/operators/dataflow.py index cb31c11879ecb..c5ed3c38202fe 100644 --- a/airflow/providers/google/cloud/operators/dataflow.py +++ b/airflow/providers/google/cloud/operators/dataflow.py @@ -42,9 +42,9 @@ class CheckJobRunning(Enum): WaitForRun - wait for job to finish and then continue with new job """ - IgnoreJob = 1 # pylint: disable=invalid-name - FinishIfRunning = 2 # pylint: disable=invalid-name - WaitForRun = 3 # pylint: disable=invalid-name + IgnoreJob = 1 + FinishIfRunning = 2 + WaitForRun = 3 class DataflowConfiguration: @@ -169,7 +169,6 @@ def __init__( self.check_if_running = check_if_running -# pylint: disable=too-many-instance-attributes class DataflowCreateJavaJobOperator(BaseOperator): """ Start a Java Cloud Dataflow batch job. The parameters of the operation @@ -347,7 +346,6 @@ class DataflowCreateJavaJobOperator(BaseOperator): template_fields = ["options", "jar", "job_name"] ui_color = "#0273d4" - # pylint: disable=too-many-arguments def __init__( self, *, @@ -432,21 +430,17 @@ def set_current_job_id(job_id): with ExitStack() as exit_stack: if self.jar.lower().startswith("gs://"): gcs_hook = GCSHook(self.gcp_conn_id, self.delegate_to) - tmp_gcs_file = exit_stack.enter_context( # pylint: disable=no-member - gcs_hook.provide_file(object_url=self.jar) - ) + tmp_gcs_file = exit_stack.enter_context(gcs_hook.provide_file(object_url=self.jar)) self.jar = tmp_gcs_file.name is_running = False if self.check_if_running != CheckJobRunning.IgnoreJob: - is_running = ( - self.dataflow_hook.is_job_dataflow_running( # pylint: disable=no-value-for-parameter - name=self.job_name, - variables=pipeline_options, - ) + is_running = self.dataflow_hook.is_job_dataflow_running( + name=self.job_name, + variables=pipeline_options, ) while is_running and self.check_if_running == CheckJobRunning.WaitForRun: - # pylint: disable=no-value-for-parameter + is_running = self.dataflow_hook.is_job_dataflow_running( name=self.job_name, variables=pipeline_options, @@ -459,7 +453,7 @@ def set_current_job_id(job_id): job_class=self.job_class, process_line_callback=process_line_callback, ) - self.dataflow_hook.wait_for_done( # pylint: disable=no-value-for-parameter + self.dataflow_hook.wait_for_done( job_name=job_name, location=self.location, job_id=self.job_id, @@ -476,7 +470,6 @@ def on_kill(self) -> None: ) -# pylint: disable=too-many-instance-attributes class DataflowTemplatedJobStartOperator(BaseOperator): """ Start a Templated Cloud Dataflow job. The parameters of the operation @@ -633,7 +626,7 @@ class DataflowTemplatedJobStartOperator(BaseOperator): ] ui_color = "#0273d4" - def __init__( # pylint: disable=too-many-arguments + def __init__( self, *, template: str, @@ -926,7 +919,6 @@ def on_kill(self) -> None: self.hook.cancel_job(job_id=self.job_id, project_id=self.project_id) -# pylint: disable=too-many-instance-attributes class DataflowCreatePythonJobOperator(BaseOperator): """ Launching Cloud Dataflow jobs written in python. 
Note that both @@ -1046,7 +1038,7 @@ class DataflowCreatePythonJobOperator(BaseOperator): template_fields = ["options", "dataflow_default_options", "job_name", "py_file"] - def __init__( # pylint: disable=too-many-arguments + def __init__( self, *, py_file: str, @@ -1134,9 +1126,7 @@ def set_current_job_id(job_id): with ExitStack() as exit_stack: if self.py_file.lower().startswith("gs://"): gcs_hook = GCSHook(self.gcp_conn_id, self.delegate_to) - tmp_gcs_file = exit_stack.enter_context( # pylint: disable=no-member - gcs_hook.provide_file(object_url=self.py_file) - ) + tmp_gcs_file = exit_stack.enter_context(gcs_hook.provide_file(object_url=self.py_file)) self.py_file = tmp_gcs_file.name self.beam_hook.start_python_pipeline( @@ -1149,7 +1139,7 @@ def set_current_job_id(job_id): process_line_callback=process_line_callback, ) - self.dataflow_hook.wait_for_done( # pylint: disable=no-value-for-parameter + self.dataflow_hook.wait_for_done( job_name=job_name, location=self.location, job_id=self.job_id, diff --git a/airflow/providers/google/cloud/operators/datafusion.py b/airflow/providers/google/cloud/operators/datafusion.py index c8283a60ca8d2..0ba967322c560 100644 --- a/airflow/providers/google/cloud/operators/datafusion.py +++ b/airflow/providers/google/cloud/operators/datafusion.py @@ -789,7 +789,7 @@ class CloudDataFusionStartPipelineOperator(BaseOperator): "impersonation_chain", ) - def __init__( # pylint: disable=too-many-arguments + def __init__( self, *, pipeline_name: str, diff --git a/airflow/providers/google/cloud/operators/dataproc.py b/airflow/providers/google/cloud/operators/dataproc.py index fea5acc883b7a..56e88d28782e6 100644 --- a/airflow/providers/google/cloud/operators/dataproc.py +++ b/airflow/providers/google/cloud/operators/dataproc.py @@ -30,7 +30,7 @@ from google.api_core.exceptions import AlreadyExists, NotFound from google.api_core.retry import Retry, exponential_sleep_generator -from google.cloud.dataproc_v1beta2 import Cluster # pylint: disable=no-name-in-module +from google.cloud.dataproc_v1beta2 import Cluster from google.protobuf.duration_pb2 import Duration from google.protobuf.field_mask_pb2 import FieldMask @@ -86,7 +86,6 @@ def get_link(self, operator, dttm): ) -# pylint: disable=too-many-instance-attributes class ClusterGenerator: """ Create a new Dataproc Cluster. @@ -191,11 +190,10 @@ class ClusterGenerator: A duration in seconds. (If auto_delete_time is set this parameter will be ignored) :type auto_delete_ttl: int :param customer_managed_key: The customer-managed key used for disk encryption - ``projects/[PROJECT_STORING_KEYS]/locations/[LOCATION]/keyRings/[KEY_RING_NAME]/cryptoKeys/[KEY_NAME]`` # noqa # pylint: disable=line-too-long + ``projects/[PROJECT_STORING_KEYS]/locations/[LOCATION]/keyRings/[KEY_RING_NAME]/cryptoKeys/[KEY_NAME]`` # noqa :type customer_managed_key: str """ - # pylint: disable=too-many-arguments,too-many-locals def __init__( self, project_id: str, @@ -447,7 +445,6 @@ def make(self): return self._build_cluster_data() -# pylint: disable=too-many-instance-attributes class DataprocCreateClusterOperator(BaseOperator): """ Create a new cluster on Google Cloud Dataproc. 
The operator will wait until the @@ -524,7 +521,7 @@ class DataprocCreateClusterOperator(BaseOperator): operator_extra_links = (DataprocClusterLink(),) - def __init__( # pylint: disable=too-many-arguments + def __init__( self, *, cluster_name: str, @@ -1747,7 +1744,7 @@ class DataprocInstantiateWorkflowTemplateOperator(BaseOperator): template_fields = ['template_id', 'impersonation_chain', 'request_id', 'parameters'] template_fields_renderers = {"parameters": "json"} - def __init__( # pylint: disable=too-many-arguments + def __init__( self, *, template_id: str, @@ -2067,7 +2064,7 @@ class DataprocUpdateClusterOperator(BaseOperator): template_fields = ('impersonation_chain', 'cluster_name') operator_extra_links = (DataprocClusterLink(),) - def __init__( # pylint: disable=too-many-arguments + def __init__( self, *, location: str, diff --git a/airflow/providers/google/cloud/operators/datastore.py b/airflow/providers/google/cloud/operators/datastore.py index e300d7b35b85b..029696fee704c 100644 --- a/airflow/providers/google/cloud/operators/datastore.py +++ b/airflow/providers/google/cloud/operators/datastore.py @@ -79,7 +79,7 @@ class CloudDatastoreExportEntitiesOperator(BaseOperator): ] def __init__( - self, # pylint: disable=too-many-arguments + self, *, bucket: str, namespace: Optional[str] = None, diff --git a/airflow/providers/google/cloud/operators/dlp.py b/airflow/providers/google/cloud/operators/dlp.py index 42564d4d08b3a..7b348243e2e1a 100644 --- a/airflow/providers/google/cloud/operators/dlp.py +++ b/airflow/providers/google/cloud/operators/dlp.py @@ -16,7 +16,7 @@ # specific language governing permissions and limitations # under the License. -# pylint: disable=R0913, C0302 + """ This module contains various Google Cloud DLP operators which allow you to perform basic operations using diff --git a/airflow/providers/google/cloud/operators/mlengine.py b/airflow/providers/google/cloud/operators/mlengine.py index b8de5253805c9..047a39e770ec6 100644 --- a/airflow/providers/google/cloud/operators/mlengine.py +++ b/airflow/providers/google/cloud/operators/mlengine.py @@ -62,7 +62,6 @@ def _normalize_mlengine_job_id(job_id: str) -> str: return cleansed_job_id -# pylint: disable=too-many-instance-attributes class MLEngineStartBatchPredictionJobOperator(BaseOperator): """ Start a Google Cloud ML Engine prediction job. @@ -177,7 +176,7 @@ class MLEngineStartBatchPredictionJobOperator(BaseOperator): ] def __init__( - self, # pylint: disable=too-many-arguments + self, *, job_id: str, region: str, @@ -1057,7 +1056,6 @@ def get_link(self, operator, dttm): return console_link -# pylint: disable=too-many-instance-attributes class MLEngineStartTrainingJobOperator(BaseOperator): """ Operator for launching a MLEngine training job. 
@@ -1157,7 +1155,7 @@ class MLEngineStartTrainingJobOperator(BaseOperator): operator_extra_links = (AIPlatformConsoleLink(),) def __init__( - self, # pylint: disable=too-many-arguments + self, *, job_id: str, region: str, diff --git a/airflow/providers/google/cloud/operators/pubsub.py b/airflow/providers/google/cloud/operators/pubsub.py index ebef1b0b480ad..f590256317b88 100644 --- a/airflow/providers/google/cloud/operators/pubsub.py +++ b/airflow/providers/google/cloud/operators/pubsub.py @@ -124,7 +124,6 @@ class PubSubCreateTopicOperator(BaseOperator): ] ui_color = '#0273d4' - # pylint: disable=too-many-arguments def __init__( self, *, @@ -189,7 +188,6 @@ def execute(self, context) -> None: self.log.info("Created topic %s", self.topic) -# pylint: disable=too-many-instance-attributes class PubSubCreateSubscriptionOperator(BaseOperator): """Create a PubSub subscription. @@ -343,7 +341,6 @@ class PubSubCreateSubscriptionOperator(BaseOperator): ] ui_color = '#0273d4' - # pylint: disable=too-many-arguments, too-many-locals def __init__( self, *, @@ -939,7 +936,7 @@ def execute(self, context) -> list: def _default_message_callback( self, pulled_messages: List[ReceivedMessage], - context: Dict[str, Any], # pylint: disable=unused-argument + context: Dict[str, Any], ) -> list: """ This method can be overridden by subclasses or by `messages_callback` constructor argument. diff --git a/airflow/providers/google/cloud/operators/stackdriver.py b/airflow/providers/google/cloud/operators/stackdriver.py index 672cf38f27498..96fb7b658e2da 100644 --- a/airflow/providers/google/cloud/operators/stackdriver.py +++ b/airflow/providers/google/cloud/operators/stackdriver.py @@ -91,7 +91,6 @@ class StackdriverListAlertPoliciesOperator(BaseOperator): ) ui_color = "#e5ffcc" - # pylint: disable=too-many-arguments def __init__( self, *, @@ -576,7 +575,6 @@ class StackdriverListNotificationChannelsOperator(BaseOperator): ui_color = "#e5ffcc" - # pylint: disable=too-many-arguments def __init__( self, *, diff --git a/airflow/providers/google/cloud/operators/tasks.py b/airflow/providers/google/cloud/operators/tasks.py index 1d187866e6a45..895dcbdc4ebf0 100644 --- a/airflow/providers/google/cloud/operators/tasks.py +++ b/airflow/providers/google/cloud/operators/tasks.py @@ -784,7 +784,7 @@ class CloudTasksTaskCreateOperator(BaseOperator): "impersonation_chain", ) - def __init__( # pylint: disable=too-many-arguments + def __init__( self, *, location: str, diff --git a/airflow/providers/google/cloud/operators/workflows.py b/airflow/providers/google/cloud/operators/workflows.py index c7fc96d844d8f..8b1b49ca428de 100644 --- a/airflow/providers/google/cloud/operators/workflows.py +++ b/airflow/providers/google/cloud/operators/workflows.py @@ -24,12 +24,8 @@ import pytz from google.api_core.exceptions import AlreadyExists from google.api_core.retry import Retry - -# pylint: disable=no-name-in-module from google.cloud.workflows.executions_v1beta import Execution from google.cloud.workflows_v1beta import Workflow - -# pylint: enable=no-name-in-module from google.protobuf.field_mask_pb2 import FieldMask from airflow.models import BaseOperator diff --git a/airflow/providers/google/cloud/secrets/secret_manager.py b/airflow/providers/google/cloud/secrets/secret_manager.py index 60c00276515e1..919927fae8ecf 100644 --- a/airflow/providers/google/cloud/secrets/secret_manager.py +++ b/airflow/providers/google/cloud/secrets/secret_manager.py @@ -24,7 +24,7 @@ from cached_property import cached_property from airflow.exceptions import 
AirflowException -from airflow.providers.google.cloud._internal_client.secret_manager_client import _SecretManagerClient # noqa +from airflow.providers.google.cloud._internal_client.secret_manager_client import _SecretManagerClient from airflow.providers.google.cloud.utils.credentials_provider import get_credentials_and_project_id from airflow.secrets import BaseSecretsBackend from airflow.utils.log.logging_mixin import LoggingMixin diff --git a/airflow/providers/google/cloud/sensors/dataproc.py b/airflow/providers/google/cloud/sensors/dataproc.py index b4a05472b7060..c02a076927625 100644 --- a/airflow/providers/google/cloud/sensors/dataproc.py +++ b/airflow/providers/google/cloud/sensors/dataproc.py @@ -16,7 +16,7 @@ # specific language governing permissions and limitations # under the License. """This module contains a Dataproc Job sensor.""" -# pylint: disable=C0302 + from google.cloud.dataproc_v1beta2.types import JobStatus diff --git a/airflow/providers/google/cloud/sensors/gcs.py b/airflow/providers/google/cloud/sensors/gcs.py index 69f4fd89fee5e..b715022b32dc8 100644 --- a/airflow/providers/google/cloud/sensors/gcs.py +++ b/airflow/providers/google/cloud/sensors/gcs.py @@ -65,7 +65,7 @@ def __init__( self, *, bucket: str, - object: str, # pylint: disable=redefined-builtin + object: str, google_cloud_conn_id: str = 'google_cloud_default', delegate_to: Optional[str] = None, impersonation_chain: Optional[Union[str, Sequence[str]]] = None, @@ -139,7 +139,7 @@ class GCSObjectUpdateSensor(BaseSensorOperator): def __init__( self, bucket: str, - object: str, # pylint: disable=redefined-builtin + object: str, ts_func: Callable = ts_function, google_cloud_conn_id: str = 'google_cloud_default', delegate_to: Optional[str] = None, diff --git a/airflow/providers/google/cloud/sensors/pubsub.py b/airflow/providers/google/cloud/sensors/pubsub.py index 4945e063b5c8c..49c9547be8041 100644 --- a/airflow/providers/google/cloud/sensors/pubsub.py +++ b/airflow/providers/google/cloud/sensors/pubsub.py @@ -186,7 +186,7 @@ def poke(self, context: dict) -> bool: def _default_message_callback( self, pulled_messages: List[ReceivedMessage], - context: Dict[str, Any], # pylint: disable=unused-argument + context: Dict[str, Any], ): """ This method can be overridden by subclasses or by `messages_callback` constructor argument. 
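The PubSub operator and sensor hunks above remove the `unused-argument` disable from `_default_message_callback(pulled_messages, context)`; the `context` parameter stays because any `messages_callback` passed to the constructor is called with both arguments. Below is a minimal sketch of such a callback, not part of the patch; the task id, project, and subscription names are placeholders.

from typing import Any, Dict, List

from google.cloud.pubsub_v1.types import ReceivedMessage

from airflow.providers.google.cloud.sensors.pubsub import PubSubPullSensor


def decode_messages(pulled_messages: List[ReceivedMessage], context: Dict[str, Any]) -> List[str]:
    # The context argument is part of the callback signature even when unused.
    return [message.message.data.decode("utf-8") for message in pulled_messages]


pull_messages = PubSubPullSensor(
    task_id="pull_messages",
    project_id="example-project",
    subscription="example-subscription",
    messages_callback=decode_messages,
)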
diff --git a/airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py b/airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py index 4ee2c0772a5c5..d628a9dc58e89 100644 --- a/airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py @@ -127,7 +127,6 @@ def execute(self, context): dest_gcs_bucket, dest_gcs_object_prefix = _parse_gcs_url(self.dest_gcs) - # pylint: disable=too-many-nested-blocks if not self.replace: # if we are not replacing -> list all files in the GCS bucket # and only keep those files which are present in diff --git a/airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py b/airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py index c0e3b6a6b11aa..2695a6a03cbf0 100644 --- a/airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py +++ b/airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py @@ -87,7 +87,7 @@ class BigQueryToBigQueryOperator(BaseOperator): def __init__( self, - *, # pylint: disable=too-many-arguments + *, source_project_dataset_tables: Union[List[str], str], destination_project_dataset_table: str, write_disposition: str = 'WRITE_EMPTY', diff --git a/airflow/providers/google/cloud/transfers/bigquery_to_gcs.py b/airflow/providers/google/cloud/transfers/bigquery_to_gcs.py index e0ead8cd79b83..af5828d753520 100644 --- a/airflow/providers/google/cloud/transfers/bigquery_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/bigquery_to_gcs.py @@ -87,7 +87,7 @@ class BigQueryToGCSOperator(BaseOperator): def __init__( self, - *, # pylint: disable=too-many-arguments + *, source_project_dataset_table: str, destination_cloud_storage_uris: List[str], compression: str = 'NONE', diff --git a/airflow/providers/google/cloud/transfers/bigquery_to_mysql.py b/airflow/providers/google/cloud/transfers/bigquery_to_mysql.py index 6acf0828345fd..6adfd5e792c71 100644 --- a/airflow/providers/google/cloud/transfers/bigquery_to_mysql.py +++ b/airflow/providers/google/cloud/transfers/bigquery_to_mysql.py @@ -88,7 +88,7 @@ class BigQueryToMySqlOperator(BaseOperator): def __init__( self, - *, # pylint: disable=too-many-arguments + *, dataset_table: str, mysql_table: str, selected_fields: Optional[str] = None, diff --git a/airflow/providers/google/cloud/transfers/cassandra_to_gcs.py b/airflow/providers/google/cloud/transfers/cassandra_to_gcs.py index e7d0bf4585220..2a383c702a177 100644 --- a/airflow/providers/google/cloud/transfers/cassandra_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/cassandra_to_gcs.py @@ -98,7 +98,7 @@ class CassandraToGCSOperator(BaseOperator): def __init__( self, - *, # pylint: disable=too-many-arguments + *, cql: str, bucket: str, filename: str, @@ -183,7 +183,6 @@ def execute(self, context: Dict[str, str]): # Close all sessions and connection associated with this Cassandra cluster hook.shutdown_cluster() - # pylint: disable=consider-using-with def _write_local_data_files(self, cursor): """ Takes a cursor, and writes results to a local file. 
@@ -210,7 +209,6 @@ def _write_local_data_files(self, cursor): return tmp_file_handles - # pylint: disable=consider-using-with def _write_local_schema_file(self, cursor): """ Takes a cursor, and writes the BigQuery schema for the results to a @@ -251,9 +249,7 @@ def generate_data_dict(cls, names: Iterable[str], values: Any) -> Dict[str, Any] return {n: cls.convert_value(v) for n, v in zip(names, values)} @classmethod - def convert_value( # pylint: disable=too-many-return-statements - cls, value: Optional[Any] - ) -> Optional[Any]: + def convert_value(cls, value: Optional[Any]) -> Optional[Any]: """Convert value to BQ type.""" if not value: return value diff --git a/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py b/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py index b9251100ed999..d454396cfb507 100644 --- a/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py +++ b/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py @@ -26,7 +26,6 @@ from airflow.providers.google.cloud.hooks.gcs import GCSHook -# pylint: disable=too-many-instance-attributes class GCSToBigQueryOperator(BaseOperator): """ Loads files from Google Cloud Storage into BigQuery. @@ -172,7 +171,6 @@ class GCSToBigQueryOperator(BaseOperator): template_ext = ('.sql',) ui_color = '#f0eee4' - # pylint: disable=too-many-locals,too-many-arguments def __init__( self, *, diff --git a/airflow/providers/google/cloud/transfers/gcs_to_gcs.py b/airflow/providers/google/cloud/transfers/gcs_to_gcs.py index c7af735a7d3b6..9ab43704792c4 100644 --- a/airflow/providers/google/cloud/transfers/gcs_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/gcs_to_gcs.py @@ -186,7 +186,7 @@ class GCSToGCSOperator(BaseOperator): def __init__( self, - *, # pylint: disable=too-many-arguments + *, source_bucket, source_object=None, source_objects=None, diff --git a/airflow/providers/google/cloud/transfers/gcs_to_local.py b/airflow/providers/google/cloud/transfers/gcs_to_local.py index a76502691aaef..74ddcb07fcfde 100644 --- a/airflow/providers/google/cloud/transfers/gcs_to_local.py +++ b/airflow/providers/google/cloud/transfers/gcs_to_local.py @@ -118,8 +118,8 @@ def __init__( super().__init__(**kwargs) self.bucket = bucket self.object_name = object_name - self.filename = filename # noqa - self.store_to_xcom_key = store_to_xcom_key # noqa + self.filename = filename + self.store_to_xcom_key = store_to_xcom_key self.gcp_conn_id = gcp_conn_id self.delegate_to = delegate_to self.impersonation_chain = impersonation_chain diff --git a/airflow/providers/google/cloud/transfers/gcs_to_sftp.py b/airflow/providers/google/cloud/transfers/gcs_to_sftp.py index 11edc54051e1e..3504a891cdb88 100644 --- a/airflow/providers/google/cloud/transfers/gcs_to_sftp.py +++ b/airflow/providers/google/cloud/transfers/gcs_to_sftp.py @@ -109,7 +109,6 @@ class GCSToSFTPOperator(BaseOperator): ) ui_color = "#f0eee4" - # pylint: disable=too-many-arguments def __init__( self, *, diff --git a/airflow/providers/google/cloud/transfers/oracle_to_gcs.py b/airflow/providers/google/cloud/transfers/oracle_to_gcs.py index d4ede0e41bef3..462047be5bebf 100644 --- a/airflow/providers/google/cloud/transfers/oracle_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/oracle_to_gcs.py @@ -15,7 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
-# pylint: disable=c-extension-no-member + import base64 import calendar from datetime import date, datetime, timedelta diff --git a/airflow/providers/google/cloud/transfers/s3_to_gcs.py b/airflow/providers/google/cloud/transfers/s3_to_gcs.py index 81cd6c48b4c9c..336255ff76b8c 100644 --- a/airflow/providers/google/cloud/transfers/s3_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/s3_to_gcs.py @@ -109,7 +109,6 @@ class S3ToGCSOperator(S3ListOperator): ) ui_color = '#e09411' - # pylint: disable=too-many-arguments def __init__( self, *, @@ -167,7 +166,6 @@ def execute(self, context): impersonation_chain=self.google_impersonation_chain, ) - # pylint: disable=too-many-nested-blocks if not self.replace: # if we are not replacing -> list all files in the GCS bucket # and only keep those files which are present in diff --git a/airflow/providers/google/cloud/transfers/sql_to_gcs.py b/airflow/providers/google/cloud/transfers/sql_to_gcs.py index 975a845d62a02..450c3beef0131 100644 --- a/airflow/providers/google/cloud/transfers/sql_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/sql_to_gcs.py @@ -102,7 +102,7 @@ class BaseSQLToGCSOperator(BaseOperator): def __init__( self, - *, # pylint: disable=too-many-arguments + *, sql: str, bucket: str, filename: str, @@ -184,7 +184,7 @@ def _write_local_data_files(self, cursor): schema = list(map(lambda schema_tuple: schema_tuple[0], cursor.description)) col_type_dict = self._get_col_type_dict() file_no = 0 - # pylint: disable=consider-using-with + tmp_file_handle = NamedTemporaryFile(delete=True) if self.export_format == 'csv': file_mime_type = 'text/csv' @@ -235,7 +235,7 @@ def _write_local_data_files(self, cursor): # Stop if the file exceeds the file size limit. if tmp_file_handle.tell() >= self.approx_max_file_size_bytes: file_no += 1 - # pylint: disable=consider-using-with + tmp_file_handle = NamedTemporaryFile(delete=True) files_to_upload.append( { @@ -339,7 +339,6 @@ def _write_local_schema_file(self, cursor): self.log.info('Using schema for %s', self.schema_filename) self.log.debug("Current schema: %s", schema) - # pylint: disable=consider-using-with tmp_schema_file_handle = NamedTemporaryFile(delete=True) tmp_schema_file_handle.write(schema.encode('utf-8')) schema_file_to_upload = { diff --git a/airflow/providers/google/cloud/utils/credentials_provider.py b/airflow/providers/google/cloud/utils/credentials_provider.py index e39b0a814a7d0..414c9c145588f 100644 --- a/airflow/providers/google/cloud/utils/credentials_provider.py +++ b/airflow/providers/google/cloud/utils/credentials_provider.py @@ -157,17 +157,13 @@ def provide_gcp_conn_and_credentials( """ with ExitStack() as stack: if key_file_path: - stack.enter_context( # type; ignore # pylint: disable=no-member - provide_gcp_credentials(key_file_path) - ) + stack.enter_context(provide_gcp_credentials(key_file_path)) # type; ignore if project_id: - stack.enter_context( # type; ignore # pylint: disable=no-member + stack.enter_context( # type; ignore patch_environ({PROJECT: project_id, LEGACY_PROJECT: project_id}) ) - stack.enter_context( # type; ignore # pylint: disable=no-member - provide_gcp_connection(key_file_path, scopes, project_id) - ) + stack.enter_context(provide_gcp_connection(key_file_path, scopes, project_id)) # type; ignore yield @@ -206,8 +202,7 @@ def __init__( self, key_path: Optional[str] = None, keyfile_dict: Optional[Dict[str, str]] = None, - # See: https://github.com/PyCQA/pylint/issues/2377 - scopes: Optional[Collection[str]] = None, # pylint: 
disable=unsubscriptable-object + scopes: Optional[Collection[str]] = None, delegate_to: Optional[str] = None, disable_logging: bool = False, target_principal: Optional[str] = None, diff --git a/airflow/providers/google/cloud/utils/field_sanitizer.py b/airflow/providers/google/cloud/utils/field_sanitizer.py index 53d2b26d5db16..3065b24f01184 100644 --- a/airflow/providers/google/cloud/utils/field_sanitizer.py +++ b/airflow/providers/google/cloud/utils/field_sanitizer.py @@ -123,7 +123,7 @@ def __init__(self, sanitize_specs: List[str]) -> None: def _sanitize(self, dictionary, remaining_field_spec, current_path): field_split = remaining_field_spec.split(".", 1) - if len(field_split) == 1: # pylint: disable=too-many-nested-blocks + if len(field_split) == 1: field_name = field_split[0] if field_name in dictionary: self.log.info("Deleted %s [%s]", field_name, current_path) diff --git a/airflow/providers/google/cloud/utils/mlengine_operator_utils.py b/airflow/providers/google/cloud/utils/mlengine_operator_utils.py index bd292c9f4f3ea..bff950da5a0c4 100644 --- a/airflow/providers/google/cloud/utils/mlengine_operator_utils.py +++ b/airflow/providers/google/cloud/utils/mlengine_operator_utils.py @@ -34,10 +34,10 @@ from airflow.providers.google.cloud.operators.dataflow import DataflowCreatePythonJobOperator from airflow.providers.google.cloud.operators.mlengine import MLEngineStartBatchPredictionJobOperator -T = TypeVar("T", bound=Callable) # pylint: disable=invalid-name +T = TypeVar("T", bound=Callable) -def create_evaluate_ops( # pylint: disable=too-many-arguments +def create_evaluate_ops( task_prefix: str, data_format: str, input_paths: List[str], diff --git a/airflow/providers/google/cloud/utils/mlengine_prediction_summary.py b/airflow/providers/google/cloud/utils/mlengine_prediction_summary.py index d95d8a6a785c1..a3359e290ca21 100644 --- a/airflow/providers/google/cloud/utils/mlengine_prediction_summary.py +++ b/airflow/providers/google/cloud/utils/mlengine_prediction_summary.py @@ -1,4 +1,3 @@ -# flake8: noqa: F841 # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. 
See the NOTICE file @@ -108,7 +107,7 @@ def metric_fn(inst): import os import apache_beam as beam -import dill # pylint: disable=wrong-import-order +import dill class JsonCoder: @@ -126,7 +125,7 @@ def decode(x): @beam.ptransform_fn -def MakeSummary(pcoll, metric_fn, metric_keys): # pylint: disable=invalid-name +def MakeSummary(pcoll, metric_fn, metric_keys): """Summary PTransform used in Dataflow.""" return ( pcoll @@ -183,7 +182,7 @@ def run(argv=None): metric_keys = known_args.metric_keys.split(",") with beam.Pipeline(options=beam.pipeline.PipelineOptions(pipeline_args)) as pipe: - # pylint: disable=no-value-for-parameter + prediction_result_pattern = os.path.join(known_args.prediction_path, "prediction.results-*-of-*") prediction_summary_path = os.path.join(known_args.prediction_path, "prediction.summary.json") # This is apache-beam ptransform's convention diff --git a/airflow/providers/google/common/auth_backend/google_openid.py b/airflow/providers/google/common/auth_backend/google_openid.py index 8ce6ef5715433..496ac29616686 100644 --- a/airflow/providers/google/common/auth_backend/google_openid.py +++ b/airflow/providers/google/common/auth_backend/google_openid.py @@ -105,7 +105,7 @@ def _set_current_user(user): ctx.user = user -T = TypeVar("T", bound=Callable) # pylint: disable=invalid-name +T = TypeVar("T", bound=Callable) def requires_authentication(function: T): diff --git a/airflow/providers/google/common/hooks/base_google.py b/airflow/providers/google/common/hooks/base_google.py index 03cbebcac2ed6..f6e11a279cd24 100644 --- a/airflow/providers/google/common/hooks/base_google.py +++ b/airflow/providers/google/common/hooks/base_google.py @@ -97,22 +97,22 @@ def is_operation_in_progress_exception(exception: Exception) -> bool: return False -class retry_if_temporary_quota(tenacity.retry_if_exception): # pylint: disable=invalid-name +class retry_if_temporary_quota(tenacity.retry_if_exception): """Retries if there was an exception for exceeding the temporary quote limit.""" def __init__(self): super().__init__(is_soft_quota_exception) -class retry_if_operation_in_progress(tenacity.retry_if_exception): # pylint: disable=invalid-name +class retry_if_operation_in_progress(tenacity.retry_if_exception): """Retries if there was an exception for exceeding the temporary quote limit.""" def __init__(self): super().__init__(is_operation_in_progress_exception) -T = TypeVar("T", bound=Callable) # pylint: disable=invalid-name -RT = TypeVar('RT') # pylint: disable=invalid-name +T = TypeVar("T", bound=Callable) +RT = TypeVar('RT') class GoogleBaseHook(BaseHook): @@ -270,7 +270,7 @@ def _get_credentials_email(self) -> str: http_authorized = self._authorize() oauth2_client = discovery.build('oauth2', "v1", http=http_authorized, cache_discovery=False) - return oauth2_client.tokeninfo().execute()['email'] # pylint: disable=no-member + return oauth2_client.tokeninfo().execute()['email'] def _authorize(self) -> google_auth_httplib2.AuthorizedHttp: """ @@ -453,12 +453,8 @@ def provide_gcp_credential_file_as_context(self): It can be used to provide credentials for external programs (e.g. gcloud) that expect authorization file in ``GOOGLE_APPLICATION_CREDENTIALS`` environment variable. 
""" - key_path = self._get_field( - 'key_path', None - ) # type: Optional[str] # noqa: E501 # pylint: disable=protected-access - keyfile_dict = self._get_field( - 'keyfile_dict', None - ) # type: Optional[Dict] # noqa: E501 # pylint: disable=protected-access + key_path = self._get_field('key_path', None) # type: Optional[str] # + keyfile_dict = self._get_field('keyfile_dict', None) # type: Optional[Dict] if key_path and keyfile_dict: raise AirflowException( "The `keyfile_dict` and `key_path` fields are mutually exclusive. " diff --git a/airflow/providers/google/firebase/hooks/firestore.py b/airflow/providers/google/firebase/hooks/firestore.py index 86f2c3fe3ea7f..59b9ad968dada 100644 --- a/airflow/providers/google/firebase/hooks/firestore.py +++ b/airflow/providers/google/firebase/hooks/firestore.py @@ -86,9 +86,7 @@ def get_conn(self): # > Request contains an invalid argument. # At the same time, the Non-Authorized Client has no problems. non_authorized_conn = build("firestore", self.api_version, cache_discovery=False) - self._conn = build_from_document( - non_authorized_conn._rootDesc, http=http_authorized # pylint: disable=protected-access - ) + self._conn = build_from_document(non_authorized_conn._rootDesc, http=http_authorized) return self._conn @GoogleBaseHook.fallback_to_default_project_id @@ -113,7 +111,7 @@ def export_documents( name = f"projects/{project_id}/databases/{database_id}" operation = ( - service.projects() # pylint: disable=no-member + service.projects() .databases() .exportDocuments(name=name, body=body) .execute(num_retries=self.num_retries) @@ -135,7 +133,7 @@ def _wait_for_operation_to_complete(self, operation_name: str) -> None: service = self.get_conn() while True: operation_response = ( - service.projects() # pylint: disable=no-member + service.projects() .databases() .operations() .get(name=operation_name) diff --git a/airflow/providers/google/marketing_platform/hooks/analytics.py b/airflow/providers/google/marketing_platform/hooks/analytics.py index 70b6979bfe224..26a6b6c79902d 100644 --- a/airflow/providers/google/marketing_platform/hooks/analytics.py +++ b/airflow/providers/google/marketing_platform/hooks/analytics.py @@ -36,7 +36,7 @@ def _paginate(self, resource: Resource, list_args: Optional[Dict[str, Any]] = No result: List[dict] = [] while True: # start index has value 1 - request = resource.list(start_index=len(result) + 1, **list_args) # pylint: disable=no-member + request = resource.list(start_index=len(result) + 1, **list_args) response = request.execute(num_retries=self.num_retries) result.extend(response.get("items", [])) # result is the number of fetched links from Analytics @@ -62,7 +62,7 @@ def list_accounts(self) -> List[Dict[str, Any]]: """Lists accounts list from Google Analytics 360.""" self.log.info("Retrieving accounts list...") conn = self.get_conn() - accounts = conn.management().accounts() # pylint: disable=no-member + accounts = conn.management().accounts() result = self._paginate(accounts) return result @@ -84,7 +84,7 @@ def get_ad_words_link( """ self.log.info("Retrieving ad words links...") ad_words_link = ( - self.get_conn() # pylint: disable=no-member + self.get_conn() .management() .webPropertyAdWordsLinks() .get( @@ -110,7 +110,7 @@ def list_ad_words_links(self, account_id: str, web_property_id: str) -> List[Dic """ self.log.info("Retrieving ad words list...") conn = self.get_conn() - ads_links = conn.management().webPropertyAdWordsLinks() # pylint: disable=no-member + ads_links = conn.management().webPropertyAdWordsLinks() 
list_args = {"accountId": account_id, "webPropertyId": web_property_id} result = self._paginate(ads_links, list_args) return result @@ -151,7 +151,7 @@ def upload_data( custom_data_source_id, ) - self.get_conn().management().uploads().uploadData( # pylint: disable=no-member + self.get_conn().management().uploads().uploadData( accountId=account_id, webPropertyId=web_property_id, customDataSourceId=custom_data_source_id, @@ -185,7 +185,7 @@ def delete_upload_data( custom_data_source_id, ) - self.get_conn().management().uploads().deleteUploadData( # pylint: disable=no-member + self.get_conn().management().uploads().deleteUploadData( accountId=account_id, webPropertyId=web_property_id, customDataSourceId=custom_data_source_id, @@ -210,7 +210,7 @@ def list_uploads(self, account_id, web_property_id, custom_data_source_id) -> Li custom_data_source_id, ) - uploads = self.get_conn().management().uploads() # pylint: disable=no-member + uploads = self.get_conn().management().uploads() list_args = { "accountId": account_id, "webPropertyId": web_property_id, diff --git a/airflow/providers/google/marketing_platform/hooks/campaign_manager.py b/airflow/providers/google/marketing_platform/hooks/campaign_manager.py index 280cd1cc9a8f4..5f6d25d33d089 100644 --- a/airflow/providers/google/marketing_platform/hooks/campaign_manager.py +++ b/airflow/providers/google/marketing_platform/hooks/campaign_manager.py @@ -66,7 +66,7 @@ def delete_report(self, profile_id: str, report_id: str) -> Any: :type report_id: str """ response = ( - self.get_conn() # pylint: disable=no-member + self.get_conn() .reports() .delete(profileId=profile_id, reportId=report_id) .execute(num_retries=self.num_retries) @@ -83,7 +83,7 @@ def insert_report(self, profile_id: str, report: Dict[str, Any]) -> Any: :type report: Dict[str, Any] """ response = ( - self.get_conn() # pylint: disable=no-member + self.get_conn() .reports() .insert(profileId=profile_id, body=report) .execute(num_retries=self.num_retries) @@ -114,7 +114,7 @@ def list_reports( """ reports: List[dict] = [] conn = self.get_conn() - request = conn.reports().list( # pylint: disable=no-member + request = conn.reports().list( profileId=profile_id, maxResults=max_results, scope=scope, @@ -124,9 +124,7 @@ def list_reports( while request is not None: response = request.execute(num_retries=self.num_retries) reports.extend(response.get("items", [])) - request = conn.reports().list_next( # pylint: disable=no-member - previous_request=request, previous_response=response - ) + request = conn.reports().list_next(previous_request=request, previous_response=response) return reports @@ -143,7 +141,7 @@ def patch_report(self, profile_id: str, report_id: str, update_mask: dict) -> An :type update_mask: Dict """ response = ( - self.get_conn() # pylint: disable=no-member + self.get_conn() .reports() .patch(profileId=profile_id, reportId=report_id, body=update_mask) .execute(num_retries=self.num_retries) @@ -162,7 +160,7 @@ def run_report(self, profile_id: str, report_id: str, synchronous: Optional[bool :type synchronous: Optional[bool] """ response = ( - self.get_conn() # pylint: disable=no-member + self.get_conn() .reports() .run(profileId=profile_id, reportId=report_id, synchronous=synchronous) .execute(num_retries=self.num_retries) @@ -179,7 +177,7 @@ def update_report(self, profile_id: str, report_id: str) -> Any: :type report_id: str """ response = ( - self.get_conn() # pylint: disable=no-member + self.get_conn() .reports() .update(profileId=profile_id, reportId=report_id) 
.execute(num_retries=self.num_retries) @@ -198,7 +196,7 @@ def get_report(self, file_id: str, profile_id: str, report_id: str) -> Any: :type file_id: str """ response = ( - self.get_conn() # pylint: disable=no-member + self.get_conn() .reports() .files() .get(fileId=file_id, profileId=profile_id, reportId=report_id) @@ -219,7 +217,7 @@ def get_report_file(self, file_id: str, profile_id: str, report_id: str) -> http :return: googleapiclient.http.HttpRequest """ request = ( - self.get_conn() # pylint: disable=no-member + self.get_conn() .reports() .files() .get_media(fileId=file_id, profileId=profile_id, reportId=report_id) @@ -275,7 +273,7 @@ def conversions_batch_insert( :type max_failed_inserts: int """ response = ( - self.get_conn() # pylint: disable=no-member + self.get_conn() .conversions() .batchinsert( profileId=profile_id, @@ -325,7 +323,7 @@ def conversions_batch_update( :type max_failed_updates: int """ response = ( - self.get_conn() # pylint: disable=no-member + self.get_conn() .conversions() .batchupdate( profileId=profile_id, diff --git a/airflow/providers/google/marketing_platform/hooks/display_video.py b/airflow/providers/google/marketing_platform/hooks/display_video.py index 4cb8c36d2e09f..498a8bee043c7 100644 --- a/airflow/providers/google/marketing_platform/hooks/display_video.py +++ b/airflow/providers/google/marketing_platform/hooks/display_video.py @@ -95,12 +95,7 @@ def create_query(self, query: Dict[str, Any]) -> dict: :param query: Query object to be passed to request body. :type query: Dict[str, Any] """ - response = ( - self.get_conn() # pylint: disable=no-member - .queries() - .createquery(body=query) - .execute(num_retries=self.num_retries) - ) + response = self.get_conn().queries().createquery(body=query).execute(num_retries=self.num_retries) return response def delete_query(self, query_id: str) -> None: @@ -110,12 +105,7 @@ def delete_query(self, query_id: str) -> None: :param query_id: Query ID to delete. :type query_id: str """ - ( - self.get_conn() # pylint: disable=no-member - .queries() - .deletequery(queryId=query_id) - .execute(num_retries=self.num_retries) - ) + (self.get_conn().queries().deletequery(queryId=query_id).execute(num_retries=self.num_retries)) def get_query(self, query_id: str) -> dict: """ @@ -124,24 +114,14 @@ def get_query(self, query_id: str) -> dict: :param query_id: Query ID to retrieve. 
:type query_id: str """ - response = ( - self.get_conn() # pylint: disable=no-member - .queries() - .getquery(queryId=query_id) - .execute(num_retries=self.num_retries) - ) + response = self.get_conn().queries().getquery(queryId=query_id).execute(num_retries=self.num_retries) return response def list_queries( self, ) -> List[Dict]: """Retrieves stored queries.""" - response = ( - self.get_conn() # pylint: disable=no-member - .queries() - .listqueries() - .execute(num_retries=self.num_retries) - ) + response = self.get_conn().queries().listqueries().execute(num_retries=self.num_retries) return response.get('queries', []) def run_query(self, query_id: str, params: Dict[str, Any]) -> None: @@ -154,7 +134,7 @@ def run_query(self, query_id: str, params: Dict[str, Any]) -> None: :type params: Dict[str, Any] """ ( - self.get_conn() # pylint: disable=no-member + self.get_conn() .queries() .runquery(queryId=query_id, body=params) .execute(num_retries=self.num_retries) @@ -176,7 +156,7 @@ def upload_line_items(self, line_items: Any) -> List[Dict[str, Any]]: } response = ( - self.get_conn() # pylint: disable=no-member + self.get_conn() .lineitems() .uploadlineitems(body=request_body) .execute(num_retries=self.num_retries) @@ -193,7 +173,7 @@ def download_line_items(self, request_body: Dict[str, Any]) -> List[Any]: :type request_body: Dict[str, Any] """ response = ( - self.get_conn() # pylint: disable=no-member + self.get_conn() .lineitems() .downloadlineitems(body=request_body) .execute(num_retries=self.num_retries) @@ -211,7 +191,7 @@ def create_sdf_download_operation(self, body_request: Dict[str, Any]) -> Dict[st https://developers.google.com/display-video/api/reference/rest/v1/sdfdownloadtasks/create """ result = ( - self.get_conn_to_display_video() # pylint: disable=no-member + self.get_conn_to_display_video() .sdfdownloadtasks() .create(body=body_request) .execute(num_retries=self.num_retries) @@ -226,7 +206,7 @@ def get_sdf_download_operation(self, operation_name: str): :type operation_name: str """ result = ( - self.get_conn_to_display_video() # pylint: disable=no-member + self.get_conn_to_display_video() .sdfdownloadtasks() .operations() .get(name=operation_name) @@ -241,9 +221,5 @@ def download_media(self, resource_name: str): :param resource_name: of the media that is being downloaded. :type resource_name: str """ - request = ( - self.get_conn_to_display_video() # pylint: disable=no-member - .media() - .download_media(resource_name=resource_name) - ) + request = self.get_conn_to_display_video().media().download_media(resource_name=resource_name) return request diff --git a/airflow/providers/google/marketing_platform/hooks/search_ads.py b/airflow/providers/google/marketing_platform/hooks/search_ads.py index 760ab927f90ff..57b8bca1c503f 100644 --- a/airflow/providers/google/marketing_platform/hooks/search_ads.py +++ b/airflow/providers/google/marketing_platform/hooks/search_ads.py @@ -61,12 +61,7 @@ def insert_report(self, report: Dict[str, Any]) -> Any: :param report: Report to be generated. :type report: Dict[str, Any] """ - response = ( - self.get_conn() # pylint: disable=no-member - .reports() - .request(body=report) - .execute(num_retries=self.num_retries) - ) + response = self.get_conn().reports().request(body=report).execute(num_retries=self.num_retries) return response def get(self, report_id: str) -> Any: @@ -76,12 +71,7 @@ def get(self, report_id: str) -> Any: :param report_id: ID of the report request being polled. 
:type report_id: str """ - response = ( - self.get_conn() # pylint: disable=no-member - .reports() - .get(reportId=report_id) - .execute(num_retries=self.num_retries) - ) + response = self.get_conn().reports().get(reportId=report_id).execute(num_retries=self.num_retries) return response def get_file(self, report_fragment: int, report_id: str) -> Any: @@ -94,7 +84,7 @@ def get_file(self, report_fragment: int, report_id: str) -> Any: :type report_id: str """ response = ( - self.get_conn() # pylint: disable=no-member + self.get_conn() .reports() .getFile(reportFragment=report_fragment, reportId=report_id) .execute(num_retries=self.num_retries) diff --git a/airflow/providers/google/marketing_platform/operators/campaign_manager.py b/airflow/providers/google/marketing_platform/operators/campaign_manager.py index 6a77d9fb3bd07..ccca975729209 100644 --- a/airflow/providers/google/marketing_platform/operators/campaign_manager.py +++ b/airflow/providers/google/marketing_platform/operators/campaign_manager.py @@ -181,7 +181,7 @@ class GoogleCampaignManagerDownloadReportOperator(BaseOperator): "impersonation_chain", ) - def __init__( # pylint: disable=too-many-arguments + def __init__( self, *, profile_id: str, diff --git a/airflow/providers/google/suite/hooks/drive.py b/airflow/providers/google/suite/hooks/drive.py index ad2653b96da2e..7eac490dc85b0 100644 --- a/airflow/providers/google/suite/hooks/drive.py +++ b/airflow/providers/google/suite/hooks/drive.py @@ -89,7 +89,7 @@ def _ensure_folders_exists(self, path: str) -> str: f"'{current_parent}' in parents", ] result = ( - service.files() # pylint: disable=no-member + service.files() .list(q=" and ".join(conditions), spaces="drive", fields="files(id, name)") .execute(num_retries=self.num_retries) ) @@ -111,7 +111,7 @@ def _ensure_folders_exists(self, path: str) -> str: "parents": [current_parent], } file = ( - service.files() # pylint: disable=no-member + service.files() .create(body=file_metadata, fields="id") .execute(num_retries=self.num_retries) ) @@ -131,7 +131,7 @@ def get_media_request(self, file_id: str) -> HttpRequest: :rtype: HttpRequest """ service = self.get_conn() - request = service.files().get_media(fileId=file_id) # pylint: disable=no-member + request = service.files().get_media(fileId=file_id) return request def exists(self, folder_id: str, file_name: str, drive_id: Optional[str] = None): @@ -168,7 +168,7 @@ def get_file_id(self, folder_id: str, file_name: str, drive_id: Optional[str] = service = self.get_conn() if drive_id: files = ( - service.files() # pylint: disable=no-member + service.files() .list( q=query, spaces="drive", @@ -183,7 +183,7 @@ def get_file_id(self, folder_id: str, file_name: str, drive_id: Optional[str] = ) else: files = ( - service.files() # pylint: disable=no-member + service.files() .list(q=query, spaces="drive", fields="files(id, mimeType)", orderBy="modifiedTime desc") .execute(num_retries=self.num_retries) ) @@ -213,7 +213,7 @@ def upload_file(self, local_location: str, remote_location: str) -> str: file_metadata = {"name": file_name, "parents": [parent]} media = MediaFileUpload(local_location) file = ( - service.files() # pylint: disable=no-member + service.files() .create(body=file_metadata, media_body=media, fields="id") .execute(num_retries=self.num_retries) ) diff --git a/airflow/providers/google/suite/hooks/sheets.py b/airflow/providers/google/suite/hooks/sheets.py index 2c5723189a77f..ce4218c36fbd5 100644 --- a/airflow/providers/google/suite/hooks/sheets.py +++ 
b/airflow/providers/google/suite/hooks/sheets.py @@ -110,7 +110,7 @@ def get_values( :rtype: List """ service = self.get_conn() - # pylint: disable=no-member + response = ( service.spreadsheets() .values() @@ -155,7 +155,7 @@ def batch_get_values( :rtype: Dict """ service = self.get_conn() - # pylint: disable=no-member + response = ( service.spreadsheets() .values() @@ -212,7 +212,7 @@ def update_values( """ service = self.get_conn() body = {"range": range_, "majorDimension": major_dimension, "values": values} - # pylint: disable=no-member + response = ( service.spreadsheets() .values() @@ -289,7 +289,7 @@ def batch_update_values( "responseValueRenderOption": value_render_option, "responseDateTimeRenderOption": date_time_render_option, } - # pylint: disable=no-member + response = ( service.spreadsheets() .values() @@ -344,7 +344,7 @@ def append_values( """ service = self.get_conn() body = {"range": range_, "majorDimension": major_dimension, "values": values} - # pylint: disable=no-member + response = ( service.spreadsheets() .values() @@ -376,7 +376,7 @@ def clear(self, spreadsheet_id: str, range_: str) -> dict: :rtype: Dict """ service = self.get_conn() - # pylint: disable=no-member + response = ( service.spreadsheets() .values() @@ -400,7 +400,7 @@ def batch_clear(self, spreadsheet_id: str, ranges: list) -> dict: """ service = self.get_conn() body = {"ranges": ranges} - # pylint: disable=no-member + response = ( service.spreadsheets() .values() @@ -419,7 +419,7 @@ def get_spreadsheet(self, spreadsheet_id: str): :return: An spreadsheet that matches the sheet filter. """ response = ( - self.get_conn() # pylint: disable=no-member + self.get_conn() .spreadsheets() .get(spreadsheetId=spreadsheet_id) .execute(num_retries=self.num_retries) @@ -459,7 +459,7 @@ def create_spreadsheet(self, spreadsheet: Dict[str, Any]) -> Dict[str, Any]: :return: An spreadsheet object. """ self.log.info("Creating spreadsheet: %s", spreadsheet['properties']['title']) - # pylint: disable=no-member + response = ( self.get_conn().spreadsheets().create(body=spreadsheet).execute(num_retries=self.num_retries) ) diff --git a/airflow/providers/grpc/hooks/grpc.py b/airflow/providers/grpc/hooks/grpc.py index 299a5a2a5e4e2..e58fdc49abad4 100644 --- a/airflow/providers/grpc/hooks/grpc.py +++ b/airflow/providers/grpc/hooks/grpc.py @@ -148,8 +148,8 @@ def run( status code: %s, error details: %s", stub.__class__.__name__, call_func, - ex.code(), # pylint: disable=no-member - ex.details(), # pylint: disable=no-member + ex.code(), + ex.details(), ) raise ex diff --git a/airflow/providers/hashicorp/_internal_client/vault_client.py b/airflow/providers/hashicorp/_internal_client/vault_client.py index 5b6d8de38dacf..64dc0028abac3 100644 --- a/airflow/providers/hashicorp/_internal_client/vault_client.py +++ b/airflow/providers/hashicorp/_internal_client/vault_client.py @@ -46,7 +46,7 @@ ] -class _VaultClient(LoggingMixin): # pylint: disable=too-many-instance-attributes +class _VaultClient(LoggingMixin): """ Retrieves Authenticated client from Hashicorp Vault. 
This is purely internal class promoting authentication code reuse between the Hook and the SecretBackend, it should not be used directly in @@ -109,7 +109,7 @@ class _VaultClient(LoggingMixin): # pylint: disable=too-many-instance-attribute :type radius_port: int """ - def __init__( # pylint: disable=too-many-arguments + def __init__( self, url: Optional[str] = None, auth_type: str = 'token', @@ -275,7 +275,7 @@ def _auth_github(self, _client: hvac.Client) -> None: _client.auth.github.login(token=self.token) def _auth_gcp(self, _client: hvac.Client) -> None: - from airflow.providers.google.cloud.utils.credentials_provider import ( # noqa + from airflow.providers.google.cloud.utils.credentials_provider import ( _get_scopes, get_credentials_and_project_id, ) diff --git a/airflow/providers/hashicorp/hooks/vault.py b/airflow/providers/hashicorp/hooks/vault.py index f14c022e2e571..a17eceda31946 100644 --- a/airflow/providers/hashicorp/hooks/vault.py +++ b/airflow/providers/hashicorp/hooks/vault.py @@ -24,7 +24,7 @@ from requests import Response from airflow.hooks.base import BaseHook -from airflow.providers.hashicorp._internal_client.vault_client import ( # noqa +from airflow.providers.hashicorp._internal_client.vault_client import ( DEFAULT_KUBERNETES_JWT_PATH, DEFAULT_KV_ENGINE_VERSION, _VaultClient, @@ -115,7 +115,7 @@ class VaultHook(BaseHook): conn_type = 'vault' hook_name = 'Hashicorp Vault' - def __init__( # pylint: disable=too-many-arguments + def __init__( self, vault_conn_id: str = default_conn_name, auth_type: Optional[str] = None, diff --git a/airflow/providers/hashicorp/secrets/vault.py b/airflow/providers/hashicorp/secrets/vault.py index f745c8361aa86..b5e850277a530 100644 --- a/airflow/providers/hashicorp/secrets/vault.py +++ b/airflow/providers/hashicorp/secrets/vault.py @@ -18,12 +18,11 @@ """Objects relating to sourcing connections & variables from Hashicorp Vault""" from typing import Optional -from airflow.providers.hashicorp._internal_client.vault_client import _VaultClient # noqa +from airflow.providers.hashicorp._internal_client.vault_client import _VaultClient from airflow.secrets import BaseSecretsBackend from airflow.utils.log.logging_mixin import LoggingMixin -# pylint: disable=too-many-instance-attributes,too-many-locals class VaultBackend(BaseSecretsBackend, LoggingMixin): """ Retrieves Connections and Variables from Hashicorp Vault. 
@@ -109,7 +108,7 @@ class VaultBackend(BaseSecretsBackend, LoggingMixin): :type radius_port: str """ - def __init__( # pylint: disable=too-many-arguments + def __init__( self, connections_path: str = 'connections', variables_path: str = 'variables', diff --git a/airflow/providers/jenkins/operators/jenkins_job_trigger.py b/airflow/providers/jenkins/operators/jenkins_job_trigger.py index 7044486ee4452..6d8b8d92d3eab 100644 --- a/airflow/providers/jenkins/operators/jenkins_job_trigger.py +++ b/airflow/providers/jenkins/operators/jenkins_job_trigger.py @@ -211,7 +211,7 @@ def execute(self, context: Mapping[Any, Any]) -> Optional[str]: time.sleep(self.sleep_time) keep_polling_job = True build_info = None - # pylint: disable=too-many-nested-blocks + while keep_polling_job: try: build_info = jenkins_server.get_build_info(name=self.job_name, number=build_number) @@ -228,7 +228,7 @@ def execute(self, context: Mapping[Any, Any]) -> Optional[str]: self.log.info('Waiting for job to complete : %s , build %s', self.job_name, build_number) time.sleep(self.sleep_time) except jenkins.NotFoundException as err: - # pylint: disable=no-member + raise AirflowException(f'Jenkins job status check failed. Final error was: {err.resp.status}') except jenkins.JenkinsException as err: raise AirflowException( diff --git a/airflow/providers/jira/sensors/jira.py b/airflow/providers/jira/sensors/jira.py index 0d9321206da37..30e272758bfa0 100644 --- a/airflow/providers/jira/sensors/jira.py +++ b/airflow/providers/jira/sensors/jira.py @@ -113,7 +113,7 @@ def poke(self, context: Dict) -> Any: def issue_field_checker(self, issue: Issue) -> Optional[bool]: """Check issue using different conditions to prepare to evaluate sensor.""" result = None - try: # pylint: disable=too-many-nested-blocks + try: if issue is not None and self.field is not None and self.expected_value is not None: field_val = getattr(issue.fields, self.field) @@ -133,7 +133,7 @@ def issue_field_checker(self, issue: Issue) -> Optional[bool]: except JIRAError as jira_error: self.log.error("Jira error while checking with expected value: %s", jira_error) - except Exception: # pylint: disable=broad-except + except Exception: self.log.exception("Error while checking with expected value %s:", self.expected_value) if result is True: self.log.info( diff --git a/airflow/providers/microsoft/azure/hooks/azure_batch.py b/airflow/providers/microsoft/azure/hooks/azure_batch.py index f9f0db0376a88..a8baabdfca17f 100644 --- a/airflow/providers/microsoft/azure/hooks/azure_batch.py +++ b/airflow/providers/microsoft/azure/hooks/azure_batch.py @@ -108,7 +108,7 @@ def _get_required_param(name): batch_client = BatchServiceClient(credentials, batch_url=batch_account_url) return batch_client - def configure_pool( # pylint: disable=too-many-arguments + def configure_pool( self, pool_id: str, vm_size: Optional[str] = None, diff --git a/airflow/providers/microsoft/azure/hooks/azure_data_factory.py b/airflow/providers/microsoft/azure/hooks/azure_data_factory.py index 9486ac2acbece..8a1cc36090ec4 100644 --- a/airflow/providers/microsoft/azure/hooks/azure_data_factory.py +++ b/airflow/providers/microsoft/azure/hooks/azure_data_factory.py @@ -70,7 +70,7 @@ def bind_argument(arg, default_key): return wrapper -class AzureDataFactoryHook(BaseHook): # pylint: disable=too-many-public-methods +class AzureDataFactoryHook(BaseHook): """ A hook to interact with Azure Data Factory. 
diff --git a/airflow/providers/microsoft/azure/hooks/wasb.py b/airflow/providers/microsoft/azure/hooks/wasb.py index eccfe1ae5aa61..a76bc364f2dd7 100644 --- a/airflow/providers/microsoft/azure/hooks/wasb.py +++ b/airflow/providers/microsoft/azure/hooks/wasb.py @@ -107,7 +107,7 @@ def __init__(self, wasb_conn_id: str = default_conn_name, public_read: bool = Fa self.public_read = public_read self.connection = self.get_conn() - def get_conn(self) -> BlobServiceClient: # pylint: disable=too-many-return-statements + def get_conn(self) -> BlobServiceClient: """Return the BlobServiceClient object.""" conn = self.get_connection(self.conn_id) extra = conn.extra_dejson or {} diff --git a/airflow/providers/microsoft/azure/log/wasb_task_handler.py b/airflow/providers/microsoft/azure/log/wasb_task_handler.py index 5b5e8a3682149..f1d7f0be52cc5 100644 --- a/airflow/providers/microsoft/azure/log/wasb_task_handler.py +++ b/airflow/providers/microsoft/azure/log/wasb_task_handler.py @@ -141,7 +141,7 @@ def wasb_log_exists(self, remote_log_location: str) -> bool: """ try: return self.hook.check_for_blob(self.wasb_container, remote_log_location) - # pylint: disable=broad-except + except Exception as e: self.log.debug('Exception when trying to check remote location: "%s"', e) return False diff --git a/airflow/providers/microsoft/azure/operators/azure_batch.py b/airflow/providers/microsoft/azure/operators/azure_batch.py index 279c3b119a872..78b9c46583b5c 100644 --- a/airflow/providers/microsoft/azure/operators/azure_batch.py +++ b/airflow/providers/microsoft/azure/operators/azure_batch.py @@ -25,7 +25,6 @@ from airflow.providers.microsoft.azure.hooks.azure_batch import AzureBatchHook -# pylint: disable=too-many-instance-attributes class AzureBatchOperator(BaseOperator): """ Executes a job on Azure Batch Service @@ -133,7 +132,7 @@ class AzureBatchOperator(BaseOperator): def __init__( self, - *, # pylint: disable=too-many-arguments,too-many-locals + *, batch_pool_id: str, batch_pool_vm_size: str, batch_job_id: str, diff --git a/airflow/providers/microsoft/azure/operators/azure_container_instances.py b/airflow/providers/microsoft/azure/operators/azure_container_instances.py index fa7b83664900e..1229312b33fcf 100644 --- a/airflow/providers/microsoft/azure/operators/azure_container_instances.py +++ b/airflow/providers/microsoft/azure/operators/azure_container_instances.py @@ -52,7 +52,6 @@ DEFAULT_CPU = 1.0 -# pylint: disable=too-many-instance-attributes class AzureContainerInstancesOperator(BaseOperator): """ Start a container on Azure Container Instances @@ -135,7 +134,6 @@ class AzureContainerInstancesOperator(BaseOperator): template_fields = ('name', 'image', 'command', 'environment_variables') template_fields_renderers = {"command": "bash", "environment_variables": "json"} - # pylint: disable=too-many-arguments def __init__( self, *, @@ -294,7 +292,7 @@ def on_kill(self) -> None: self.log.info("Deleting container group") try: self._ci_hook.delete(self.resource_group, self.name) - except Exception: # pylint: disable=broad-except + except Exception: self.log.exception("Could not delete container group") def _monitor_logging(self, resource_group: str, name: str) -> int: @@ -302,7 +300,6 @@ def _monitor_logging(self, resource_group: str, name: str) -> int: last_message_logged = None last_line_logged = None - # pylint: disable=too-many-nested-blocks while True: try: cg_state = self._ci_hook.get_state(resource_group, name) @@ -357,7 +354,7 @@ def _monitor_logging(self, resource_group: str, name: str) -> int: return 1 
else: self.log.exception("Exception while getting container groups") - except Exception: # pylint: disable=broad-except + except Exception: self.log.exception("Exception while getting container groups") sleep(1) diff --git a/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py b/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py index 347473f2550fd..6a533b4cb25fe 100644 --- a/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py +++ b/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py @@ -59,7 +59,6 @@ class OracleToAzureDataLakeOperator(BaseOperator): template_fields_renderers = {"sql_params": "py"} ui_color = '#e08c8c' - # pylint: disable=too-many-arguments def __init__( self, *, diff --git a/airflow/providers/microsoft/mssql/hooks/mssql.py b/airflow/providers/microsoft/mssql/hooks/mssql.py index eebce5eb466cd..75241f1f296d3 100644 --- a/airflow/providers/microsoft/mssql/hooks/mssql.py +++ b/airflow/providers/microsoft/mssql/hooks/mssql.py @@ -15,7 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -# pylint: disable=no-member + """Microsoft SQLServer hook module""" import pymssql @@ -38,13 +38,11 @@ def __init__(self, *args, **kwargs) -> None: def get_conn( self, - ) -> pymssql.connect: # pylint: disable=protected-access,c-extension-no-member,no-member + ) -> pymssql.connect: """Returns a mssql connection object""" - conn = self.get_connection( - self.mssql_conn_id # type: ignore[attr-defined] # pylint: disable=no-member - ) - # pylint: disable=c-extension-no-member - conn = pymssql.connect( # pylint: disable=no-member + conn = self.get_connection(self.mssql_conn_id) # type: ignore[attr-defined] + + conn = pymssql.connect( server=conn.host, user=conn.login, password=conn.password, @@ -55,10 +53,10 @@ def get_conn( def set_autocommit( self, - conn: pymssql.connect, # pylint: disable=c-extension-no-member, no-member + conn: pymssql.connect, autocommit: bool, ) -> None: conn.autocommit(autocommit) - def get_autocommit(self, conn: pymssql.connect): # pylint: disable=c-extension-no-member, no-member + def get_autocommit(self, conn: pymssql.connect): return conn.autocommit_state diff --git a/airflow/providers/microsoft/winrm/hooks/winrm.py b/airflow/providers/microsoft/winrm/hooks/winrm.py index 64f15a5d32b51..10895d5c355e2 100644 --- a/airflow/providers/microsoft/winrm/hooks/winrm.py +++ b/airflow/providers/microsoft/winrm/hooks/winrm.py @@ -31,7 +31,8 @@ # TODO: Fixme please - I have too complex implementation -# pylint: disable=too-many-instance-attributes,too-many-arguments,too-many-branches + + class WinRMHook(BaseHook): """ Hook for winrm remote execution using pywinrm. 
diff --git a/airflow/providers/microsoft/winrm/operators/winrm.py b/airflow/providers/microsoft/winrm/operators/winrm.py index 816df703876e0..caf04703da67e 100644 --- a/airflow/providers/microsoft/winrm/operators/winrm.py +++ b/airflow/providers/microsoft/winrm/operators/winrm.py @@ -94,7 +94,6 @@ def execute(self, context: dict) -> Union[list, str]: winrm_client = self.winrm_hook.get_conn() - # pylint: disable=too-many-nested-blocks try: if self.ps_path is not None: self.log.info("Running command as powershell script: '%s'...", self.command) @@ -114,7 +113,7 @@ def execute(self, context: dict) -> Union[list, str]: command_done = False while not command_done: try: - # pylint: disable=protected-access + ( stdout, stderr, diff --git a/airflow/providers/mysql/hooks/mysql.py b/airflow/providers/mysql/hooks/mysql.py index ec56c561f9d0e..c1e247f981f24 100644 --- a/airflow/providers/mysql/hooks/mysql.py +++ b/airflow/providers/mysql/hooks/mysql.py @@ -160,9 +160,7 @@ def get_conn(self) -> MySQLConnectionTypes: :return: a mysql connection object """ - conn = self.connection or self.get_connection( - getattr(self, self.conn_name_attr) - ) # pylint: disable=no-member + conn = self.connection or self.get_connection(getattr(self, self.conn_name_attr)) client_name = conn.extra_dejson.get('client', 'mysqlclient') @@ -173,10 +171,10 @@ def get_conn(self) -> MySQLConnectionTypes: return MySQLdb.connect(**conn_config) if client_name == 'mysql-connector-python': - import mysql.connector # pylint: disable=no-name-in-module + import mysql.connector conn_config = self._get_conn_config_mysql_connector_python(conn) - return mysql.connector.connect(**conn_config) # pylint: disable=no-member + return mysql.connector.connect(**conn_config) raise ValueError('Unknown MySQL client name provided!') @@ -213,11 +211,9 @@ def bulk_dump(self, table: str, tmp_file: str) -> None: conn.commit() @staticmethod - def _serialize_cell( - cell: object, conn: Optional[Connection] = None - ) -> object: # pylint: disable=signature-differs # noqa: D403 + def _serialize_cell(cell: object, conn: Optional[Connection] = None) -> object: """ - MySQLdb converts an argument to a literal + The package MySQLdb converts an argument to a literal when passing those separately to execute. Hence, this method does nothing. :param cell: The cell to insert into the table diff --git a/airflow/providers/mysql/transfers/vertica_to_mysql.py b/airflow/providers/mysql/transfers/vertica_to_mysql.py index 03747a0fd4873..b67eeffa3b9b7 100644 --- a/airflow/providers/mysql/transfers/vertica_to_mysql.py +++ b/airflow/providers/mysql/transfers/vertica_to_mysql.py @@ -141,7 +141,7 @@ def execute(self, context): self.log.info("Inserting rows into MySQL...") mysql.insert_rows(table=self.mysql_table, rows=result, target_fields=selected_columns) self.log.info("Inserted rows into MySQL %s", count) - except (MySQLdb.Error, MySQLdb.Warning): # pylint: disable=no-member + except (MySQLdb.Error, MySQLdb.Warning): self.log.info("Inserted rows into MySQL 0") raise diff --git a/airflow/providers/odbc/hooks/odbc.py b/airflow/providers/odbc/hooks/odbc.py index 73210fa4a0d91..9805b5e29e34f 100644 --- a/airflow/providers/odbc/hooks/odbc.py +++ b/airflow/providers/odbc/hooks/odbc.py @@ -1,4 +1,3 @@ -# pylint: disable=c-extension-no-member # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. 
See the NOTICE file # distributed with this work for additional information diff --git a/airflow/providers/opsgenie/operators/opsgenie_alert.py b/airflow/providers/opsgenie/operators/opsgenie_alert.py index 1cdca1c67604b..47e5497efbf09 100644 --- a/airflow/providers/opsgenie/operators/opsgenie_alert.py +++ b/airflow/providers/opsgenie/operators/opsgenie_alert.py @@ -68,7 +68,6 @@ class OpsgenieAlertOperator(BaseOperator): template_fields = ('message', 'alias', 'description', 'entity', 'priority', 'note') - # pylint: disable=too-many-arguments def __init__( self, *, diff --git a/airflow/providers/oracle/hooks/oracle.py b/airflow/providers/oracle/hooks/oracle.py index d11f77d9e50d6..59196727d2363 100644 --- a/airflow/providers/oracle/hooks/oracle.py +++ b/airflow/providers/oracle/hooks/oracle.py @@ -41,7 +41,6 @@ class OracleHook(DbApiHook): supports_autocommit = False - # pylint: disable=c-extension-no-member def get_conn(self) -> 'OracleHook': """ Returns a oracle connection object @@ -74,9 +73,7 @@ def get_conn(self) -> 'OracleHook': """ - conn = self.get_connection( - self.oracle_conn_id # type: ignore[attr-defined] # pylint: disable=no-member - ) + conn = self.get_connection(self.oracle_conn_id) # type: ignore[attr-defined] conn_config = {'user': conn.login, 'password': conn.password} sid = conn.extra_dejson.get('sid') mod = conn.extra_dejson.get('module') diff --git a/airflow/providers/oracle/transfers/oracle_to_oracle.py b/airflow/providers/oracle/transfers/oracle_to_oracle.py index fd4b939ac6ef4..7e116ea702c50 100644 --- a/airflow/providers/oracle/transfers/oracle_to_oracle.py +++ b/airflow/providers/oracle/transfers/oracle_to_oracle.py @@ -66,7 +66,6 @@ def __init__( self.source_sql_params = source_sql_params self.rows_chunk = rows_chunk - # pylint: disable=unused-argument def _execute(self, src_hook, dest_hook, context) -> None: with src_hook.get_conn() as src_conn: cursor = src_conn.cursor() diff --git a/airflow/providers/pagerduty/hooks/pagerduty.py b/airflow/providers/pagerduty/hooks/pagerduty.py index e42357a718177..e845cfcc1b25e 100644 --- a/airflow/providers/pagerduty/hooks/pagerduty.py +++ b/airflow/providers/pagerduty/hooks/pagerduty.py @@ -66,7 +66,6 @@ def get_session(self) -> pdpyras.APISession: self._session = pdpyras.APISession(self.token) return self._session - # pylint: disable=too-many-arguments def create_event( self, summary: str, diff --git a/airflow/providers/postgres/hooks/postgres.py b/airflow/providers/postgres/hooks/postgres.py index 0b512d78c6537..a401a20ae879a 100644 --- a/airflow/providers/postgres/hooks/postgres.py +++ b/airflow/providers/postgres/hooks/postgres.py @@ -144,7 +144,6 @@ def bulk_dump(self, table: str, tmp_file: str) -> None: """Dumps a database table into a tab-delimited file""" self.copy_expert(f"COPY {table} TO STDOUT", tmp_file) - # pylint: disable=signature-differs @staticmethod def _serialize_cell(cell: object, conn: Optional[connection] = None) -> object: """ diff --git a/airflow/providers/presto/hooks/presto.py b/airflow/providers/presto/hooks/presto.py index b65c3225304ee..086c21b8821ad 100644 --- a/airflow/providers/presto/hooks/presto.py +++ b/airflow/providers/presto/hooks/presto.py @@ -60,9 +60,7 @@ class PrestoHook(DbApiHook): def get_conn(self) -> Connection: """Returns a connection object""" - db = self.get_connection( - self.presto_conn_id # type: ignore[attr-defined] # pylint: disable=no-member - ) + db = self.get_connection(self.presto_conn_id) # type: ignore[attr-defined] extra = db.extra_dejson auth = None if 
db.password and extra.get('auth') == 'kerberos': @@ -99,15 +97,13 @@ def get_conn(self) -> Connection: # Unfortunately verify parameter is available via public API. # The PR is merged in the presto library, but has not been released. # See: https://github.com/prestosql/presto-python-client/pull/31 - presto_conn._http_session.verify = _boolify(extra['verify']) # pylint: disable=protected-access + presto_conn._http_session.verify = _boolify(extra['verify']) return presto_conn def get_isolation_level(self) -> Any: """Returns an isolation level""" - db = self.get_connection( - self.presto_conn_id # type: ignore[attr-defined] # pylint: disable=no-member - ) + db = self.get_connection(self.presto_conn_id) # type: ignore[attr-defined] isolation_level = db.extra_dejson.get('isolation_level', 'AUTOCOMMIT').upper() return getattr(IsolationLevel, isolation_level, IsolationLevel.AUTOCOMMIT) diff --git a/airflow/providers/qubole/hooks/qubole.py b/airflow/providers/qubole/hooks/qubole.py index b2b54ebe6b579..e4185132d2fc4 100644 --- a/airflow/providers/qubole/hooks/qubole.py +++ b/airflow/providers/qubole/hooks/qubole.py @@ -126,7 +126,7 @@ def get_ui_field_behaviour() -> Dict: "placeholders": {'host': 'https://.qubole.com/api'}, } - def __init__(self, *args, **kwargs) -> None: # pylint: disable=unused-argument + def __init__(self, *args, **kwargs) -> None: super().__init__() conn = self.get_connection(kwargs.get('qubole_conn_id', self.default_conn_name)) Qubole.configure(api_token=conn.password, api_url=conn.host) @@ -202,7 +202,6 @@ def kill(self, ti): self.log.info('Sending KILL signal to Qubole Command Id: %s', self.cmd.id) self.cmd.cancel() - # pylint: disable=consider-using-with def get_results( self, ti=None, @@ -271,7 +270,7 @@ def create_cmd_args(self, context) -> List[str]: tags = {self.dag_id, self.task_id, context['run_id']} positional_args_list = flatten_list(POSITIONAL_ARGS.values()) - for key, value in self.kwargs.items(): # pylint: disable=too-many-nested-blocks + for key, value in self.kwargs.items(): if key in COMMAND_ARGS[cmd_type]: if key in HYPHEN_ARGS: args.append(f"--{key.replace('_', '-')}={value}") diff --git a/airflow/providers/qubole/hooks/qubole_check.py b/airflow/providers/qubole/hooks/qubole_check.py index 47e29d42bbd80..9cafa39f9c4dc 100644 --- a/airflow/providers/qubole/hooks/qubole_check.py +++ b/airflow/providers/qubole/hooks/qubole_check.py @@ -98,7 +98,7 @@ def handle_failure_retry(context) -> None: log.info('Cancelling the Qubole Command Id: %s', cmd_id) cmd.cancel() - def get_first(self, sql): # pylint: disable=unused-argument + def get_first(self, sql): """Get Qubole query first record list""" self.execute(context=self.context) query_result = self.get_query_results() diff --git a/airflow/providers/qubole/operators/qubole_check.py b/airflow/providers/qubole/operators/qubole_check.py index 6f9f9250e165e..329cbc8cf8a27 100644 --- a/airflow/providers/qubole/operators/qubole_check.py +++ b/airflow/providers/qubole/operators/qubole_check.py @@ -47,7 +47,6 @@ def get_hook(self) -> QuboleCheckHook: ) -# pylint: disable=too-many-ancestors class QuboleCheckOperator(_QuboleCheckOperatorMixin, SQLCheckOperator, QuboleOperator): """ Performs checks against Qubole Commands. 
``QuboleCheckOperator`` expects @@ -116,7 +115,8 @@ def __init__( # TODO(xinbinhuang): refactor to reduce levels of inheritance -# pylint: disable=too-many-ancestors + + class QuboleValueCheckOperator(_QuboleCheckOperatorMixin, SQLValueCheckOperator, QuboleOperator): """ Performs a simple value check using Qubole command. diff --git a/airflow/providers/qubole/sensors/qubole.py b/airflow/providers/qubole/sensors/qubole.py index c3230c359083b..8936fe8ebc884 100644 --- a/airflow/providers/qubole/sensors/qubole.py +++ b/airflow/providers/qubole/sensors/qubole.py @@ -52,10 +52,8 @@ def poke(self, context: dict) -> bool: status = False try: - status = self.sensor_class.check( # type: ignore[attr-defined] # pylint: disable=no-member - self.data - ) - except Exception as e: # pylint: disable=broad-except + status = self.sensor_class.check(self.data) # type: ignore[attr-defined] + except Exception as e: self.log.exception(e) status = False diff --git a/airflow/providers/salesforce/hooks/tableau.py b/airflow/providers/salesforce/hooks/tableau.py index cf5f7f3e041ea..8dcfe82a8e6b4 100644 --- a/airflow/providers/salesforce/hooks/tableau.py +++ b/airflow/providers/salesforce/hooks/tableau.py @@ -17,7 +17,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.tableau.hooks.tableau import TableauHook, TableauJobFinishCode # noqa warnings.warn( diff --git a/airflow/providers/salesforce/operators/tableau_refresh_workbook.py b/airflow/providers/salesforce/operators/tableau_refresh_workbook.py index 309af331c6584..007575caad142 100644 --- a/airflow/providers/salesforce/operators/tableau_refresh_workbook.py +++ b/airflow/providers/salesforce/operators/tableau_refresh_workbook.py @@ -17,7 +17,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.tableau.operators.tableau_refresh_workbook import ( # noqa TableauRefreshWorkbookOperator, ) diff --git a/airflow/providers/salesforce/sensors/tableau_job_status.py b/airflow/providers/salesforce/sensors/tableau_job_status.py index 076159ee73e24..b1fb4a71bc5dc 100644 --- a/airflow/providers/salesforce/sensors/tableau_job_status.py +++ b/airflow/providers/salesforce/sensors/tableau_job_status.py @@ -17,7 +17,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.tableau.sensors.tableau_job_status import ( # noqa TableauJobFailedException, TableauJobStatusSensor, diff --git a/airflow/providers/segment/operators/segment_track_event.py b/airflow/providers/segment/operators/segment_track_event.py index 4a5f122482b1e..c7c3e8dce3997 100644 --- a/airflow/providers/segment/operators/segment_track_event.py +++ b/airflow/providers/segment/operators/segment_track_event.py @@ -69,5 +69,4 @@ def execute(self, context: Dict) -> None: self.properties, ) - # pylint: disable=no-member hook.track(user_id=self.user_id, event=self.event, properties=self.properties) # type: ignore diff --git a/airflow/providers/sendgrid/utils/emailer.py b/airflow/providers/sendgrid/utils/emailer.py index df832a4a2843e..58a1968180914 100644 --- a/airflow/providers/sendgrid/utils/emailer.py +++ b/airflow/providers/sendgrid/utils/emailer.py @@ -46,7 +46,7 @@ AddressesType = Union[str, Iterable[str]] -def send_email( # pylint: disable=too-many-locals +def send_email( to: AddressesType, subject: str, html_content: str, diff --git a/airflow/providers/singularity/operators/singularity.py b/airflow/providers/singularity/operators/singularity.py index a3c8c9676f3fe..d1099af89165c 100644 --- a/airflow/providers/singularity/operators/singularity.py +++ 
b/airflow/providers/singularity/operators/singularity.py @@ -71,7 +71,7 @@ class SingularityOperator(BaseOperator): ) template_fields_renderers = {"command": "bash", "environment": "json"} - def __init__( # pylint: disable=too-many-arguments + def __init__( self, *, image: str, diff --git a/airflow/providers/slack/hooks/slack.py b/airflow/providers/slack/hooks/slack.py index adefd8667bfa4..9173bc921eb79 100644 --- a/airflow/providers/slack/hooks/slack.py +++ b/airflow/providers/slack/hooks/slack.py @@ -24,15 +24,13 @@ from airflow.hooks.base import BaseHook -class SlackHook(BaseHook): # noqa +class SlackHook(BaseHook): """ Creates a Slack connection to be used for calls. Takes both Slack API token directly and connection that has Slack API token. If both are supplied, Slack API token will be used. Also exposes the rest of slack.WebClient args. - Examples: - .. code-block:: python # Create hook diff --git a/airflow/providers/slack/hooks/slack_webhook.py b/airflow/providers/slack/hooks/slack_webhook.py index 309d0d58ea3fd..c3cbef126d576 100644 --- a/airflow/providers/slack/hooks/slack_webhook.py +++ b/airflow/providers/slack/hooks/slack_webhook.py @@ -66,7 +66,6 @@ class SlackWebhookHook(HttpHook): conn_type = 'slackwebhook' hook_name = 'Slack Webhook' - # pylint: disable=too-many-arguments def __init__( self, http_conn_id=None, diff --git a/airflow/providers/slack/operators/slack.py b/airflow/providers/slack/operators/slack.py index 14af018d5c03c..96a8ea09582d0 100644 --- a/airflow/providers/slack/operators/slack.py +++ b/airflow/providers/slack/operators/slack.py @@ -75,9 +75,9 @@ def construct_api_call_params(self) -> Any: "SlackAPIOperator should not be used directly. Chose one of the subclasses instead" ) - def execute(self, **kwargs): # noqa: D403 + def execute(self, **kwargs): """ - SlackAPIOperator calls will not fail even if the call is not unsuccessful. + The SlackAPIOperator calls will not fail even if the call is not unsuccessful. 
It should not prevent a DAG from completing in success """ if not self.api_params: diff --git a/airflow/providers/slack/operators/slack_webhook.py b/airflow/providers/slack/operators/slack_webhook.py index 098324e2448c5..38aafedf72e76 100644 --- a/airflow/providers/slack/operators/slack_webhook.py +++ b/airflow/providers/slack/operators/slack_webhook.py @@ -69,7 +69,6 @@ class SlackWebhookOperator(SimpleHttpOperator): 'proxy', ] - # pylint: disable=too-many-arguments def __init__( self, *, diff --git a/airflow/providers/snowflake/hooks/snowflake.py b/airflow/providers/snowflake/hooks/snowflake.py index 640b86d84b1ce..293a7994200c0 100644 --- a/airflow/providers/snowflake/hooks/snowflake.py +++ b/airflow/providers/snowflake/hooks/snowflake.py @@ -20,8 +20,6 @@ from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import serialization - -# pylint: disable=no-name-in-module from snowflake import connector from snowflake.connector import SnowflakeConnection @@ -148,9 +146,7 @@ def _get_conn_params(self) -> Dict[str, Optional[str]]: One method to fetch connection params as a dict used in get_uri() and get_connection() """ - conn = self.get_connection( - self.snowflake_conn_id # type: ignore[attr-defined] # pylint: disable=no-member - ) + conn = self.get_connection(self.snowflake_conn_id) # type: ignore[attr-defined] account = conn.extra_dejson.get('extra__snowflake__account', '') or conn.extra_dejson.get( 'account', '' ) @@ -228,10 +224,8 @@ def _get_aws_credentials(self) -> Tuple[Optional[Any], Optional[Any]]: intended to be used by external import and export statements """ - if self.snowflake_conn_id: # type: ignore[attr-defined] # pylint: disable=no-member - connection_object = self.get_connection( - self.snowflake_conn_id # type: ignore[attr-defined] # pylint: disable=no-member - ) + if self.snowflake_conn_id: # type: ignore[attr-defined] + connection_object = self.get_connection(self.snowflake_conn_id) # type: ignore[attr-defined] if 'aws_secret_access_key' in connection_object.extra_dejson: aws_access_key_id = connection_object.extra_dejson.get( 'aws_access_key_id' diff --git a/airflow/providers/snowflake/transfers/snowflake_to_slack.py b/airflow/providers/snowflake/transfers/snowflake_to_slack.py index a13841c058bae..352b53350c960 100644 --- a/airflow/providers/snowflake/transfers/snowflake_to_slack.py +++ b/airflow/providers/snowflake/transfers/snowflake_to_slack.py @@ -72,7 +72,7 @@ class SnowflakeToSlackOperator(BaseOperator): template_fields_renderers = {"slack_message": "jinja"} times_rendered = 0 - def __init__( # pylint: disable=too-many-arguments + def __init__( self, *, sql: str, diff --git a/airflow/providers/ssh/hooks/ssh.py b/airflow/providers/ssh/hooks/ssh.py index 71dc47894bb61..cd1458a9649a7 100644 --- a/airflow/providers/ssh/hooks/ssh.py +++ b/airflow/providers/ssh/hooks/ssh.py @@ -35,7 +35,7 @@ from getpass import getuser -class SSHHook(BaseHook): # pylint: disable=too-many-instance-attributes +class SSHHook(BaseHook): """ Hook for ssh remote execution using Paramiko. 
ref: https://github.com/paramiko/paramiko @@ -86,7 +86,7 @@ def get_ui_field_behaviour() -> Dict: }, } - def __init__( # pylint: disable=too-many-statements + def __init__( self, ssh_conn_id: Optional[str] = None, remote_host: Optional[str] = None, diff --git a/airflow/providers/trino/hooks/trino.py b/airflow/providers/trino/hooks/trino.py index 0914d04b32e4b..10bdb3a196c22 100644 --- a/airflow/providers/trino/hooks/trino.py +++ b/airflow/providers/trino/hooks/trino.py @@ -60,9 +60,7 @@ class TrinoHook(DbApiHook): def get_conn(self) -> Connection: """Returns a connection object""" - db = self.get_connection( - self.trino_conn_id # type: ignore[attr-defined] # pylint: disable=no-member - ) + db = self.get_connection(self.trino_conn_id) # type: ignore[attr-defined] extra = db.extra_dejson auth = None if db.password and extra.get('auth') == 'kerberos': @@ -99,15 +97,13 @@ def get_conn(self) -> Connection: # Unfortunately verify parameter is available via public API. # The PR is merged in the trino library, but has not been released. # See: https://github.com/trinodb/trino-python-client/pull/31 - trino_conn._http_session.verify = _boolify(extra['verify']) # pylint: disable=protected-access + trino_conn._http_session.verify = _boolify(extra['verify']) return trino_conn def get_isolation_level(self) -> Any: """Returns an isolation level""" - db = self.get_connection( - self.trino_conn_id # type: ignore[attr-defined] # pylint: disable=no-member - ) + db = self.get_connection(self.trino_conn_id) # type: ignore[attr-defined] isolation_level = db.extra_dejson.get('isolation_level', 'AUTOCOMMIT').upper() return getattr(IsolationLevel, isolation_level, IsolationLevel.AUTOCOMMIT) diff --git a/airflow/providers/vertica/hooks/vertica.py b/airflow/providers/vertica/hooks/vertica.py index 75f1b0c794381..9530c5965c5a8 100644 --- a/airflow/providers/vertica/hooks/vertica.py +++ b/airflow/providers/vertica/hooks/vertica.py @@ -33,7 +33,7 @@ class VerticaHook(DbApiHook): def get_conn(self) -> connect: """Return verticaql connection object""" - conn = self.get_connection(self.vertica_conn_id) # type: ignore # pylint: disable=no-member + conn = self.get_connection(self.vertica_conn_id) # type: ignore conn_config = { "user": conn.login, "password": conn.password or '', diff --git a/airflow/providers/yandex/operators/yandexcloud_dataproc.py b/airflow/providers/yandex/operators/yandexcloud_dataproc.py index 28b2e02270b89..0ab6245a7d628 100644 --- a/airflow/providers/yandex/operators/yandexcloud_dataproc.py +++ b/airflow/providers/yandex/operators/yandexcloud_dataproc.py @@ -73,9 +73,6 @@ class DataprocCreateClusterOperator(BaseOperator): :type connection_id: Optional[str] """ - # pylint: disable=too-many-instance-attributes - # pylint: disable=too-many-arguments - # pylint: disable=too-many-locals def __init__( self, *, @@ -213,7 +210,6 @@ class DataprocCreateHiveJobOperator(BaseOperator): template_fields = ['cluster_id'] - # pylint: disable=too-many-arguments def __init__( self, *, @@ -286,7 +282,6 @@ class DataprocCreateMapReduceJobOperator(BaseOperator): template_fields = ['cluster_id'] - # pylint: disable=too-many-arguments def __init__( self, *, @@ -364,7 +359,6 @@ class DataprocCreateSparkJobOperator(BaseOperator): template_fields = ['cluster_id'] - # pylint: disable=too-many-arguments def __init__( self, *, @@ -442,7 +436,6 @@ class DataprocCreatePysparkJobOperator(BaseOperator): template_fields = ['cluster_id'] - # pylint: disable=too-many-arguments def __init__( self, *, diff --git 
a/airflow/providers/zendesk/hooks/zendesk.py b/airflow/providers/zendesk/hooks/zendesk.py index e76322902d7c9..3573dc661995a 100644 --- a/airflow/providers/zendesk/hooks/zendesk.py +++ b/airflow/providers/zendesk/hooks/zendesk.py @@ -92,7 +92,6 @@ def call( keys += query_params['include'].split(',') results = {key: results[key] for key in keys} - # pylint: disable=too-many-nested-blocks if get_all_pages: while next_page is not None: try: diff --git a/airflow/providers_manager.py b/airflow/providers_manager.py index 9f1469c8ae530..0770f240dfac7 100644 --- a/airflow/providers_manager.py +++ b/airflow/providers_manager.py @@ -34,7 +34,7 @@ import importlib.resources as importlib_resources except ImportError: # Try back-ported to PY<37 `importlib_resources`. - import importlib_resources # noqa + import importlib_resources log = logging.getLogger(__name__) @@ -128,10 +128,10 @@ def initialize_providers_manager(self): self._discover_all_airflow_builtin_providers_from_local_sources() self._discover_all_providers_from_packages() self._discover_hooks() - self._provider_dict = OrderedDict(sorted(self._provider_dict.items())) # noqa - self._hooks_dict = OrderedDict(sorted(self._hooks_dict.items())) # noqa - self._connection_form_widgets = OrderedDict(sorted(self._connection_form_widgets.items())) # noqa - self._field_behaviours = OrderedDict(sorted(self._field_behaviours.items())) # noqa + self._provider_dict = OrderedDict(sorted(self._provider_dict.items())) + self._hooks_dict = OrderedDict(sorted(self._hooks_dict.items())) + self._connection_form_widgets = OrderedDict(sorted(self._connection_form_widgets.items())) + self._field_behaviours = OrderedDict(sorted(self._field_behaviours.items())) self._discover_extra_links() self._initialized = True @@ -184,7 +184,7 @@ def _discover_all_airflow_builtin_providers_from_local_sources(self) -> None: try: for path in airflow.providers.__path__: self._add_provider_info_from_local_source_files_on_path(path) - except Exception as e: # noqa pylint: disable=broad-except + except Exception as e: log.warning("Error when loading 'provider.yaml' files from airflow sources: %s", e) def _add_provider_info_from_local_source_files_on_path(self, path) -> None: @@ -222,7 +222,7 @@ def _add_provider_info_from_local_source_file(self, path, package_name) -> None: "package name have already been registered", package_name, ) - except Exception as e: # noqa pylint: disable=broad-except + except Exception as e: log.warning("Error when loading '%s': %s", path, e) def _discover_hooks(self) -> None: @@ -280,7 +280,7 @@ def _add_hook(self, hook_class_name: str, provider_package: str) -> None: if field_behaviours: self._add_customized_fields(provider_package, hook_class, field_behaviours) - except ImportError as e: # noqa pylint: disable=broad-except + except ImportError as e: # When there is an ImportError we turn it into debug warnings as this is # an expected case when only some providers are installed log.debug( @@ -290,7 +290,7 @@ def _add_hook(self, hook_class_name: str, provider_package: str) -> None: e, ) return - except Exception as e: # noqa pylint: disable=broad-except + except Exception as e: log.warning( "Exception when importing '%s' from '%s' package: %s", hook_class_name, @@ -348,7 +348,7 @@ def _add_customized_fields(self, package_name: str, hook_class: type, customized ) return self._field_behaviours[connection_type] = customized_fields - except Exception as e: # noqa pylint: disable=broad-except + except Exception as e: log.warning( "Error when loading customized 
fields from package '%s' hook class '%s': %s", package_name, diff --git a/airflow/secrets/base_secrets.py b/airflow/secrets/base_secrets.py index 7070607e1c266..91f28be0bbcc9 100644 --- a/airflow/secrets/base_secrets.py +++ b/airflow/secrets/base_secrets.py @@ -95,7 +95,7 @@ def get_variable(self, key: str) -> Optional[str]: """ raise NotImplementedError() - def get_config(self, key: str) -> Optional[str]: # pylint: disable=unused-argument + def get_config(self, key: str) -> Optional[str]: """ Return value for Airflow Config Key diff --git a/airflow/secrets/environment_variables.py b/airflow/secrets/environment_variables.py index 1ad48dc5ba79a..92fa186ec3878 100644 --- a/airflow/secrets/environment_variables.py +++ b/airflow/secrets/environment_variables.py @@ -29,7 +29,6 @@ class EnvironmentVariablesBackend(BaseSecretsBackend): """Retrieves Connection object and Variable from environment variable.""" - # pylint: disable=missing-docstring def get_conn_uri(self, conn_id: str) -> Optional[str]: environment_uri = os.environ.get(CONN_ENV_PREFIX + conn_id.upper()) return environment_uri diff --git a/airflow/secrets/metastore.py b/airflow/secrets/metastore.py index 061316374d6e4..268f281535441 100644 --- a/airflow/secrets/metastore.py +++ b/airflow/secrets/metastore.py @@ -29,7 +29,6 @@ class MetastoreBackend(BaseSecretsBackend): """Retrieves Connection object and Variable from airflow metastore database.""" - # pylint: disable=missing-docstring @provide_session def get_connection(self, conn_id, session=None) -> Optional['Connection']: from airflow.models.connection import Connection @@ -38,7 +37,6 @@ def get_connection(self, conn_id, session=None) -> Optional['Connection']: session.expunge_all() return conn - # pylint: disable=missing-docstring @provide_session def get_connections(self, conn_id, session=None) -> List['Connection']: warnings.warn( diff --git a/airflow/security/kerberos.py b/airflow/security/kerberos.py index 299c327fe91e8..433484a3ace9c 100644 --- a/airflow/security/kerberos.py +++ b/airflow/security/kerberos.py @@ -94,7 +94,7 @@ def renew_from_kt(principal: str, keytab: str, exit_on_fail: bool = True): else: return subp.returncode - global NEED_KRB181_WORKAROUND # pylint: disable=global-statement + global NEED_KRB181_WORKAROUND if NEED_KRB181_WORKAROUND is None: NEED_KRB181_WORKAROUND = detect_conf_var() if NEED_KRB181_WORKAROUND: diff --git a/airflow/security/utils.py b/airflow/security/utils.py index ca203b5894f73..c4e2af91d7ab9 100644 --- a/airflow/security/utils.py +++ b/airflow/security/utils.py @@ -35,19 +35,21 @@ """Various security-related utils.""" import re import socket +from typing import List, Optional from airflow.utils.net import get_hostname -def get_components(principal): # noqa: D402 +def get_components(principal) -> Optional[List[str]]: """ - get_components(principal) -> (short name, instance (FQDN), realm) + Returns components retrieved from the kerberos principal. + -> (short name, instance (FQDN), realm) - ``principal`` is the kerberos principal to parse. + ``principal`` is the kerberos principal to parse. 
""" if not principal: return None - return re.split(r'[\/@]', str(principal)) + return re.split(r'[/@]', str(principal)) def replace_hostname_pattern(components, host=None): diff --git a/airflow/sensors/base.py b/airflow/sensors/base.py index 24e369905b899..ebc91a91b1eec 100644 --- a/airflow/sensors/base.py +++ b/airflow/sensors/base.py @@ -39,7 +39,7 @@ # We need to keep the import here because GCSToLocalFilesystemOperator released in # Google Provider before 3.0.0 imported apply_defaults from here. # See https://github.com/apache/airflow/issues/16035 -from airflow.utils.decorators import apply_defaults # pylint: disable=unused-import +from airflow.utils.decorators import apply_defaults class BaseSensorOperator(BaseOperator, SkipMixin): diff --git a/airflow/sensors/base_sensor_operator.py b/airflow/sensors/base_sensor_operator.py index cfcc6842f0f74..716f03141ace9 100644 --- a/airflow/sensors/base_sensor_operator.py +++ b/airflow/sensors/base_sensor_operator.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.sensors.base import BaseSensorOperator # noqa warnings.warn( diff --git a/airflow/sensors/bash.py b/airflow/sensors/bash.py index 35cd0967770a0..33246dd6a0a57 100644 --- a/airflow/sensors/bash.py +++ b/airflow/sensors/bash.py @@ -64,7 +64,7 @@ def poke(self, context): script_location = tmp_dir + "/" + fname self.log.info("Temporary script location: %s", script_location) self.log.info("Running command: %s", bash_command) - # pylint: disable=subprocess-popen-preexec-fn + with Popen( ['bash', fname], stdout=PIPE, diff --git a/airflow/sensors/date_time_sensor.py b/airflow/sensors/date_time_sensor.py index b6c8c7d6e0dcd..63a221685af7c 100644 --- a/airflow/sensors/date_time_sensor.py +++ b/airflow/sensors/date_time_sensor.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.sensors.date_time import DateTimeSensor # noqa warnings.warn( diff --git a/airflow/sensors/external_task.py b/airflow/sensors/external_task.py index 929dcfd16a993..351d9013dd3ec 100644 --- a/airflow/sensors/external_task.py +++ b/airflow/sensors/external_task.py @@ -210,7 +210,7 @@ def get_count(self, dttm_filter, session, states) -> int: .filter( TI.dag_id == self.external_dag_id, TI.task_id == self.external_task_id, - TI.state.in_(states), # pylint: disable=no-member + TI.state.in_(states), TI.execution_date.in_(dttm_filter), ) .scalar() @@ -220,7 +220,7 @@ def get_count(self, dttm_filter, session, states) -> int: session.query(func.count()) .filter( DR.dag_id == self.external_dag_id, - DR.state.in_(states), # pylint: disable=no-member + DR.state.in_(states), DR.execution_date.in_(dttm_filter), ) .scalar() diff --git a/airflow/sensors/external_task_sensor.py b/airflow/sensors/external_task_sensor.py index c3ff2163671dd..bc24a4d1f27eb 100644 --- a/airflow/sensors/external_task_sensor.py +++ b/airflow/sensors/external_task_sensor.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.sensors.external_task import ( # noqa ExternalTaskMarker, ExternalTaskSensor, diff --git a/airflow/sensors/hdfs_sensor.py b/airflow/sensors/hdfs_sensor.py index 4f762fb6458cb..0d5690085beb9 100644 --- a/airflow/sensors/hdfs_sensor.py +++ b/airflow/sensors/hdfs_sensor.py @@ -20,7 +20,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.apache.hdfs.sensors.hdfs import HdfsSensor # noqa warnings.warn( diff --git a/airflow/sensors/hive_partition_sensor.py b/airflow/sensors/hive_partition_sensor.py index 383ffec2cc77e..8f6f08ae3f552 
100644 --- a/airflow/sensors/hive_partition_sensor.py +++ b/airflow/sensors/hive_partition_sensor.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.apache.hive.sensors.hive_partition import HivePartitionSensor # noqa warnings.warn( diff --git a/airflow/sensors/http_sensor.py b/airflow/sensors/http_sensor.py index e54aa7e79b722..96dce065b50e8 100644 --- a/airflow/sensors/http_sensor.py +++ b/airflow/sensors/http_sensor.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.http.sensors.http import HttpSensor # noqa warnings.warn( diff --git a/airflow/sensors/metastore_partition_sensor.py b/airflow/sensors/metastore_partition_sensor.py index 67797f3c99b69..812c86fc57c0a 100644 --- a/airflow/sensors/metastore_partition_sensor.py +++ b/airflow/sensors/metastore_partition_sensor.py @@ -22,7 +22,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.apache.hive.sensors.metastore_partition import MetastorePartitionSensor # noqa warnings.warn( diff --git a/airflow/sensors/named_hive_partition_sensor.py b/airflow/sensors/named_hive_partition_sensor.py index 911568d8e6719..574c2ce04402c 100644 --- a/airflow/sensors/named_hive_partition_sensor.py +++ b/airflow/sensors/named_hive_partition_sensor.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.apache.hive.sensors.named_hive_partition import NamedHivePartitionSensor # noqa warnings.warn( diff --git a/airflow/sensors/s3_key_sensor.py b/airflow/sensors/s3_key_sensor.py index 2fd7b90321cce..d0f7c40b6d657 100644 --- a/airflow/sensors/s3_key_sensor.py +++ b/airflow/sensors/s3_key_sensor.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.sensors.s3_key import S3KeySensor # noqa warnings.warn( diff --git a/airflow/sensors/s3_prefix_sensor.py b/airflow/sensors/s3_prefix_sensor.py index a6169df940df3..9826dde3a5f3a 100644 --- a/airflow/sensors/s3_prefix_sensor.py +++ b/airflow/sensors/s3_prefix_sensor.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.sensors.s3_prefix import S3PrefixSensor # noqa warnings.warn( diff --git a/airflow/sensors/smart_sensor.py b/airflow/sensors/smart_sensor.py index 6c17beb8912ad..c8c5ba7f2e31a 100644 --- a/airflow/sensors/smart_sensor.py +++ b/airflow/sensors/smart_sensor.py @@ -43,7 +43,7 @@ handler_config = config['handlers']['task'] try: formatter_config = config['formatters'][handler_config['formatter']] -except Exception as err: # pylint: disable=broad-except +except Exception as err: formatter_config = None print(err) dictConfigurator = DictConfigurator(config) @@ -79,7 +79,7 @@ def __init__(self, si): self.execution_context = json.loads(si.execution_context) if si.execution_context else {} try: self.log = self._get_sensor_logger(si) - except Exception as e: # pylint: disable=broad-except + except Exception as e: self.log = None print(e) self.hashcode = si.hashcode @@ -105,7 +105,7 @@ def create_new_task_handler(): Create task log handler for a sensor work. 
:return: log handler """ - from airflow.utils.log.secrets_masker import _secrets_masker # noqa + from airflow.utils.log.secrets_masker import _secrets_masker handler_config_copy = {k: handler_config[k] for k in handler_config} del handler_config_copy['filters'] @@ -147,7 +147,7 @@ def close_sensor_logger(self): for handler in self.log.handlers: try: handler.close() - except Exception as e: # pylint: disable=broad-except + except Exception as e: print(e) @property @@ -371,7 +371,7 @@ def _load_sensor_works(self, session=None): for ti in tis: try: sensor_works.append(SensorWork(ti)) - except Exception: # pylint: disable=broad-except + except Exception: self.log.exception("Exception at creating sensor work for ti %s", ti.key) self.log.info("%d tasks detected.", len(sensor_works)) @@ -468,7 +468,7 @@ def mark_state(ti, sensor_instance): session.commit() - except Exception: # pylint: disable=broad-except + except Exception: self.log.warning( "Exception _mark_multi_state in smart sensor for hashcode %s", str(poke_hash), # cast to str in advance for highlighting @@ -496,7 +496,7 @@ def email_alert(task_instance, error_info): email = sensor_work.execution_context.get('email') send_email(email, subject, html_content) - except Exception: # pylint: disable=broad-except + except Exception: sensor_work.log.warning("Exception alerting email.", exc_info=True) def handle_failure(sensor_work, ti): @@ -656,7 +656,7 @@ def _execute_sensor_work(self, sensor_work): self._check_and_handle_ti_timeout(sensor_work) self.cached_sensor_exceptions.pop(cache_key, None) - except Exception as e: # pylint: disable=broad-except + except Exception as e: # The retry_infra_failure decorator inside hive_hooks will raise exception with # is_infra_failure == True. Long poking timeout here is also considered an infra # failure. Other exceptions should fail. 
@@ -727,7 +727,7 @@ def _emit_loop_stats(self): Stats.gauge("smart_sensor_operator.poked_exception", count_poke_exception) Stats.gauge("smart_sensor_operator.exception_failures", count_exception_failures) Stats.gauge("smart_sensor_operator.infra_failures", count_infra_failure) - except Exception: # pylint: disable=broad-except + except Exception: self.log.exception("Exception at getting loop stats %s") def execute(self, context): diff --git a/airflow/sensors/sql_sensor.py b/airflow/sensors/sql_sensor.py index 4fa74516f844c..8a077db534496 100644 --- a/airflow/sensors/sql_sensor.py +++ b/airflow/sensors/sql_sensor.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.sensors.sql import SqlSensor # noqa warnings.warn( diff --git a/airflow/sensors/time_delta_sensor.py b/airflow/sensors/time_delta_sensor.py index 5b09d54b4eef9..73f32c2fc82fc 100644 --- a/airflow/sensors/time_delta_sensor.py +++ b/airflow/sensors/time_delta_sensor.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.sensors.time_delta import TimeDeltaSensor # noqa warnings.warn( diff --git a/airflow/sensors/web_hdfs_sensor.py b/airflow/sensors/web_hdfs_sensor.py index fa13ea2fbbce4..8f9324e7c2b7b 100644 --- a/airflow/sensors/web_hdfs_sensor.py +++ b/airflow/sensors/web_hdfs_sensor.py @@ -19,7 +19,6 @@ import warnings -# pylint: disable=unused-import from airflow.providers.apache.hdfs.sensors.web_hdfs import WebHdfsSensor # noqa warnings.warn( diff --git a/airflow/sentry.py b/airflow/sentry.py index 62eac9abf7610..51fe26f2c31dc 100644 --- a/airflow/sentry.py +++ b/airflow/sentry.py @@ -52,7 +52,7 @@ def flush(self): import sentry_sdk # Verify blinker installation - from blinker import signal # noqa: F401 pylint: disable=unused-import + from blinker import signal # noqa: F401 from sentry_sdk.integrations.flask import FlaskIntegration from sentry_sdk.integrations.logging import ignore_logger diff --git a/airflow/serialization/json_schema.py b/airflow/serialization/json_schema.py index dbe78e0ed7b96..272d669e3d869 100644 --- a/airflow/serialization/json_schema.py +++ b/airflow/serialization/json_schema.py @@ -30,12 +30,11 @@ class Validator(Protocol): """ - This class is only used for TypeChecking (for IDEs, mypy, pylint, etc) + This class is only used for TypeChecking (for IDEs, mypy, etc) due to the way ``Draft7Validator`` is created. They are created or do not inherit from proper classes. Hence you can not have ``type: Draft7Validator``. """ - # pylint: disable=unused-argument def is_valid(self, instance) -> bool: """Check if the instance is valid under the current schema""" ... diff --git a/airflow/serialization/serialized_objects.py b/airflow/serialization/serialized_objects.py index 451305c6f56ed..d2f456dc72956 100644 --- a/airflow/serialization/serialized_objects.py +++ b/airflow/serialization/serialized_objects.py @@ -195,7 +195,6 @@ def serialize_to_json( serialized_object[key] = value return serialized_object - # pylint: disable=too-many-return-statements @classmethod def _serialize(cls, var: Any) -> Any: # Unfortunately there is no support for recursive types in mypy """Helper function of depth first search for serialization. 
@@ -255,10 +254,8 @@ def _serialize(cls, var: Any) -> Any: # Unfortunately there is no support for r log.debug('Cast type %s to str in serialization.', type(var)) return str(var) - # pylint: enable=too-many-return-statements - @classmethod - def _deserialize(cls, encoded_var: Any) -> Any: # pylint: disable=too-many-return-statements + def _deserialize(cls, encoded_var: Any) -> Any: """Helper function of depth first search for deserialization.""" # JSON primitives (except for dict) are not encoded. if cls._is_primitive(encoded_var): @@ -308,7 +305,7 @@ def _deserialize_timedelta(cls, seconds: int) -> datetime.timedelta: @classmethod def _is_constructor_param(cls, attrname: str, instance: Any) -> bool: - # pylint: disable=unused-argument + return attrname in cls._CONSTRUCTOR_PARAMS @classmethod @@ -328,7 +325,6 @@ def _value_is_hardcoded_default(cls, attrname: str, value: Any, instance: Any) - to account for the case where the default value of the field is None but has the ``field = field or {}`` set. """ - # pylint: disable=unused-argument if attrname in cls._CONSTRUCTOR_PARAMS and ( cls._CONSTRUCTOR_PARAMS[attrname] is value or (value in [{}, []]) ): @@ -453,7 +449,7 @@ def deserialize_operator(cls, encoded_op: Dict[str, Any]) -> BaseOperator: op_extra_links_from_plugin = {} # We don't want to load Extra Operator links in Scheduler - if cls._load_operator_extra_links: # pylint: disable=too-many-nested-blocks + if cls._load_operator_extra_links: from airflow import plugins_manager plugins_manager.initialize_extra_operators_links_plugins() @@ -655,7 +651,7 @@ class SerializedDAG(DAG, BaseSerialization): _decorated_fields = {'schedule_interval', 'default_args', '_access_control'} @staticmethod - def __get_constructor_defaults(): # pylint: disable=no-method-argument + def __get_constructor_defaults(): param_to_attr = { 'concurrency': '_concurrency', 'description': '_description', @@ -710,9 +706,9 @@ def deserialize_dag(cls, encoded_dag: Dict[str, Any]) -> 'SerializedDAG': if k == "_downstream_task_ids": v = set(v) elif k == "tasks": - # pylint: disable=protected-access + SerializedBaseOperator._load_operator_extra_links = cls._load_operator_extra_links - # pylint: enable=protected-access + v = {task["task_id"]: SerializedBaseOperator.deserialize_operator(task) for task in v} k = "task_dict" elif k == "timezone": @@ -731,7 +727,7 @@ def deserialize_dag(cls, encoded_dag: Dict[str, Any]) -> 'SerializedDAG': setattr(dag, k, v) # Set _task_group - # pylint: disable=protected-access + if "_task_group" in encoded_dag: dag._task_group = SerializedTaskGroup.deserialize_task_group( # type: ignore encoded_dag["_task_group"], None, dag.task_dict @@ -742,7 +738,6 @@ def deserialize_dag(cls, encoded_dag: Dict[str, Any]) -> 'SerializedDAG': dag._task_group = TaskGroup.create_root(dag) for task in dag.tasks: dag.task_group.add(task) - # pylint: enable=protected-access # Set has_on_*_callbacks to True if they exist in Serialized blob as False is the default if "has_on_success_callback" in encoded_dag: @@ -769,7 +764,7 @@ def deserialize_dag(cls, encoded_dag: Dict[str, Any]) -> 'SerializedDAG': for task_id in serializable_task.downstream_task_ids: # Bypass set_upstream etc here - it does more than we want - # noqa: E501 # pylint: disable=protected-access + dag.task_dict[task_id]._upstream_task_ids.add(serializable_task.task_id) return dag @@ -802,7 +797,7 @@ def serialize_task_group(cls, task_group: TaskGroup) -> Optional[Union[Dict[str, return None serialize_group = { - "_group_id": task_group._group_id, # 
pylint: disable=protected-access + "_group_id": task_group._group_id, "prefix_group_id": task_group.prefix_group_id, "tooltip": task_group.tooltip, "ui_color": task_group.ui_color, diff --git a/airflow/settings.py b/airflow/settings.py index 580aa97c6b991..857dffe8ad3b1 100644 --- a/airflow/settings.py +++ b/airflow/settings.py @@ -31,7 +31,6 @@ from sqlalchemy.orm.session import Session as SASession from sqlalchemy.pool import NullPool -# pylint: disable=unused-import from airflow.configuration import AIRFLOW_HOME, WEBSERVER_CONFIG, conf # NOQA F401 from airflow.executors import executor_constants from airflow.logging_config import configure_logging @@ -47,7 +46,7 @@ TIMEZONE = pendulum.tz.local_timezone() else: TIMEZONE = pendulum.tz.timezone(tz) -except Exception: # pylint: disable=broad-except +except Exception: pass log.info("Configured default timezone %s", TIMEZONE) @@ -79,7 +78,7 @@ Session: Optional[SASession] = None # The JSON library to use for DAG Serialization and De-Serialization -json = json # pylint: disable=self-assigning-variable +json = json # Dictionary containing State and colors associated to each state to # display on the Webserver @@ -118,7 +117,7 @@ def custom_show_warning(message, category, filename, lineno, file=None, line=Non warnings.showwarning = custom_show_warning -def task_policy(task) -> None: # pylint: disable=unused-argument +def task_policy(task) -> None: """ This policy setting allows altering tasks after they are loaded in the DagBag. It allows administrator to rewire some task's parameters. @@ -141,7 +140,7 @@ def task_policy(task) -> None: # pylint: disable=unused-argument """ -def dag_policy(dag) -> None: # pylint: disable=unused-argument +def dag_policy(dag) -> None: """ This policy setting allows altering DAGs after they are loaded in the DagBag. It allows administrator to rewire some DAG's parameters. @@ -161,7 +160,7 @@ def dag_policy(dag) -> None: # pylint: disable=unused-argument """ -def task_instance_mutation_hook(task_instance): # pylint: disable=unused-argument +def task_instance_mutation_hook(task_instance): """ This setting allows altering task instances before they are queued by the Airflow scheduler. 
@@ -176,7 +175,7 @@ def task_instance_mutation_hook(task_instance): # pylint: disable=unused-argume """ -def pod_mutation_hook(pod): # pylint: disable=unused-argument +def pod_mutation_hook(pod): """ This setting allows altering ``kubernetes.client.models.V1Pod`` object before they are passed to the Kubernetes client by the ``PodLauncher`` @@ -191,7 +190,6 @@ def pod_mutation_hook(pod): # pylint: disable=unused-argument """ -# pylint: disable=global-statement def configure_vars(): """Configure Global Variables from airflow.cfg""" global SQL_ALCHEMY_CONN @@ -346,12 +344,12 @@ def validate_session(): check_session = sessionmaker(bind=engine) session = check_session() try: - session.execute("select 1") # pylint: disable=no-member + session.execute("select 1") conn_status = True except exc.DBAPIError as err: log.error(err) conn_status = False - session.close() # pylint: disable=no-member + session.close() return conn_status @@ -410,11 +408,11 @@ def get_session_lifetime_config(): def import_local_settings(): """Import airflow_local_settings.py files to allow overriding any configs in settings.py file""" - try: # pylint: disable=too-many-nested-blocks + try: import airflow_local_settings if hasattr(airflow_local_settings, "__all__"): - for i in airflow_local_settings.__all__: # pylint: disable=no-member + for i in airflow_local_settings.__all__: globals()[i] = getattr(airflow_local_settings, i) else: for k, v in airflow_local_settings.__dict__.items(): @@ -453,9 +451,6 @@ def initialize(): atexit.register(dispose_orm) -# pylint: enable=global-statement - - # Const stuff KILOBYTE = 1024 diff --git a/airflow/stats.py b/airflow/stats.py index ad4d24140238e..0a7004d10a595 100644 --- a/airflow/stats.py +++ b/airflow/stats.py @@ -50,7 +50,7 @@ def stop(self, send=True): class StatsLogger(Protocol): - """This class is only used for TypeChecking (for IDEs, mypy, pylint, etc)""" + """This class is only used for TypeChecking (for IDEs, mypy, etc)""" @classmethod def incr(cls, stat: str, count: int = 1, rate: int = 1) -> None: @@ -144,7 +144,7 @@ def start(self): self._start_time = time.perf_counter() return self - def stop(self, send=True): # pylint: disable=unused-argument + def stop(self, send=True): """Stop the timer, and optionally send it to stats backend""" self.duration = time.perf_counter() - self._start_time if send and self.real_timer: @@ -216,7 +216,7 @@ def get_current_handler_stat_name_func() -> Callable[[str], str]: return conf.getimport('metrics', 'stat_name_handler') or stat_name_default_handler -T = TypeVar("T", bound=Callable) # pylint: disable=invalid-name +T = TypeVar("T", bound=Callable) def validate_stat(fn: T) -> T: @@ -243,7 +243,7 @@ class AllowListValidator: def __init__(self, allow_list=None): if allow_list: - # pylint: disable=consider-using-generator + self.allow_list = tuple(item.strip().lower() for item in allow_list.split(',')) else: self.allow_list = None @@ -323,7 +323,7 @@ def decr(self, stat, count=1, rate=1, tags=None): return None @validate_stat - def gauge(self, stat, value, rate=1, delta=False, tags=None): # pylint: disable=unused-argument + def gauge(self, stat, value, rate=1, delta=False, tags=None): """Gauge stat""" if self.allow_list_validator.test(stat): tags = tags or [] @@ -432,5 +432,5 @@ def get_constant_tags(cls): Stats: StatsLogger else: - class Stats(metaclass=_Stats): # noqa: D101 + class Stats(metaclass=_Stats): """Empty class for Stats - we use metaclass to inject the right one""" diff --git a/airflow/task/task_runner/__init__.py 
b/airflow/task/task_runner/__init__.py index 30f11d25b8a4a..e00bceaecd51b 100644 --- a/airflow/task/task_runner/__init__.py +++ b/airflow/task/task_runner/__init__.py @@ -16,7 +16,7 @@ # specific language governing permissions and limitations # under the License. -# pylint: disable=missing-docstring + import logging from airflow.configuration import conf diff --git a/airflow/task/task_runner/base_task_runner.py b/airflow/task/task_runner/base_task_runner.py index c8035b04a86e2..d763dab4325c1 100644 --- a/airflow/task/task_runner/base_task_runner.py +++ b/airflow/task/task_runner/base_task_runner.py @@ -84,7 +84,6 @@ def __init__(self, local_task_job): # - the runner can read/execute those values as it needs cfg_path = tmp_configuration_copy(chmod=0o600) - # pylint: disable=consider-using-with self._error_file = NamedTemporaryFile(delete=True) self._cfg_path = cfg_path self._command = ( @@ -133,7 +132,7 @@ def run_command(self, run_with=None): self.log.info("Running on host: %s", get_hostname()) self.log.info('Running: %s', full_cmd) - # pylint: disable=subprocess-popen-preexec-fn,consider-using-with + proc = subprocess.Popen( full_cmd, stdout=subprocess.PIPE, diff --git a/airflow/task/task_runner/standard_task_runner.py b/airflow/task/task_runner/standard_task_runner.py index bb566b275f2a1..67755120e765d 100644 --- a/airflow/task/task_runner/standard_task_runner.py +++ b/airflow/task/task_runner/standard_task_runner.py @@ -21,7 +21,7 @@ from typing import Optional import psutil -from setproctitle import setproctitle # pylint: disable=no-name-in-module +from setproctitle import setproctitle from airflow.settings import CAN_FORK from airflow.task.task_runner.base_task_runner import BaseTaskRunner @@ -46,7 +46,7 @@ def _start_by_exec(self): subprocess = self.run_command() return psutil.Process(subprocess.pid) - def _start_by_fork(self): # pylint: disable=inconsistent-return-statements + def _start_by_fork(self): pid = os.fork() if pid: self.log.info("Started process %d to run task", pid) @@ -84,13 +84,13 @@ def _start_by_fork(self): # pylint: disable=inconsistent-return-statements try: args.func(args, dag=self.dag) return_code = 0 - except Exception: # pylint: disable=broad-except + except Exception: return_code = 1 finally: # Explicitly flush any pending exception to Sentry if enabled Sentry.flush() logging.shutdown() - os._exit(return_code) # pylint: disable=protected-access + os._exit(return_code) def return_code(self, timeout: int = 0) -> Optional[int]: # We call this multiple times, but we can only wait on the process once diff --git a/airflow/ti_deps/deps/not_previously_skipped_dep.py b/airflow/ti_deps/deps/not_previously_skipped_dep.py index 08413dfd571d6..3d1bde949ece7 100644 --- a/airflow/ti_deps/deps/not_previously_skipped_dep.py +++ b/airflow/ti_deps/deps/not_previously_skipped_dep.py @@ -28,7 +28,7 @@ class NotPreviouslySkippedDep(BaseTIDep): IGNORABLE = True IS_TASK_DEP = True - def _get_dep_statuses(self, ti, session, dep_context): # pylint: disable=signature-differs + def _get_dep_statuses(self, ti, session, dep_context): from airflow.models.skipmixin import ( XCOM_SKIPMIXIN_FOLLOWED, XCOM_SKIPMIXIN_KEY, diff --git a/airflow/ti_deps/deps/task_not_running_dep.py b/airflow/ti_deps/deps/task_not_running_dep.py index 610b7de506644..59df2fff07112 100644 --- a/airflow/ti_deps/deps/task_not_running_dep.py +++ b/airflow/ti_deps/deps/task_not_running_dep.py @@ -29,7 +29,7 @@ class TaskNotRunningDep(BaseTIDep): IGNORABLE = False def __eq__(self, other): - return type(self) == type(other) 
# pylint: disable=C0123 + return type(self) == type(other) def __hash__(self): return hash(type(self)) diff --git a/airflow/ti_deps/deps/trigger_rule_dep.py b/airflow/ti_deps/deps/trigger_rule_dep.py index 3ac3bf0a74ae0..b65a84955532d 100644 --- a/airflow/ti_deps/deps/trigger_rule_dep.py +++ b/airflow/ti_deps/deps/trigger_rule_dep.py @@ -81,7 +81,7 @@ def _get_dep_statuses(self, ti, session, dep_context): ) @provide_session - def _evaluate_trigger_rule( # pylint: disable=too-many-branches + def _evaluate_trigger_rule( self, ti, successes, skipped, failed, upstream_failed, done, flag_upstream_failed, session ): """ diff --git a/airflow/typing_compat.py b/airflow/typing_compat.py index a207ef2a688d0..0f3db7575cfa5 100644 --- a/airflow/typing_compat.py +++ b/airflow/typing_compat.py @@ -25,19 +25,14 @@ # Protocol and TypedDict are only added to typing module starting from # python 3.8 we can safely remove this shim import after Airflow drops # support for <3.8 - from typing import ( # type: ignore # noqa # pylint: disable=unused-import - Literal, - Protocol, - TypedDict, - runtime_checkable, - ) + from typing import Literal, Protocol, TypedDict, runtime_checkable # type: ignore except ImportError: from typing_extensions import Literal, Protocol, TypedDict, runtime_checkable # type: ignore # noqa # Before Py 3.7, there is no re.Pattern class try: - from re import Pattern as RePatternType # type: ignore # pylint: disable=unused-import + from re import Pattern as RePatternType # type: ignore except ImportError: import re diff --git a/airflow/utils/cli.py b/airflow/utils/cli.py index 80973b7336d93..2b5eb5943ec97 100644 --- a/airflow/utils/cli.py +++ b/airflow/utils/cli.py @@ -37,7 +37,7 @@ from airflow.utils.platform import getuser, is_terminal_support_colors from airflow.utils.session import provide_session -T = TypeVar("T", bound=Callable) # pylint: disable=invalid-name +T = TypeVar("T", bound=Callable) if TYPE_CHECKING: from airflow.models import DAG @@ -115,7 +115,7 @@ def _build_metrics(func_name, namespace): sub_commands_to_check = {'users', 'connections'} sensitive_fields = {'-p', '--password', '--conn-password'} full_command = list(sys.argv) - if full_command[1] in sub_commands_to_check: # pylint: disable=too-many-nested-blocks + if full_command[1] in sub_commands_to_check: for idx, command in enumerate(full_command): if command in sensitive_fields: # For cases when password is passed as "--password xyz" (with space between key and value) @@ -252,7 +252,7 @@ def setup_logging(filename): return handler.stream -def sigint_handler(sig, frame): # pylint: disable=unused-argument +def sigint_handler(sig, frame): """ Returns without error on SIGINT or SIGTERM signals in interactive command mode e.g. CTRL+C or kill @@ -260,7 +260,7 @@ def sigint_handler(sig, frame): # pylint: disable=unused-argument sys.exit(0) -def sigquit_handler(sig, frame): # pylint: disable=unused-argument +def sigquit_handler(sig, frame): """ Helps debug deadlocks by printing stacktraces when this gets a SIGQUIT e.g. 
kill -s QUIT or CTRL+\ @@ -268,7 +268,7 @@ def sigquit_handler(sig, frame): # pylint: disable=unused-argument print(f"Dumping stack traces for all threads in PID {os.getpid()}") id_to_name = {th.ident: th.name for th in threading.enumerate()} code = [] - for thread_id, stack in sys._current_frames().items(): # pylint: disable=protected-access + for thread_id, stack in sys._current_frames().items(): code.append(f"\n# Thread: {id_to_name.get(thread_id, '')}({thread_id})") for filename, line_number, name, line in traceback.extract_stack(stack): code.append(f'File: "{filename}", line {line_number}, in {name}') diff --git a/airflow/utils/cli_action_loggers.py b/airflow/utils/cli_action_loggers.py index 23de08d98311d..3d40018305214 100644 --- a/airflow/utils/cli_action_loggers.py +++ b/airflow/utils/cli_action_loggers.py @@ -67,7 +67,7 @@ def on_pre_execution(**kwargs): for callback in __pre_exec_callbacks: try: callback(**kwargs) - except Exception: # pylint: disable=broad-except + except Exception: logging.exception('Failed on pre-execution callback using %s', callback) @@ -85,7 +85,7 @@ def on_post_execution(**kwargs): for callback in __post_exec_callbacks: try: callback(**kwargs) - except Exception: # pylint: disable=broad-except + except Exception: logging.exception('Failed on post-execution callback using %s', callback) @@ -101,7 +101,7 @@ def default_action_log(log, **_): try: with create_session() as session: session.add(log) - except Exception as error: # pylint: disable=broad-except + except Exception as error: logging.warning("Failed to log action with %s", error) diff --git a/airflow/utils/dag_processing.py b/airflow/utils/dag_processing.py index 676b6105aad0c..5af49e866fb63 100644 --- a/airflow/utils/dag_processing.py +++ b/airflow/utils/dag_processing.py @@ -33,7 +33,7 @@ from multiprocessing.connection import Connection as MultiprocessingConnection from typing import TYPE_CHECKING, Any, Callable, Dict, List, NamedTuple, Optional, Tuple, Union, cast -from setproctitle import setproctitle # pylint: disable=no-name-in-module +from setproctitle import setproctitle from sqlalchemy import or_ from tabulate import tabulate @@ -463,7 +463,7 @@ def end(self): self._parent_signal_conn.close() -class DagFileProcessorManager(LoggingMixin): # pylint: disable=too-many-instance-attributes +class DagFileProcessorManager(LoggingMixin): """ Given a list of DAG definition files, this kicks off several processors in parallel to process them and put the results to a multiprocessing.Queue @@ -580,7 +580,7 @@ def register_exit_signals(self): # So that we ignore the debug dump signal, making it easier to send signal.signal(signal.SIGUSR2, signal.SIG_IGN) - def _exit_gracefully(self, signum, frame): # pylint: disable=unused-argument + def _exit_gracefully(self, signum, frame): """Helper method to clean up DAG file processors to avoid leaving orphan processes.""" self.log.info("Exiting gracefully upon receiving signal %s", signum) self.log.debug("Current Stacktrace is: %s", '\n'.join(map(str, inspect.stack()))) @@ -628,7 +628,6 @@ def _run_parsing_loop(self): while True: loop_start_time = time.monotonic() - # pylint: disable=no-else-break ready = multiprocessing.connection.wait(self.waitables.keys(), timeout=poll_time) if self._signal_conn in ready: agent_signal = self._signal_conn.recv() @@ -674,7 +673,7 @@ def _run_parsing_loop(self): self._processors.pop(processor.file_path) self._refresh_dag_dir() - self._find_zombies() # pylint: disable=no-value-for-parameter + self._find_zombies() 
self._kill_timed_out_processors() @@ -760,8 +759,8 @@ def _refresh_dag_dir(self): try: self.log.debug("Removing old import errors") - self.clear_nonexistent_import_errors() # pylint: disable=no-value-for-parameter - except Exception: # noqa pylint: disable=broad-except + self.clear_nonexistent_import_errors() + except Exception: self.log.exception("Error removing old import errors") SerializedDagModel.remove_deleted_dags(self._file_paths) @@ -1218,7 +1217,6 @@ def emit_metrics(self): 'dag_processing.import_errors', sum(stat.import_errors for stat in self._file_stats.values()) ) - # pylint: disable=missing-docstring @property def file_paths(self): return self._file_paths diff --git a/airflow/utils/dates.py b/airflow/utils/dates.py index de5e52b668e57..93bbdc5b40128 100644 --- a/airflow/utils/dates.py +++ b/airflow/utils/dates.py @@ -20,7 +20,7 @@ from typing import Dict, List, Optional, Union from croniter import croniter -from dateutil.relativedelta import relativedelta # noqa: F401 for doctest +from dateutil.relativedelta import relativedelta # for doctest from airflow.utils import timezone @@ -34,7 +34,6 @@ } -# pylint: disable=too-many-branches def date_range( start_date: datetime, end_date: Optional[datetime] = None, diff --git a/airflow/utils/db.py b/airflow/utils/db.py index 979e020252702..ae8dc0e6d0de1 100644 --- a/airflow/utils/db.py +++ b/airflow/utils/db.py @@ -23,8 +23,8 @@ from airflow import settings from airflow.configuration import conf -from airflow.jobs.base_job import BaseJob # noqa: F401 # pylint: disable=unused-import -from airflow.models import ( # noqa: F401 # pylint: disable=unused-import +from airflow.jobs.base_job import BaseJob # noqa: F401 +from airflow.models import ( # noqa: F401 DAG, XCOM_RETURN_KEY, BaseOperator, @@ -47,13 +47,10 @@ ) # We need to add this model manually to get reset working well -from airflow.models.serialized_dag import SerializedDagModel # noqa: F401 # pylint: disable=unused-import +from airflow.models.serialized_dag import SerializedDagModel # noqa: F401 # TODO: remove create_session once we decide to break backward compatibility -from airflow.utils.session import ( # noqa: F401 # pylint: disable=unused-import - create_session, - provide_session, -) +from airflow.utils.session import create_session, provide_session # noqa: F401 log = logging.getLogger(__name__) @@ -577,7 +574,7 @@ def initdb(): from flask_appbuilder.models.sqla import Base - Base.metadata.create_all(settings.engine) # pylint: disable=no-member + Base.metadata.create_all(settings.engine) def _get_alembic_config(): @@ -745,10 +742,10 @@ def drop_airflow_models(connection): Base.metadata.remove(user) Base.metadata.remove(chart) # alembic adds significant import time, so we import it lazily - from alembic.migration import MigrationContext # noqa + from alembic.migration import MigrationContext migration_ctx = MigrationContext.configure(connection) - version = migration_ctx._version # noqa pylint: disable=protected-access + version = migration_ctx._version if version.exists(connection): version.drop(connection) @@ -762,7 +759,7 @@ def drop_flask_models(connection): """ from flask_appbuilder.models.sqla import Base - Base.metadata.drop_all(connection) # pylint: disable=no-member + Base.metadata.drop_all(connection) @provide_session diff --git a/airflow/utils/decorators.py b/airflow/utils/decorators.py index 60f033cf72d4a..e0b314b13e9b4 100644 --- a/airflow/utils/decorators.py +++ b/airflow/utils/decorators.py @@ -21,7 +21,7 @@ from functools import wraps from typing import 
Callable, TypeVar, cast -T = TypeVar('T', bound=Callable) # pylint: disable=invalid-name +T = TypeVar('T', bound=Callable) def apply_defaults(func: T) -> T: diff --git a/airflow/utils/edgemodifier.py b/airflow/utils/edgemodifier.py index 70cad537845c8..9fbac62b9d2ad 100644 --- a/airflow/utils/edgemodifier.py +++ b/airflow/utils/edgemodifier.py @@ -121,6 +121,6 @@ def add_edge_info(self, dag, upstream_id: str, downstream_id: str): # Factory functions -def Label(label: str): # pylint: disable=C0103 +def Label(label: str): """Creates an EdgeModifier that sets a human-readable label on the edge.""" return EdgeModifier(label=label) diff --git a/airflow/utils/event_scheduler.py b/airflow/utils/event_scheduler.py index 67783385e3de3..5f9b09cbd5257 100644 --- a/airflow/utils/event_scheduler.py +++ b/airflow/utils/event_scheduler.py @@ -29,7 +29,7 @@ def call_regular_interval( action: Callable, arguments=(), kwargs={}, - ): # pylint: disable=dangerous-default-value + ): """Helper to call a function at (roughly) a given interval""" def repeat(*args, **kwargs): diff --git a/airflow/utils/file.py b/airflow/utils/file.py index b0086122eaa78..c02207951f5d5 100644 --- a/airflow/utils/file.py +++ b/airflow/utils/file.py @@ -31,7 +31,7 @@ log = logging.getLogger(__name__) -def TemporaryDirectory(*args, **kwargs): # pylint: disable=invalid-name +def TemporaryDirectory(*args, **kwargs): """This function is deprecated. Please use `tempfile.TemporaryDirectory`""" import warnings from tempfile import TemporaryDirectory as TmpDir @@ -41,7 +41,7 @@ def TemporaryDirectory(*args, **kwargs): # pylint: disable=invalid-name DeprecationWarning, stacklevel=2, ) - # pylint: disable=consider-using-with + return TmpDir(*args, **kwargs) @@ -91,7 +91,7 @@ def open_maybe_zipped(fileloc, mode='r'): if archive and zipfile.is_zipfile(archive): return io.TextIOWrapper(zipfile.ZipFile(archive, mode=mode).open(filename)) else: - # pylint: disable=consider-using-with + return open(fileloc, mode=mode) @@ -195,7 +195,7 @@ def find_dag_file_paths(directory: Union[str, "pathlib.Path"], safe_mode: bool) continue file_paths.append(file_path) - except Exception: # noqa pylint: disable=broad-except + except Exception: log.exception("Error while examining %s", file_path) return file_paths diff --git a/airflow/utils/helpers.py b/airflow/utils/helpers.py index 7fce1774a321a..f74a01729c7a3 100644 --- a/airflow/utils/helpers.py +++ b/airflow/utils/helpers.py @@ -65,7 +65,7 @@ def alchemy_to_dict(obj: Any) -> Optional[Dict]: def ask_yesno(question): """Helper to get yes / no answer from user.""" yes = {'yes', 'y'} - no = {'no', 'n'} # pylint: disable=invalid-name + no = {'no', 'n'} done = False print(question) @@ -95,8 +95,8 @@ def as_tuple(obj): return tuple([obj]) -T = TypeVar('T') # pylint: disable=invalid-name -S = TypeVar('S') # pylint: disable=invalid-name +T = TypeVar('T') +S = TypeVar('S') def chunks(items: List[T], chunk_size: int) -> Generator[List[T], None, None]: diff --git a/airflow/utils/log/cloudwatch_task_handler.py b/airflow/utils/log/cloudwatch_task_handler.py index 9344e98eb450d..cdb7b7958629a 100644 --- a/airflow/utils/log/cloudwatch_task_handler.py +++ b/airflow/utils/log/cloudwatch_task_handler.py @@ -18,7 +18,6 @@ """This module is deprecated. 
Please use :mod:`airflow.providers.amazon.aws.log.cloudwatch_task_handler`.""" import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.log.cloudwatch_task_handler import CloudwatchTaskHandler # noqa warnings.warn( diff --git a/airflow/utils/log/es_task_handler.py b/airflow/utils/log/es_task_handler.py index 27e202f36b028..30ec16a1c3d30 100644 --- a/airflow/utils/log/es_task_handler.py +++ b/airflow/utils/log/es_task_handler.py @@ -18,7 +18,6 @@ """This module is deprecated. Please use :mod:`airflow.providers.elasticsearch.log.es_task_handler`.""" import warnings -# pylint: disable=unused-import from airflow.providers.elasticsearch.log.es_task_handler import ElasticsearchTaskHandler # noqa warnings.warn( diff --git a/airflow/utils/log/file_processor_handler.py b/airflow/utils/log/file_processor_handler.py index a1b47f2117777..7792faf48eed4 100644 --- a/airflow/utils/log/file_processor_handler.py +++ b/airflow/utils/log/file_processor_handler.py @@ -111,7 +111,7 @@ def _symlink_latest_log_directory(self): """ log_directory = self._get_log_directory() latest_log_directory_path = os.path.join(self.base_log_folder, "latest") - if os.path.isdir(log_directory): # pylint: disable=too-many-nested-blocks + if os.path.isdir(log_directory): try: # if symlink exists but is stale, update it if os.path.islink(latest_log_directory_path): diff --git a/airflow/utils/log/file_task_handler.py b/airflow/utils/log/file_task_handler.py index 1b52a4f55d54a..2dc9beb57b0ac 100644 --- a/airflow/utils/log/file_task_handler.py +++ b/airflow/utils/log/file_task_handler.py @@ -95,7 +95,7 @@ def _render_filename(self, ti, try_number): def _read_grouped_logs(self): return False - def _read(self, ti, try_number, metadata=None): # pylint: disable=unused-argument + def _read(self, ti, try_number, metadata=None): """ Template method that contains custom logic of reading logs given the try_number. @@ -119,10 +119,10 @@ def _read(self, ti, try_number, metadata=None): # pylint: disable=unused-argume with open(location) as file: log += f"*** Reading local file: {location}\n" log += "".join(file.readlines()) - except Exception as e: # pylint: disable=broad-except + except Exception as e: log = f"*** Failed to load local log file: {location}\n" log += f"*** {str(e)}\n" - elif conf.get('core', 'executor') == 'KubernetesExecutor': # pylint: disable=too-many-nested-blocks + elif conf.get('core', 'executor') == 'KubernetesExecutor': try: from airflow.kubernetes.kube_client import get_kube_client @@ -158,7 +158,7 @@ def _read(self, ti, try_number, metadata=None): # pylint: disable=unused-argume for line in res: log += line.decode() - except Exception as f: # pylint: disable=broad-except + except Exception as f: log += f'*** Unable to fetch logs from worker pod {ti.hostname} ***\n{str(f)}\n\n' else: url = os.path.join("http://{ti.hostname}:{worker_log_server_port}/log", log_relative_path).format( @@ -190,7 +190,7 @@ def _read(self, ti, try_number, metadata=None): # pylint: disable=unused-argume response.raise_for_status() log += '\n' + response.text - except Exception as e: # pylint: disable=broad-except + except Exception as e: log += f"*** Failed to fetch log file from worker. {str(e)}\n" return log, {'end_of_log': True} diff --git a/airflow/utils/log/gcs_task_handler.py b/airflow/utils/log/gcs_task_handler.py index 06a91f2b66773..69ae32f8a077e 100644 --- a/airflow/utils/log/gcs_task_handler.py +++ b/airflow/utils/log/gcs_task_handler.py @@ -18,7 +18,6 @@ """This module is deprecated. 
Please use :mod:`airflow.providers.google.cloud.log.gcs_task_handler`.""" import warnings -# pylint: disable=unused-import from airflow.providers.google.cloud.log.gcs_task_handler import GCSTaskHandler # noqa warnings.warn( diff --git a/airflow/utils/log/json_formatter.py b/airflow/utils/log/json_formatter.py index f271eba253fd3..140a4df834afd 100644 --- a/airflow/utils/log/json_formatter.py +++ b/airflow/utils/log/json_formatter.py @@ -27,7 +27,6 @@ class JSONFormatter(logging.Formatter): """JSONFormatter instances are used to convert a log record to json.""" - # pylint: disable=too-many-arguments def __init__(self, fmt=None, datefmt=None, style='%', json_fields=None, extras=None): super().__init__(fmt, datefmt, style) if extras is None: diff --git a/airflow/utils/log/logging_mixin.py b/airflow/utils/log/logging_mixin.py index c36d4b32be2b1..e101eb58fdc74 100644 --- a/airflow/utils/log/logging_mixin.py +++ b/airflow/utils/log/logging_mixin.py @@ -90,10 +90,10 @@ def close(self): """ @property - def closed(self): # noqa: D402 + def closed(self): """ - Returns False to indicate that the stream is not closed (as it will be - open for the duration of Airflow's lifecycle). + Returns False to indicate that the stream is not closed, as it will be + open for the duration of Airflow's lifecycle. For compatibility with the io.IOBase interface. """ @@ -137,7 +137,6 @@ class RedirectStdHandler(StreamHandler): sys.stderr/stdout at handler construction time. """ - # pylint: disable=super-init-not-called def __init__(self, stream): if not isinstance(stream, str): raise Exception( @@ -149,7 +148,7 @@ def __init__(self, stream): self._use_stderr = False # StreamHandler tries to set self.stream - Handler.__init__(self) # pylint: disable=non-parent-init-called + Handler.__init__(self) @property def stream(self): diff --git a/airflow/utils/log/s3_task_handler.py b/airflow/utils/log/s3_task_handler.py index bc5bfab28b62c..01365c6760e6d 100644 --- a/airflow/utils/log/s3_task_handler.py +++ b/airflow/utils/log/s3_task_handler.py @@ -18,7 +18,6 @@ """This module is deprecated. Please use :mod:`airflow.providers.amazon.aws.log.s3_task_handler`.""" import warnings -# pylint: disable=unused-import from airflow.providers.amazon.aws.log.s3_task_handler import S3TaskHandler # noqa warnings.warn( diff --git a/airflow/utils/log/secrets_masker.py b/airflow/utils/log/secrets_masker.py index 8bb7557a9df26..1e8ad16be3eef 100644 --- a/airflow/utils/log/secrets_masker.py +++ b/airflow/utils/log/secrets_masker.py @@ -171,7 +171,6 @@ def _redact_all(self, item: "RedactableItem", depth: int) -> "RedactableItem": else: return item - # pylint: disable=too-many-return-statements def _redact(self, item: "RedactableItem", name: Optional[str], depth: int) -> "RedactableItem": # Avoid spending too much effort on redacting on deeply nested # structures. This also avoid infinite recursion if a structure has @@ -201,7 +200,7 @@ def _redact(self, item: "RedactableItem", name: Optional[str], depth: int) -> "R else: return item # I think this should never happen, but it does not hurt to leave it just in case - except Exception as e: # pylint: disable=broad-except + except Exception as e: log.warning( "Unable to redact %r, please report this via . 
" "Error was: %s: %s", @@ -220,7 +219,6 @@ def redact(self, item: "RedactableItem", name: Optional[str] = None) -> "Redacta """ return self._redact(item, name, depth=0) - # pylint: enable=too-many-return-statements def add_mask(self, secret: Union[str, dict, Iterable], name: str = None): """Add a new secret to be masked to this filter instance.""" if isinstance(secret, dict): diff --git a/airflow/utils/log/stackdriver_task_handler.py b/airflow/utils/log/stackdriver_task_handler.py index 26786dd65937c..da2eda1af41c4 100644 --- a/airflow/utils/log/stackdriver_task_handler.py +++ b/airflow/utils/log/stackdriver_task_handler.py @@ -20,7 +20,6 @@ """ import warnings -# pylint: disable=unused-import from airflow.providers.google.cloud.log.stackdriver_task_handler import StackdriverTaskHandler # noqa warnings.warn( diff --git a/airflow/utils/log/task_handler_with_custom_formatter.py b/airflow/utils/log/task_handler_with_custom_formatter.py index 84c3b654ae621..5034d00fe16e9 100644 --- a/airflow/utils/log/task_handler_with_custom_formatter.py +++ b/airflow/utils/log/task_handler_with_custom_formatter.py @@ -45,7 +45,7 @@ def set_context(self, ti): if prefix: _, self.prefix_jinja_template = parse_template_string(prefix) rendered_prefix = self._render_prefix(ti) - formatter = logging.Formatter(rendered_prefix + ":" + self.formatter._fmt) # pylint: disable=W0212 + formatter = logging.Formatter(rendered_prefix + ":" + self.formatter._fmt) self.setFormatter(formatter) self.setLevel(self.level) diff --git a/airflow/utils/log/wasb_task_handler.py b/airflow/utils/log/wasb_task_handler.py index ef75fdd498e75..adfa94f593ffd 100644 --- a/airflow/utils/log/wasb_task_handler.py +++ b/airflow/utils/log/wasb_task_handler.py @@ -18,7 +18,6 @@ """This module is deprecated. 
Please use :mod:`airflow.providers.microsoft.azure.log.wasb_task_handler`.""" import warnings -# pylint: disable=unused-import from airflow.providers.microsoft.azure.log.wasb_task_handler import WasbTaskHandler # noqa warnings.warn( diff --git a/airflow/utils/orm_event_handlers.py b/airflow/utils/orm_event_handlers.py index f992873d83734..1a5da96812b41 100644 --- a/airflow/utils/orm_event_handlers.py +++ b/airflow/utils/orm_event_handlers.py @@ -30,7 +30,7 @@ def setup_event_handlers(engine): """Setups event handlers.""" - # pylint: disable=unused-argument, unused-variable + @event.listens_for(engine, "connect") def connect(dbapi_connection, connection_record): connection_record.info['pid'] = os.getpid() @@ -86,5 +86,3 @@ def after_cursor_execute(conn, cursor, statement, parameters, context, executema stack_info, statement.replace("\n", " "), ) - - # pylint: enable=unused-argument, unused-variable diff --git a/airflow/utils/platform.py b/airflow/utils/platform.py index 73eb609cdbcc2..0c1db2d3296a2 100644 --- a/airflow/utils/platform.py +++ b/airflow/utils/platform.py @@ -54,7 +54,7 @@ def get_airflow_git_version(): git_version = None try: git_version = str(pkgutil.get_data('airflow', 'git_version'), encoding="UTF-8") - except Exception as e: # pylint: disable=broad-except + except Exception as e: log.debug(e) return git_version diff --git a/airflow/utils/process_utils.py b/airflow/utils/process_utils.py index 1fbaccded9cd5..d3a707610b9e7 100644 --- a/airflow/utils/process_utils.py +++ b/airflow/utils/process_utils.py @@ -160,7 +160,7 @@ def execute_interactive(cmd: List[str], **kwargs): # open pseudo-terminal to interact with subprocess master_fd, slave_fd = pty.openpty() - try: # pylint: disable=too-many-nested-blocks + try: # use os.setsid() make it run in a new process group, or bash job control will not be enabled with subprocess.Popen( cmd, stdin=slave_fd, stdout=slave_fd, stderr=slave_fd, universal_newlines=True, **kwargs @@ -233,7 +233,7 @@ def patch_environ(new_env_variables: Dict[str, str]): """ current_env_state = {key: os.environ.get(key) for key in new_env_variables.keys()} os.environ.update(new_env_variables) - try: # pylint: disable=too-many-nested-blocks + try: yield finally: for key, old_value in current_env_state.items(): diff --git a/airflow/utils/serve_logs.py b/airflow/utils/serve_logs.py index fd5eadb2ac5a6..4d22d6c4fc1e4 100644 --- a/airflow/utils/serve_logs.py +++ b/airflow/utils/serve_logs.py @@ -29,7 +29,7 @@ from airflow.configuration import conf -def flask_app(): # noqa: D103 +def flask_app(): flask_app = Flask(__name__) max_request_age = conf.getint('webserver', 'log_request_clock_grace', fallback=30) log_directory = os.path.expanduser(conf.get('logging', 'BASE_LOG_FOLDER')) diff --git a/airflow/utils/session.py b/airflow/utils/session.py index f8b9bcd071eb4..5012136da7ed7 100644 --- a/airflow/utils/session.py +++ b/airflow/utils/session.py @@ -37,7 +37,7 @@ def create_session(): session.close() -RT = TypeVar("RT") # pylint: disable=invalid-name +RT = TypeVar("RT") def find_session_idx(func: Callable[..., RT]) -> int: diff --git a/airflow/utils/sqlalchemy.py b/airflow/utils/sqlalchemy.py index d60f57d5ba54b..34b20ed8ab948 100644 --- a/airflow/utils/sqlalchemy.py +++ b/airflow/utils/sqlalchemy.py @@ -37,7 +37,6 @@ using_mysql = conf.get('core', 'sql_alchemy_conn').lower().startswith('mysql') -# pylint: enable=unused-argument class UtcDateTime(TypeDecorator): """ Almost equivalent to :class:`~sqlalchemy.types.DateTime` with diff --git a/airflow/utils/task_group.py 
b/airflow/utils/task_group.py index 1a2cc7c6bc12f..90afa0765b0c4 100644 --- a/airflow/utils/task_group.py +++ b/airflow/utils/task_group.py @@ -334,7 +334,7 @@ class TaskGroupContext: _previous_context_managed_task_groups: List[TaskGroup] = [] @classmethod - def push_context_managed_task_group(cls, task_group: TaskGroup): # pylint: disable=redefined-outer-name + def push_context_managed_task_group(cls, task_group: TaskGroup): """Push a TaskGroup into the list of managed TaskGroups.""" if cls._context_managed_task_group: cls._previous_context_managed_task_groups.append(cls._context_managed_task_group) diff --git a/airflow/utils/timeout.py b/airflow/utils/timeout.py index cab432fd8451a..22a0faf4c7cfa 100644 --- a/airflow/utils/timeout.py +++ b/airflow/utils/timeout.py @@ -23,7 +23,7 @@ from airflow.utils.log.logging_mixin import LoggingMixin -class timeout(LoggingMixin): # pylint: disable=invalid-name +class timeout(LoggingMixin): """To be used in a ``with`` block and timeout its content.""" def __init__(self, seconds=1, error_message='Timeout'): @@ -31,7 +31,7 @@ def __init__(self, seconds=1, error_message='Timeout'): self.seconds = seconds self.error_message = error_message + ', PID: ' + str(os.getpid()) - def handle_timeout(self, signum, frame): # pylint: disable=unused-argument + def handle_timeout(self, signum, frame): """Logs information and raises AirflowTaskTimeout.""" self.log.error("Process timed out, PID: %s", str(os.getpid())) raise AirflowTaskTimeout(self.error_message) diff --git a/airflow/utils/types.py b/airflow/utils/types.py index a9b87255e9139..9f3c5591c0e00 100644 --- a/airflow/utils/types.py +++ b/airflow/utils/types.py @@ -27,7 +27,7 @@ class DagRunType(str, enum.Enum): SCHEDULED = "scheduled" MANUAL = "manual" - def __str__(self) -> str: # pylint: disable=invalid-str-returned + def __str__(self) -> str: return self.value @staticmethod diff --git a/airflow/utils/weekday.py b/airflow/utils/weekday.py index 45bdb32f5c374..698ec6067187f 100644 --- a/airflow/utils/weekday.py +++ b/airflow/utils/weekday.py @@ -41,7 +41,6 @@ def get_weekday_number(cls, week_day_str): """ sanitized_week_day_str = week_day_str.upper() - # pylint: disable=unsupported-membership-test if sanitized_week_day_str not in cls.__members__: raise AttributeError(f'Invalid Week Day passed: "{week_day_str}"') diff --git a/airflow/www/api/experimental/endpoints.py b/airflow/www/api/experimental/endpoints.py index 3033964deec8c..91528e9387669 100644 --- a/airflow/www/api/experimental/endpoints.py +++ b/airflow/www/api/experimental/endpoints.py @@ -37,7 +37,7 @@ log = logging.getLogger(__name__) -T = TypeVar("T", bound=Callable) # pylint: disable=invalid-name +T = TypeVar("T", bound=Callable) def requires_authentication(function: T): diff --git a/airflow/www/app.py b/airflow/www/app.py index 632e1d2727c6d..40edfb6f8f3a5 100644 --- a/airflow/www/app.py +++ b/airflow/www/app.py @@ -141,7 +141,7 @@ def create_app(config=None, testing=False): def cached_app(config=None, testing=False): """Return cached instance of Airflow WWW app""" - global app # pylint: disable=global-statement + global app if not app: app = create_app(config=config, testing=testing) return app @@ -149,5 +149,5 @@ def cached_app(config=None, testing=False): def purge_cached_app(): """Removes the cached version of the app in global state.""" - global app # pylint: disable=global-statement + global app app = None diff --git a/airflow/www/auth.py b/airflow/www/auth.py index 370f7c626d41f..8d42f510b48ad 100644 --- a/airflow/www/auth.py +++ 
b/airflow/www/auth.py @@ -20,7 +20,7 @@ from flask import current_app, flash, redirect, request, url_for -T = TypeVar("T", bound=Callable) # pylint: disable=invalid-name +T = TypeVar("T", bound=Callable) def has_access(permissions: Optional[Sequence[Tuple[str, str]]] = None) -> Callable[[T], T]: diff --git a/airflow/www/decorators.py b/airflow/www/decorators.py index 39d272ca6f6d7..7eeaa4a3d51bb 100644 --- a/airflow/www/decorators.py +++ b/airflow/www/decorators.py @@ -27,7 +27,7 @@ from airflow.models import Log from airflow.utils.session import create_session -T = TypeVar("T", bound=Callable) # pylint: disable=invalid-name +T = TypeVar("T", bound=Callable) def action_logging(f: T) -> T: @@ -68,7 +68,7 @@ def gzipped(f: T) -> T: @functools.wraps(f) def view_func(*args, **kwargs): @after_this_request - def zipper(response): # pylint: disable=unused-variable + def zipper(response): accept_encoding = request.headers.get('Accept-Encoding', '') if 'gzip' not in accept_encoding.lower(): diff --git a/airflow/www/extensions/init_jinja_globals.py b/airflow/www/extensions/init_jinja_globals.py index 4f1c93ece0ca3..ac5cb2bfe3948 100644 --- a/airflow/www/extensions/init_jinja_globals.py +++ b/airflow/www/extensions/init_jinja_globals.py @@ -47,7 +47,7 @@ def init_jinja_globals(app): try: airflow_version = airflow.__version__ - except Exception as e: # pylint: disable=broad-except + except Exception as e: airflow_version = None logging.error(e) diff --git a/airflow/www/extensions/init_manifest_files.py b/airflow/www/extensions/init_manifest_files.py index 0294fc394905d..c524475eac5f9 100644 --- a/airflow/www/extensions/init_manifest_files.py +++ b/airflow/www/extensions/init_manifest_files.py @@ -39,7 +39,7 @@ def parse_manifest_json(): for source, target in manifest.copy().items(): manifest[source] = os.path.join("dist", target) - except Exception: # noqa pylint: disable=broad-except + except Exception: print("Please make sure to build the frontend in static/ directory and restart the server") def get_asset_url(filename): @@ -50,7 +50,7 @@ def get_asset_url(filename): parse_manifest_json() @app.context_processor - def get_url_for_asset(): # pylint: disable=unused-variable + def get_url_for_asset(): """ Template tag to return the asset URL. WebPack renders the assets after minification and modification diff --git a/airflow/www/forms.py b/airflow/www/forms.py index b17425bf5849a..45c9dd5f58cc2 100644 --- a/airflow/www/forms.py +++ b/airflow/www/forms.py @@ -159,7 +159,7 @@ class DagRunForm(DynamicForm): def populate_obj(self, item): """Populates the attributes of the passed obj with data from the form’s fields.""" - super().populate_obj(item) # pylint: disable=no-member + super().populate_obj(item) item.run_type = DagRunType.from_run_id(item.run_id) if item.conf: item.conf = json.loads(item.conf) diff --git a/airflow/www/gunicorn_config.py b/airflow/www/gunicorn_config.py index bd64f001c55b5..7c07b661037aa 100644 --- a/airflow/www/gunicorn_config.py +++ b/airflow/www/gunicorn_config.py @@ -28,7 +28,5 @@ def post_worker_init(_): This is used by airflow.cli.commands.webserver_command to track the status of the worker. 
""" - old_title = setproctitle.getproctitle() # pylint: disable=c-extension-no-member - setproctitle.setproctitle( # pylint: disable=c-extension-no-member - settings.GUNICORN_WORKER_READY_PREFIX + old_title - ) + old_title = setproctitle.getproctitle() + setproctitle.setproctitle(settings.GUNICORN_WORKER_READY_PREFIX + old_title) diff --git a/airflow/www/security.py b/airflow/www/security.py index 8768587fdb769..9259043c571d2 100644 --- a/airflow/www/security.py +++ b/airflow/www/security.py @@ -58,7 +58,7 @@ } -class AirflowSecurityManager(SecurityManager, LoggingMixin): # pylint: disable=too-many-public-methods +class AirflowSecurityManager(SecurityManager, LoggingMixin): """Custom security manager, which introduces a permission model adapted to Airflow""" ########################################################################### @@ -447,8 +447,8 @@ def clean_perms(self): sesh = self.get_session pvms = sesh.query(sqla_models.PermissionView).filter( or_( - sqla_models.PermissionView.permission == None, # noqa pylint: disable=singleton-comparison - sqla_models.PermissionView.view_menu == None, # noqa pylint: disable=singleton-comparison + sqla_models.PermissionView.permission == None, # noqa + sqla_models.PermissionView.view_menu == None, ) ) # Since FAB doesn't define ON DELETE CASCADE on these tables, we need @@ -736,7 +736,7 @@ def check_authorization( class ApplessAirflowSecurityManager(AirflowSecurityManager): """Security Manager that doesn't need the whole flask app""" - def __init__(self, session=None): # pylint: disable=super-init-not-called + def __init__(self, session=None): self.session = session @property diff --git a/airflow/www/utils.py b/airflow/www/utils.py index e0ba0dbaadf3d..1b446523be804 100644 --- a/airflow/www/utils.py +++ b/airflow/www/utils.py @@ -27,7 +27,7 @@ from flask_appbuilder.models.sqla import filters as fab_sqlafilters from flask_appbuilder.models.sqla.interface import SQLAInterface from pygments import highlight, lexers -from pygments.formatters import HtmlFormatter # noqa pylint: disable=no-name-in-module +from pygments.formatters import HtmlFormatter from airflow.utils import timezone from airflow.utils.code_utils import get_python_source @@ -37,7 +37,7 @@ from airflow.www.widgets import AirflowDateTimePickerWidget -def get_sensitive_variables_fields(): # noqa: D103 +def get_sensitive_variables_fields(): import warnings from airflow.utils.log.secrets_masker import get_sensitive_variables_fields @@ -51,7 +51,7 @@ def get_sensitive_variables_fields(): # noqa: D103 return get_sensitive_variables_fields() -def should_hide_value_for_key(key_name): # noqa: D103 +def should_hide_value_for_key(key_name): import warnings from airflow.utils.log.secrets_masker import should_hide_value_for_key @@ -140,7 +140,7 @@ def generate_pages(current_page, num_of_pages, search=None, status=None, tags=No if current_page > 0: page_link = f'?{get_params(page=current_page - 1, search=search, status=status, tags=tags)}' - output.append(previous_node.format(href_link=page_link, disabled=is_disabled)) # noqa + output.append(previous_node.format(href_link=page_link, disabled=is_disabled)) mid = int(window / 2) last_page = num_of_pages - 1 @@ -152,7 +152,7 @@ def generate_pages(current_page, num_of_pages, search=None, status=None, tags=No else: pages = list(range(num_of_pages - window, last_page + 1)) - def is_current(current, page): # noqa + def is_current(current, page): return page == current for page in pages: @@ -163,7 +163,7 @@ def is_current(current, page): # noqa else 
f'?{get_params(page=page, search=search, status=status, tags=tags)}', 'page_num': page + 1, } - output.append(page_node.format(**vals)) # noqa + output.append(page_node.format(**vals)) is_disabled = 'disabled' if current_page >= num_of_pages - 1 else '' @@ -173,7 +173,7 @@ def is_current(current, page): # noqa else f'?{get_params(page=current_page + 1, search=search, status=status, tags=tags)}' ) - output.append(next_node.format(href_link=page_link, disabled=is_disabled)) # noqa + output.append(next_node.format(href_link=page_link, disabled=is_disabled)) last_node_link = ( void_link @@ -220,7 +220,7 @@ def task_instance_link(attr): url_root = url_for( 'Airflow.graph', dag_id=dag_id, root=task_id, execution_date=execution_date.isoformat() ) - return Markup( # noqa + return Markup( """ {task_id} @@ -237,7 +237,7 @@ def state_token(state): """Returns a formatted string with HTML for a given State""" color = State.color(state) fg_color = State.color_fg(state) - return Markup( # noqa + return Markup( """ {state} @@ -256,7 +256,7 @@ def nobr_f(attr_name): def nobr(attr): f = attr.get(attr_name) - return Markup("{}").format(f) # noqa + return Markup("{}").format(f) return nobr @@ -264,7 +264,7 @@ def nobr(attr): def datetime_f(attr_name): """Returns a formatted string with HTML for given DataTime""" - def dt(attr): # pylint: disable=invalid-name + def dt(attr): f = attr.get(attr_name) as_iso = f.isoformat() if f else '' if not as_iso: @@ -273,21 +273,18 @@ def dt(attr): # pylint: disable=invalid-name if timezone.utcnow().isoformat()[:4] == f[:4]: f = f[5:] # The empty title will be replaced in JS code when non-UTC dates are displayed - return Markup('').format(as_iso, f) # noqa + return Markup('').format(as_iso, f) return dt -# pylint: enable=invalid-name - - def json_f(attr_name): """Returns a formatted string with HTML for given JSON serializable""" def json_(attr): f = attr.get(attr_name) serialized = json.dumps(f) - return Markup('{}').format(serialized) # noqa + return Markup('{}').format(serialized) return json_ @@ -297,7 +294,7 @@ def dag_link(attr): dag_id = attr.get('dag_id') execution_date = attr.get('execution_date') url = url_for('Airflow.graph', dag_id=dag_id, execution_date=execution_date) - return Markup('{}').format(url, dag_id) if dag_id else Markup('None') # noqa + return Markup('{}').format(url, dag_id) if dag_id else Markup('None') def dag_run_link(attr): @@ -306,10 +303,10 @@ def dag_run_link(attr): run_id = attr.get('run_id') execution_date = attr.get('execution_date') url = url_for('Airflow.graph', dag_id=dag_id, run_id=run_id, execution_date=execution_date) - return Markup('{run_id}').format(url=url, run_id=run_id) # noqa + return Markup('{run_id}').format(url=url, run_id=run_id) -def pygment_html_render(s, lexer=lexers.TextLexer): # noqa pylint: disable=no-member +def pygment_html_render(s, lexer=lexers.TextLexer): """Highlight text using a given Lexer""" return highlight(s, lexer(), HtmlFormatter(linenos=True)) @@ -321,11 +318,11 @@ def render(obj, lexer): out = Markup(pygment_html_render(obj, lexer)) elif isinstance(obj, (tuple, list)): for i, text_to_render in enumerate(obj): - out += Markup("
<div>List item #{}</div>").format(i)  # noqa
+            out += Markup("<div>List item #{}</div>").format(i)
             out += Markup("<div>" + pygment_html_render(text_to_render, lexer) + "</div>")
     elif isinstance(obj, dict):
         for k, v in obj.items():
-            out += Markup('<div>Dict item "{}"</div>').format(k)  # noqa
+            out += Markup('<div>Dict item "{}"</div>').format(k)
             out += Markup("<div>" + pygment_html_render(v, lexer) + "</div>")
     return out
@@ -349,7 +346,6 @@ def wrapped_markdown(s, css_class='rich_doc'):
     return Markup(f'<div class="{css_class}" >' + markdown.markdown(s, extensions=['tables']) + "</div>
") -# pylint: disable=no-member def get_attr_renderer(): """Return Dictionary containing different Pygments Lexers for Rendering & Highlighting""" return { @@ -374,9 +370,6 @@ def get_attr_renderer(): } -# pylint: enable=no-member - - def get_chart_height(dag): """ We use the number of tasks in the DAG as a heuristic to @@ -388,33 +381,33 @@ def get_chart_height(dag): return 600 + len(dag.tasks) * 10 -class UtcAwareFilterMixin: # noqa: D101 +class UtcAwareFilterMixin: """Mixin for filter for UTC time.""" def apply(self, query, value): """Apply the filter.""" value = timezone.parse(value, timezone=timezone.utc) - return super().apply(query, value) # noqa + return super().apply(query, value) -class UtcAwareFilterEqual(UtcAwareFilterMixin, fab_sqlafilters.FilterEqual): # noqa: D101 +class UtcAwareFilterEqual(UtcAwareFilterMixin, fab_sqlafilters.FilterEqual): """Equality filter for UTC time.""" -class UtcAwareFilterGreater(UtcAwareFilterMixin, fab_sqlafilters.FilterGreater): # noqa: D101 +class UtcAwareFilterGreater(UtcAwareFilterMixin, fab_sqlafilters.FilterGreater): """Greater Than filter for UTC time.""" -class UtcAwareFilterSmaller(UtcAwareFilterMixin, fab_sqlafilters.FilterSmaller): # noqa: D101 +class UtcAwareFilterSmaller(UtcAwareFilterMixin, fab_sqlafilters.FilterSmaller): """Smaller Than filter for UTC time.""" -class UtcAwareFilterNotEqual(UtcAwareFilterMixin, fab_sqlafilters.FilterNotEqual): # noqa: D101 +class UtcAwareFilterNotEqual(UtcAwareFilterMixin, fab_sqlafilters.FilterNotEqual): """Not Equal To filter for UTC time.""" -class UtcAwareFilterConverter(fab_sqlafilters.SQLAFilterConverter): # noqa: D101 +class UtcAwareFilterConverter(fab_sqlafilters.SQLAFilterConverter): """Retrieve conversion tables for UTC-Aware filters.""" conversion_table = ( diff --git a/airflow/www/views.py b/airflow/www/views.py index 9bc677ed2211a..e21b8230936b7 100644 --- a/airflow/www/views.py +++ b/airflow/www/views.py @@ -54,7 +54,7 @@ from flask_appbuilder import BaseView, ModelView, expose from flask_appbuilder.actions import action from flask_appbuilder.fieldwidgets import Select2Widget -from flask_appbuilder.models.sqla.filters import BaseFilter # noqa +from flask_appbuilder.models.sqla.filters import BaseFilter from flask_appbuilder.security.views import ( PermissionModelView, PermissionViewModelView, @@ -75,7 +75,7 @@ from jinja2.utils import htmlsafe_json_dumps, pformat # type: ignore from pendulum.datetime import DateTime from pygments import highlight, lexers -from pygments.formatters import HtmlFormatter # noqa pylint: disable=no-name-in-module +from pygments.formatters import HtmlFormatter from sqlalchemy import and_, desc, func, or_, union_all from sqlalchemy.orm import joinedload from wtforms import SelectField, validators @@ -382,24 +382,24 @@ def get_downstream(task): ###################################################################################### -def circles(error): # pylint: disable=unused-argument +def circles(error): """Show Circles on screen for any error in the Webserver""" return ( render_template( 'airflow/circles.html', hostname=socket.getfqdn() - if conf.getboolean('webserver', 'EXPOSE_HOSTNAME', fallback=True) # noqa + if conf.getboolean('webserver', 'EXPOSE_HOSTNAME', fallback=True) else 'redact', ), 404, ) -def show_traceback(error): # pylint: disable=unused-argument +def show_traceback(error): """Show Traceback for a given error""" return ( render_template( - 'airflow/traceback.html', # noqa + 'airflow/traceback.html', python_version=sys.version.split(" ")[0], 
airflow_version=version, hostname=socket.getfqdn() @@ -418,7 +418,7 @@ def show_traceback(error): # pylint: disable=unused-argument ###################################################################################### -class AirflowBaseView(BaseView): # noqa: D101 +class AirflowBaseView(BaseView): """Base View to set Airflow related properties""" from airflow import macros @@ -443,7 +443,7 @@ def render_template(self, *args, **kwargs): ) -def add_user_permissions_to_dag(sender, template, context, **extra): # noqa pylint: disable=unused-argument +def add_user_permissions_to_dag(sender, template, context, **extra): """ Adds `.can_edit`, `.can_trigger`, and `.can_delete` properties to DAG based on current user's permissions. @@ -468,7 +468,7 @@ def add_user_permissions_to_dag(sender, template, context, **extra): # noqa pyl before_render_template.connect(add_user_permissions_to_dag) -class Airflow(AirflowBaseView): # noqa: D101 pylint: disable=too-many-public-methods +class Airflow(AirflowBaseView): """Main Airflow application.""" @expose('/health') @@ -489,7 +489,7 @@ def health(self): latest_scheduler_heartbeat = scheduler_job.latest_heartbeat.isoformat() if scheduler_job.is_alive(): scheduler_status = 'healthy' - except Exception: # noqa pylint: disable=broad-except + except Exception: payload['metadatabase']['status'] = 'unhealthy' payload['scheduler'] = { @@ -504,7 +504,7 @@ def health(self): [ (permissions.ACTION_CAN_READ, permissions.RESOURCE_WEBSITE), ] - ) # pylint: disable=too-many-locals,too-many-statements + ) def index(self): """Home view.""" hide_paused_dags_by_default = conf.getboolean('webserver', 'hide_paused_dags_by_default') @@ -553,11 +553,10 @@ def index(self): # read orm_dags from the db dags_query = session.query(DagModel).filter(~DagModel.is_subdag, DagModel.is_active) - # pylint: disable=no-member if arg_search_query: dags_query = dags_query.filter( DagModel.dag_id.ilike('%' + arg_search_query + '%') - | DagModel.owners.ilike('%' + arg_search_query + '%') # noqa + | DagModel.owners.ilike('%' + arg_search_query + '%') ) if arg_tags_filter: @@ -701,7 +700,7 @@ def dag_stats(self, session=None): return wwwutils.json_response({}) payload = {} - dag_state_stats = dag_state_stats.filter(dr.dag_id.in_(filter_dag_ids)) # pylint: disable=no-member + dag_state_stats = dag_state_stats.filter(dr.dag_id.in_(filter_dag_ids)) data = {} for dag_id, state, count in dag_state_stats: @@ -741,7 +740,6 @@ def task_stats(self, session=None): else: filter_dag_ids = allowed_dag_ids - # pylint: disable=comparison-with-callable running_dag_run_query_result = ( session.query(DagRun.dag_id, DagRun.execution_date) .join(DagModel, DagModel.dag_id == DagRun.dag_id) @@ -749,13 +747,11 @@ def task_stats(self, session=None): ) # pylint: enable=comparison-with-callable - # pylint: disable=no-member running_dag_run_query_result = running_dag_run_query_result.filter(DagRun.dag_id.in_(filter_dag_ids)) # pylint: enable=no-member running_dag_run_query_result = running_dag_run_query_result.subquery('running_dag_run') - # pylint: disable=no-member # Select all task_instances from active dag_runs. 
running_task_instance_query_result = session.query( TaskInstance.dag_id.label('dag_id'), TaskInstance.state.label('state') @@ -769,7 +765,7 @@ def task_stats(self, session=None): # pylint: enable=no-member if conf.getboolean('webserver', 'SHOW_RECENT_STATS_FOR_COMPLETED_RUNS', fallback=True): - # pylint: disable=comparison-with-callable + last_dag_run = ( session.query(DagRun.dag_id, sqla.func.max(DagRun.execution_date).label('execution_date')) .join(DagModel, DagModel.dag_id == DagRun.dag_id) @@ -777,7 +773,7 @@ def task_stats(self, session=None): .group_by(DagRun.dag_id) ) # pylint: enable=comparison-with-callable - # pylint: disable=no-member + last_dag_run = last_dag_run.filter(DagRun.dag_id.in_(filter_dag_ids)) last_dag_run = last_dag_run.subquery('last_dag_run') # pylint: enable=no-member @@ -880,20 +876,14 @@ def code(self, session=None): dag_id = request.args.get('dag_id') dag_orm = DagModel.get_dagmodel(dag_id, session=session) code = DagCode.get_code_by_fileloc(dag_orm.fileloc) - html_code = Markup( - highlight( - code, lexers.PythonLexer(), HtmlFormatter(linenos=True) # pylint: disable=no-member - ) - ) + html_code = Markup(highlight(code, lexers.PythonLexer(), HtmlFormatter(linenos=True))) - except Exception as e: # pylint: disable=broad-except + except Exception as e: all_errors += ( "Exception encountered during " + f"dag_id retrieval/dag retrieval fallback/code highlighting:\n\n{e}\n" ) - html_code = Markup('

<p>Failed to load file.</p>

Details: {}

').format( # noqa - escape(all_errors) - ) + html_code = Markup('

Failed to load file.

Details: {}

').format(escape(all_errors)) return self.render_template( 'airflow/dag_code.html', @@ -966,12 +956,12 @@ def rendered_templates(self): ti = models.TaskInstance(task=task, execution_date=dttm) try: ti.get_rendered_template_fields() - except AirflowException as e: # pylint: disable=broad-except + except AirflowException as e: msg = "Error rendering template: " + escape(e) - if e.__cause__: # pylint: disable=using-constant-test + if e.__cause__: msg += Markup("

OriginalError: ") + escape(e.__cause__) flash(msg, "error") - except Exception as e: # pylint: disable=broad-except + except Exception as e: flash("Error rendering template: " + str(e), "error") title = "Rendered Template" html_dict = {} @@ -983,9 +973,7 @@ def rendered_templates(self): if renderer in renderers: html_dict[template_field] = renderers[renderer](content) else: - html_dict[template_field] = Markup("
<pre><code>{}</code></pre>
").format( - pformat(content) - ) # noqa + html_dict[template_field] = Markup("
<pre><code>{}</code></pre>
").format(pformat(content)) return self.render_template( 'airflow/ti_code.html', @@ -1026,10 +1014,10 @@ def rendered_k8s(self): pod_spec = ti.get_rendered_k8s_spec() except AirflowException as e: msg = "Error rendering Kubernetes POD Spec: " + escape(e) - if e.__cause__: # pylint: disable=using-constant-test + if e.__cause__: msg += Markup("

OriginalError: ") + escape(e.__cause__) flash(msg, "error") - except Exception as e: # pylint: disable=broad-except + except Exception as e: flash("Error rendering Kubernetes Pod Spec: " + str(e), "error") title = "Rendered K8s Pod Spec" html_dict = {} @@ -1260,14 +1248,14 @@ def task(self): for attr_name in dir(ti): if not attr_name.startswith('_'): attr = getattr(ti, attr_name) - if type(attr) != type(self.task): # noqa pylint: disable=unidiomatic-typecheck + if type(attr) != type(self.task): # noqa ti_attrs.append((attr_name, str(attr))) task_attrs = [] for attr_name in dir(task): if not attr_name.startswith('_'): attr = getattr(task, attr_name) - # pylint: disable=unidiomatic-typecheck + if type(attr) != type(self.task) and attr_name not in wwwutils.get_attr_renderer(): # noqa task_attrs.append((attr_name, str(attr))) # pylint: enable=unidiomatic-typecheck @@ -1395,14 +1383,14 @@ def run(self): valid_kubernetes_config = False try: - from airflow.executors.celery_executor import CeleryExecutor # noqa + from airflow.executors.celery_executor import CeleryExecutor valid_celery_config = isinstance(executor, CeleryExecutor) except ImportError: pass try: - from airflow.executors.kubernetes_executor import KubernetesExecutor # noqa + from airflow.executors.kubernetes_executor import KubernetesExecutor valid_kubernetes_config = isinstance(executor, KubernetesExecutor) except ImportError: @@ -1515,7 +1503,7 @@ def trigger(self, session=None): dr = DagRun.find(dag_id=dag_id, execution_date=execution_date, run_type=DagRunType.MANUAL) if dr: - flash(f"This run_id {dr.run_id} already exists") # noqa + flash(f"This run_id {dr.run_id} already exists") return redirect(origin) run_conf = {} @@ -1677,7 +1665,6 @@ def blocked(self, session=None): if not filter_dag_ids: return wwwutils.json_response([]) - # pylint: disable=comparison-with-callable dags = ( session.query(DagRun.dag_id, sqla.func.count(DagRun.id)) .filter(DagRun.state == State.RUNNING) @@ -1792,7 +1779,7 @@ def dagrun_success(self): origin = get_safe_url(request.form.get('origin')) return self._mark_dagrun_state_as_success(dag_id, execution_date, confirmed, origin) - def _mark_task_instance_state( # pylint: disable=too-many-arguments + def _mark_task_instance_state( self, dag_id, task_id, @@ -2020,8 +2007,8 @@ def recurse_nodes(task, visited): (permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_LOG), ] ) - @gzipped # pylint: disable=too-many-locals - @action_logging # pylint: disable=too-many-locals + @gzipped + @action_logging def tree(self): """Get Dag as tree.""" dag_id = request.args.get('dag_id') @@ -2095,8 +2082,8 @@ def tree(self): (permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE), ] ) - @gzipped # pylint: disable=too-many-locals - @action_logging # pylint: disable=too-many-locals + @gzipped + @action_logging def calendar(self): """Get DAG runs as calendar""" dag_id = request.args.get('dag_id') @@ -2247,8 +2234,8 @@ class GraphForm(DateTimeWithNumRunsWithDagRunsForm): (permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE), ] ) - @action_logging # pylint: disable=too-many-locals - @provide_session # pylint: disable=too-many-locals + @action_logging + @provide_session def duration(self, session=None): """Get Dag as duration graph.""" default_dag_run = conf.getint('webserver', 'default_dag_run_display_number') @@ -2856,7 +2843,7 @@ def conf(self): code_html = Markup( highlight( config, - lexers.IniLexer(), # Lexer call pylint: disable=no-member + lexers.IniLexer(), # Lexer call 
HtmlFormatter(noclasses=True), ) ) @@ -2890,14 +2877,14 @@ def redoc(self): class DagFilter(BaseFilter): """Filter using DagIDs""" - def apply(self, query, func): # noqa pylint: disable=redefined-outer-name,unused-argument + def apply(self, query, func): if current_app.appbuilder.sm.has_all_dags_access(): return query filter_dag_ids = current_app.appbuilder.sm.get_accessible_dag_ids(g.user) return query.filter(self.model.dag_id.in_(filter_dag_ids)) -class AirflowModelView(ModelView): # noqa: D101 +class AirflowModelView(ModelView): """Airflow Mode View.""" list_widget = AirflowModelListWidget @@ -2911,7 +2898,7 @@ class SlaMissModelView(AirflowModelView): route_base = '/slamiss' - datamodel = AirflowModelView.CustomSQLAInterface(SlaMiss) # noqa # type: ignore + datamodel = AirflowModelView.CustomSQLAInterface(SlaMiss) # type: ignore class_permission_name = permissions.RESOURCE_SLA_MISS method_permission_name = { @@ -3034,7 +3021,7 @@ class ConnectionModelView(AirflowModelView): route_base = '/connection' - datamodel = AirflowModelView.CustomSQLAInterface(Connection) # noqa # type: ignore + datamodel = AirflowModelView.CustomSQLAInterface(Connection) # type: ignore class_permission_name = permissions.RESOURCE_CONNECTION method_permission_name = { @@ -3200,7 +3187,7 @@ class PoolModelView(AirflowModelView): route_base = '/pool' - datamodel = AirflowModelView.CustomSQLAInterface(models.Pool) # noqa # type: ignore + datamodel = AirflowModelView.CustomSQLAInterface(models.Pool) # type: ignore class_permission_name = permissions.RESOURCE_POOL method_permission_name = { @@ -3238,34 +3225,30 @@ def action_muldelete(self, items): def pool_link(self): """Pool link rendering.""" - pool_id = self.get('pool') # noqa pylint: disable=no-member + pool_id = self.get('pool') if pool_id is not None: url = url_for('TaskInstanceModelView.list', _flt_3_pool=pool_id) - return Markup("{pool_id}").format(url=url, pool_id=pool_id) # noqa + return Markup("{pool_id}").format(url=url, pool_id=pool_id) else: return Markup('Invalid') def frunning_slots(self): """Running slots rendering.""" - pool_id = self.get('pool') # noqa pylint: disable=no-member - running_slots = self.get('running_slots') # noqa pylint: disable=no-member + pool_id = self.get('pool') + running_slots = self.get('running_slots') if pool_id is not None and running_slots is not None: url = url_for('TaskInstanceModelView.list', _flt_3_pool=pool_id, _flt_3_state='running') - return Markup("{running_slots}").format( # noqa - url=url, running_slots=running_slots - ) + return Markup("{running_slots}").format(url=url, running_slots=running_slots) else: return Markup('Invalid') def fqueued_slots(self): """Queued slots rendering.""" - pool_id = self.get('pool') # noqa pylint: disable=no-member - queued_slots = self.get('queued_slots') # noqa pylint: disable=no-member + pool_id = self.get('pool') + queued_slots = self.get('queued_slots') if pool_id is not None and queued_slots is not None: url = url_for('TaskInstanceModelView.list', _flt_3_pool=pool_id, _flt_3_state='queued') - return Markup("{queued_slots}").format( # noqa - url=url, queued_slots=queued_slots - ) + return Markup("{queued_slots}").format(url=url, queued_slots=queued_slots) else: return Markup('Invalid') @@ -3282,7 +3265,7 @@ class VariableModelView(AirflowModelView): list_template = 'airflow/variable_list.html' edit_template = 'airflow/variable_edit.html' - datamodel = AirflowModelView.CustomSQLAInterface(models.Variable) # noqa # type: ignore + datamodel = 
AirflowModelView.CustomSQLAInterface(models.Variable) # type: ignore class_permission_name = permissions.RESOURCE_VARIABLE method_permission_name = { @@ -3311,8 +3294,8 @@ class VariableModelView(AirflowModelView): def hidden_field_formatter(self): """Formats hidden fields""" - key = self.get('key') # noqa pylint: disable=no-member - val = self.get('val') # noqa pylint: disable=no-member + key = self.get('key') + val = self.get('val') if secrets_masker.should_hide_value_for_key(key): return Markup('*' * 8) if val: @@ -3326,7 +3309,7 @@ def hidden_field_formatter(self): validators_columns = {'key': [validators.DataRequired()]} - def prefill_form(self, form, request_id): # pylint: disable=unused-argument + def prefill_form(self, form, request_id): if secrets_masker.should_hide_value_for_key(form.key.data): form.val.data = '*' * 8 @@ -3345,7 +3328,7 @@ def action_varexport(self, items): for var in items: try: val = decoder.decode(var.val) - except Exception: # noqa pylint: disable=broad-except + except Exception: val = var.val var_dict[var.key] = val @@ -3359,12 +3342,8 @@ def action_varexport(self, items): def varimport(self): """Import variables""" try: - out = request.files['file'].read() - if isinstance(out, bytes): - variable_dict = json.loads(out.decode('utf-8')) - else: - variable_dict = json.loads(out) - except Exception: # noqa pylint: disable=broad-except + variable_dict = json.loads(request.files['file'].read()) + except Exception: self.update_redirect() flash("Missing file or syntax error.", 'error') return redirect(self.get_redirect()) @@ -3373,7 +3352,7 @@ def varimport(self): for k, v in variable_dict.items(): try: models.Variable.set(k, v, serialize_json=not isinstance(v, str)) - except Exception as e: # pylint: disable=broad-except + except Exception as e: logging.info('Variable import failed: %s', repr(e)) fail_count += 1 else: @@ -3390,7 +3369,7 @@ class JobModelView(AirflowModelView): route_base = '/job' - datamodel = AirflowModelView.CustomSQLAInterface(BaseJob) # noqa # type: ignore + datamodel = AirflowModelView.CustomSQLAInterface(BaseJob) # type: ignore class_permission_name = permissions.RESOURCE_JOB method_permission_name = { @@ -3444,7 +3423,7 @@ class DagRunModelView(AirflowModelView): route_base = '/dagrun' - datamodel = AirflowModelView.CustomSQLAInterface(models.DagRun) # noqa # type: ignore + datamodel = AirflowModelView.CustomSQLAInterface(models.DagRun) # type: ignore class_permission_name = permissions.RESOURCE_DAG_RUN method_permission_name = { @@ -3507,7 +3486,7 @@ class DagRunModelView(AirflowModelView): @action('muldelete', "Delete", "Are you sure you want to delete selected records?", single=False) @provide_session - def action_muldelete(self, items, session=None): # noqa # pylint: disable=unused-argument + def action_muldelete(self, items, session=None): """Multiple delete.""" self.datamodel.delete_all(items) self.update_redirect() @@ -3519,15 +3498,13 @@ def action_set_running(self, drs, session=None): """Set state to running.""" try: count = 0 - for dr in ( - session.query(DagRun).filter(DagRun.id.in_([dagrun.id for dagrun in drs])).all() - ): # noqa pylint: disable=no-member + for dr in session.query(DagRun).filter(DagRun.id.in_([dagrun.id for dagrun in drs])).all(): count += 1 dr.start_date = timezone.utcnow() dr.state = State.RUNNING session.commit() flash(f"{count} dag runs were set to running") - except Exception as ex: # pylint: disable=broad-except + except Exception as ex: flash(str(ex), 'error') flash('Failed to set state', 'error') return 
redirect(self.get_default_url()) @@ -3544,9 +3521,7 @@ def action_set_failed(self, drs, session=None): try: count = 0 altered_tis = [] - for dr in ( - session.query(DagRun).filter(DagRun.id.in_([dagrun.id for dagrun in drs])).all() - ): # noqa pylint: disable=no-member + for dr in session.query(DagRun).filter(DagRun.id.in_([dagrun.id for dagrun in drs])).all(): count += 1 altered_tis += set_dag_run_state_to_failed( current_app.dag_bag.get_dag(dr.dag_id), dr.execution_date, commit=True, session=session @@ -3556,7 +3531,7 @@ def action_set_failed(self, drs, session=None): "{count} dag runs and {altered_ti_count} task instances " "were set to failed".format(count=count, altered_ti_count=altered_ti_count) ) - except Exception: # noqa pylint: disable=broad-except + except Exception: flash('Failed to set state', 'error') return redirect(self.get_default_url()) @@ -3572,9 +3547,7 @@ def action_set_success(self, drs, session=None): try: count = 0 altered_tis = [] - for dr in ( - session.query(DagRun).filter(DagRun.id.in_([dagrun.id for dagrun in drs])).all() - ): # noqa pylint: disable=no-member + for dr in session.query(DagRun).filter(DagRun.id.in_([dagrun.id for dagrun in drs])).all(): count += 1 altered_tis += set_dag_run_state_to_success( current_app.dag_bag.get_dag(dr.dag_id), dr.execution_date, commit=True, session=session @@ -3584,7 +3557,7 @@ def action_set_success(self, drs, session=None): "{count} dag runs and {altered_ti_count} task instances " "were set to success".format(count=count, altered_ti_count=altered_ti_count) ) - except Exception: # noqa pylint: disable=broad-except + except Exception: flash('Failed to set state', 'error') return redirect(self.get_default_url()) @@ -3607,7 +3580,7 @@ def action_clear(self, drs, session=None): models.clear_task_instances(tis, session, dag=dag) flash(f"{count} dag runs and {cleared_ti_count} task instances were cleared") - except Exception: # noqa pylint: disable=broad-except + except Exception: flash('Failed to clear state', 'error') return redirect(self.get_default_url()) @@ -3617,7 +3590,7 @@ class LogModelView(AirflowModelView): route_base = '/log' - datamodel = AirflowModelView.CustomSQLAInterface(Log) # noqa # type:ignore + datamodel = AirflowModelView.CustomSQLAInterface(Log) # type:ignore class_permission_name = permissions.RESOURCE_AUDIT_LOG method_permission_name = { @@ -3647,7 +3620,7 @@ class TaskRescheduleModelView(AirflowModelView): route_base = '/taskreschedule' - datamodel = AirflowModelView.CustomSQLAInterface(models.TaskReschedule) # noqa # type: ignore + datamodel = AirflowModelView.CustomSQLAInterface(models.TaskReschedule) # type: ignore related_views = [DagRunModelView] class_permission_name = permissions.RESOURCE_TASK_RESCHEDULE @@ -3680,8 +3653,8 @@ class TaskRescheduleModelView(AirflowModelView): def duration_f(self): """Duration calculation.""" - end_date = self.get('end_date') # noqa pylint: disable=no-member - duration = self.get('duration') # noqa pylint: disable=no-member + end_date = self.get('end_date') + duration = self.get('duration') if end_date and duration: return timedelta(seconds=duration) return None @@ -3702,7 +3675,7 @@ class TaskInstanceModelView(AirflowModelView): route_base = '/taskinstance' - datamodel = AirflowModelView.CustomSQLAInterface(models.TaskInstance) # noqa # type: ignore + datamodel = AirflowModelView.CustomSQLAInterface(models.TaskInstance) # type: ignore class_permission_name = permissions.RESOURCE_TASK_INSTANCE method_permission_name = { @@ -3780,15 +3753,15 @@ class 
TaskInstanceModelView(AirflowModelView): def log_url_formatter(self): """Formats log URL.""" - log_url = self.get('log_url') # noqa pylint: disable=no-member - return Markup( # noqa + log_url = self.get('log_url') + return Markup( '' ).format(log_url=log_url) def duration_f(self): """Formats duration.""" - end_date = self.get('end_date') # noqa pylint: disable=no-member - duration = self.get('duration') # noqa pylint: disable=no-member + end_date = self.get('end_date') + duration = self.get('duration') if end_date and duration: return timedelta(seconds=duration) return None @@ -3833,7 +3806,7 @@ def action_clear(self, task_instances, session=None): flash(f"{len(task_instances)} task instances have been cleared") self.update_redirect() return redirect(self.get_redirect()) - except Exception as e: # noqa pylint: disable=broad-except + except Exception as e: flash(f'Failed to clear task instances: "{e}"', 'error') return None @@ -3846,7 +3819,7 @@ def set_task_instance_state(self, tis, target_state, session=None): ti.set_state(target_state, session) session.commit() flash(f"{count} task instances were set to '{target_state}'") - except Exception: # noqa pylint: disable=broad-except + except Exception: flash('Failed to set state', 'error') @action('set_running', "Set state to 'running'", '', single=False) @@ -3903,7 +3876,7 @@ class DagModelView(AirflowModelView): route_base = '/dagmodel' - datamodel = AirflowModelView.CustomSQLAInterface(DagModel) # noqa # type: ignore + datamodel = AirflowModelView.CustomSQLAInterface(DagModel) # type: ignore class_permission_name = permissions.RESOURCE_DAG method_permission_name = { @@ -3933,7 +3906,7 @@ class DagModelView(AirflowModelView): def get_query(self): """Default filters for model""" return ( - super() # noqa pylint: disable=no-member + super() .get_query() .filter(or_(models.DagModel.is_active, models.DagModel.is_paused)) .filter(~models.DagModel.is_subdag) @@ -3941,12 +3914,7 @@ def get_query(self): def get_count_query(self): """Default filters for model""" - return ( - super() # noqa pylint: disable=no-member - .get_count_query() - .filter(models.DagModel.is_active) - .filter(~models.DagModel.is_subdag) - ) + return super().get_count_query().filter(models.DagModel.is_active).filter(~models.DagModel.is_subdag) @auth.has_access( [ @@ -3963,13 +3931,13 @@ def autocomplete(self, session=None): wwwutils.json_response([]) # Provide suggestions of dag_ids and owners - dag_ids_query = session.query(DagModel.dag_id.label('item')).filter( # pylint: disable=no-member + dag_ids_query = session.query(DagModel.dag_id.label('item')).filter( ~DagModel.is_subdag, DagModel.is_active, DagModel.dag_id.ilike('%' + query + '%') - ) # noqa pylint: disable=no-member + ) owners_query = session.query(func.distinct(DagModel.owners).label('item')).filter( ~DagModel.is_subdag, DagModel.is_active, DagModel.owners.ilike('%' + query + '%') - ) # noqa pylint: disable=no-member + ) # Hide DAGs if not showing status: "all" status = flask_session.get(FILTER_STATUS_COOKIE) @@ -3981,7 +3949,7 @@ def autocomplete(self, session=None): owners_query = owners_query.filter(DagModel.is_paused) filter_dag_ids = current_app.appbuilder.sm.get_accessible_dag_ids(g.user) - # pylint: disable=no-member + dag_ids_query = dag_ids_query.filter(DagModel.dag_id.in_(filter_dag_ids)) owners_query = owners_query.filter(DagModel.dag_id.in_(filter_dag_ids)) # pylint: enable=no-member diff --git a/breeze b/breeze index aac92e088f00c..173e61998b9df 100755 --- a/breeze +++ b/breeze @@ -3322,14 +3322,8 @@ 
function breeze::run_static_checks() { echo "Running: pre-commit run" "${EXTRA_STATIC_CHECK_OPTIONS[@]}" "$@" echo pre-commit run "${EXTRA_STATIC_CHECK_OPTIONS[@]}" "$@" - elif [[ ${static_check} == "all-but-pylint" ]]; then - echo - echo "Setting SKIP=pylint. Running: pre-commit run" "${EXTRA_STATIC_CHECK_OPTIONS[@]}" "$@" - echo - echo - SKIP=pylint pre-commit run "${EXTRA_STATIC_CHECK_OPTIONS[@]}" "$@" else - if [[ ${static_check} == *"pylint"* || ${static_check} == *"mypy"* || ${static_check} == *"flake8"* ]]; then + if [[ ${static_check} == *"mypy"* || ${static_check} == *"flake8"* ]]; then echo echo "Running build pre-commit before running the requested static check" echo diff --git a/breeze-complete b/breeze-complete index d445194ef39e9..9e21c2c9be14e 100644 --- a/breeze-complete +++ b/breeze-complete @@ -75,7 +75,6 @@ EOF _breeze_allowed_static_checks=$(cat <<-EOF all -all-but-pylint airflow-config-yaml airflow-providers-available airflow-provider-yaml-files-ok @@ -92,7 +91,6 @@ check-hooks-apply check-integrations check-merge-conflict check-xml -consistent-pylint daysago-import-check debug-statements detect-private-key @@ -127,8 +125,6 @@ providers-init-file provider-yamls pydevd pydocstyle -pylint -pylint-tests python-no-log-warn pyupgrade restrict-start_date @@ -148,6 +144,7 @@ update-setup-cfg-file verify-db-migrations-documented version-sync yamllint +yesqa EOF ) diff --git a/chart/tests/helm_template_generator.py b/chart/tests/helm_template_generator.py index 6523d32895d14..fc5b7b6def520 100644 --- a/chart/tests/helm_template_generator.py +++ b/chart/tests/helm_template_generator.py @@ -33,7 +33,7 @@ BASE_URL_SPEC = "https://raw.githubusercontent.com/instrumenta/kubernetes-json-schema/master/v1.14.0" crd_lookup = { - 'keda.sh/v1alpha1::ScaledObject': 'https://raw.githubusercontent.com/kedacore/keda/v2.0.0/config/crd/bases/keda.sh_scaledobjects.yaml', # noqa: E501 # pylint: disable=line-too-long + 'keda.sh/v1alpha1::ScaledObject': 'https://raw.githubusercontent.com/kedacore/keda/v2.0.0/config/crd/bases/keda.sh_scaledobjects.yaml', # noqa: E501 } @@ -120,4 +120,4 @@ def render_k8s_object(obj, type_to_render): """ Function that renders dictionaries into k8s objects. For helm chart testing only. 
""" - return api_client._ApiClient__deserialize_model(obj, type_to_render) # pylint: disable=W0212 + return api_client._ApiClient__deserialize_model(obj, type_to_render) diff --git a/chart/tests/test_basic_helm_chart.py b/chart/tests/test_basic_helm_chart.py index d5f9681d182d4..20aa097887be7 100644 --- a/chart/tests/test_basic_helm_chart.py +++ b/chart/tests/test_basic_helm_chart.py @@ -251,7 +251,7 @@ def get_k8s_objs_with_image(obj: Union[List[Any], Dict[str, Any]]) -> List[Dict[ objs_with_image = get_k8s_objs_with_image(k8s_objects) for obj in objs_with_image: - image: str = obj["image"] # pylint: disable=invalid-sequence-index + image: str = obj["image"] if image.startswith(image_repo): # Make sure that a command is not specified assert "command" not in obj diff --git a/dev/import_all_classes.py b/dev/import_all_classes.py index 2a1e688cd2700..a1611f3e0cc7f 100755 --- a/dev/import_all_classes.py +++ b/dev/import_all_classes.py @@ -80,7 +80,7 @@ def onerror(_): attribute = getattr(_module, attribute_name) if isclass(attribute): imported_classes.append(class_name) - except Exception: # noqa + except Exception: exception_str = traceback.format_exc() tracebacks.append(exception_str) if tracebacks: diff --git a/dev/provider_packages/prepare_provider_packages.py b/dev/provider_packages/prepare_provider_packages.py index 49dc393d6ec8f..16bd0464b0090 100755 --- a/dev/provider_packages/prepare_provider_packages.py +++ b/dev/provider_packages/prepare_provider_packages.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# pylint: disable=wrong-import-order + # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file @@ -56,7 +56,7 @@ try: from yaml import CSafeLoader as SafeLoader except ImportError: - from yaml import SafeLoader # noqa + from yaml import SafeLoader INITIAL_CHANGELOG_CONTENT = """ @@ -114,7 +114,7 @@ # Note - we do not test protocols as they are not really part of the official API of # Apache Airflow -logger = logging.getLogger(__name__) # noqa +logger = logging.getLogger(__name__) PY3 = sys.version_info[0] == 3 @@ -759,7 +759,7 @@ def get_package_class_summary( for entity in EntityType: print_wrong_naming(entity, all_verified_entities[entity].wrong_entities) - entities_summary: Dict[EntityType, EntityTypeSummary] = {} # noqa + entities_summary: Dict[EntityType, EntityTypeSummary] = {} for entity_type in EntityType: entities_summary[entity_type] = get_details_about_classes( @@ -1301,7 +1301,7 @@ def get_provider_yaml(provider_package_id: str) -> Dict[str, Any]: if not os.path.exists(provider_yaml_file_name): raise Exception(f"The provider.yaml file is missing: {provider_yaml_file_name}") with open(provider_yaml_file_name) as provider_file: - provider_yaml_dict = yaml.load(provider_file, SafeLoader) # noqa + provider_yaml_dict = yaml.load(provider_file, SafeLoader) return provider_yaml_dict @@ -1730,7 +1730,7 @@ def black_mode(): config = parse_pyproject_toml(os.path.join(SOURCE_DIR_PATH, "pyproject.toml")) target_versions = set( - target_version_option_callback(None, None, config.get('target_version', [])), # noqa + target_version_option_callback(None, None, config.get('target_version', [])), ) return Mode( @@ -2347,7 +2347,7 @@ def generate_issue_content(package_ids: List[str], github_token: str, suffix: st except UnknownObjectException: # Fallback to issue if PR not found try: - pull_requests[pr_number] = repo.get_issue(pr_number) # noqa (same fields as PR) + pull_requests[pr_number] = repo.get_issue(pr_number) # (same 
fields as PR) except UnknownObjectException: console.print(f"[red]The PR #{pr_number} could not be found[/]") progress.advance(task) diff --git a/dev/retag_docker_images.py b/dev/retag_docker_images.py index 91e8dc4c4af1a..c84f464d460b0 100755 --- a/dev/retag_docker_images.py +++ b/dev/retag_docker_images.py @@ -131,4 +131,4 @@ def main( if __name__ == "__main__": - main() # noqa + main() diff --git a/dev/send_email.py b/dev/send_email.py index 0e9c8b7a45f02..756e4ee1d7d77 100755 --- a/dev/send_email.py +++ b/dev/send_email.py @@ -115,7 +115,7 @@ def inter_send_email( click.secho("✅ Email sent successfully", fg="green") except smtplib.SMTPAuthenticationError: sys.exit("SMTP User authentication error, Email not sent!") - except Exception as e: # pylint: disable=broad-except + except Exception as e: sys.exit(f"SMTP exception {e}") @@ -337,4 +337,4 @@ def announce(base_parameters, receiver_email: str): if __name__ == '__main__': - cli() # pylint: disable=no-value-for-parameter + cli() diff --git a/docs/build_docs.py b/docs/build_docs.py index 6bc60b6dcbf0c..a474c8349749d 100755 --- a/docs/build_docs.py +++ b/docs/build_docs.py @@ -26,24 +26,14 @@ from tabulate import tabulate from airflow.utils.helpers import partition -from docs.exts.docs_build import dev_index_generator, lint_checks # pylint: disable=no-name-in-module +from docs.exts.docs_build import dev_index_generator, lint_checks from docs.exts.docs_build.code_utils import CONSOLE_WIDTH, PROVIDER_INIT_FILE -from docs.exts.docs_build.docs_builder import ( # pylint: disable=no-name-in-module - DOCS_DIR, - AirflowDocsBuilder, - get_available_packages, -) -from docs.exts.docs_build.errors import ( # pylint: disable=no-name-in-module - DocBuildError, - display_errors_summary, -) -from docs.exts.docs_build.fetch_inventories import fetch_inventories # pylint: disable=no-name-in-module -from docs.exts.docs_build.github_action_utils import with_group # pylint: disable=no-name-in-module -from docs.exts.docs_build.package_filter import process_package_filters # pylint: disable=no-name-in-module -from docs.exts.docs_build.spelling_checks import ( # pylint: disable=no-name-in-module - SpellingError, - display_spelling_error_summary, -) +from docs.exts.docs_build.docs_builder import DOCS_DIR, AirflowDocsBuilder, get_available_packages +from docs.exts.docs_build.errors import DocBuildError, display_errors_summary +from docs.exts.docs_build.fetch_inventories import fetch_inventories +from docs.exts.docs_build.github_action_utils import with_group +from docs.exts.docs_build.package_filter import process_package_filters +from docs.exts.docs_build.spelling_checks import SpellingError, display_spelling_error_summary TEXT_RED = '\033[31m' TEXT_RESET = '\033[0m' diff --git a/docs/conf.py b/docs/conf.py index 5fa1b359232b8..3046303868f9c 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,4 +1,3 @@ -# flake8: noqa # Disable Flake8 because of all the sphinx imports # # Licensed to the Apache Software Foundation (ASF) under one @@ -47,9 +46,7 @@ import airflow from airflow.configuration import AirflowConfigParser, default_config_yaml -from docs.exts.docs_build.third_party_inventories import ( # pylint: disable=no-name-in-module,wrong-import-order - THIRD_PARTY_INDEXES, -) +from docs.exts.docs_build.third_party_inventories import THIRD_PARTY_INDEXES sys.path.append(os.path.join(os.path.dirname(__file__), 'exts')) @@ -65,7 +62,7 @@ PACKAGE_DIR = os.path.join(ROOT_DIR, 'airflow') PACKAGE_VERSION = airflow.__version__ elif 
PACKAGE_NAME.startswith('apache-airflow-providers-'): - from provider_yaml_utils import load_package_data # pylint: disable=no-name-in-module + from provider_yaml_utils import load_package_data ALL_PROVIDER_YAMLS = load_package_data() try: diff --git a/docs/exts/__init__.py b/docs/exts/__init__.py index dd26d4ff9adea..3f1ba525dab98 100644 --- a/docs/exts/__init__.py +++ b/docs/exts/__init__.py @@ -1,4 +1,3 @@ -# flake8: noqa # Disable Flake8 because of all the sphinx imports # # Licensed to the Apache Software Foundation (ASF) under one diff --git a/docs/exts/airflow_intersphinx.py b/docs/exts/airflow_intersphinx.py index 1cef267baab4c..1ae91bb9f35f7 100644 --- a/docs/exts/airflow_intersphinx.py +++ b/docs/exts/airflow_intersphinx.py @@ -19,7 +19,7 @@ import time from typing import Any, Dict -from provider_yaml_utils import load_package_data # pylint: disable=no-name-in-module +from provider_yaml_utils import load_package_data from sphinx.application import Sphinx CURRENT_DIR = os.path.dirname(__file__) @@ -156,7 +156,7 @@ def inspect_main(inv_data, name) -> None: print(f":{role_name}:`{name}:{entry}`") except ValueError as exc: print(exc.args[0] % exc.args[1:]) - except Exception as exc: # pylint: disable=broad-except + except Exception as exc: print(f'Unknown error: {exc!r}') provider_mapping = _generate_provider_intersphinx_mapping() diff --git a/docs/exts/docroles.py b/docs/exts/docroles.py index f12f493bd4ee6..8977035b059f3 100644 --- a/docs/exts/docroles.py +++ b/docs/exts/docroles.py @@ -60,14 +60,14 @@ def get_template_field(env, fullname): def template_field_role( app, - typ, # pylint: disable=unused-argument + typ, rawtext, text, lineno, inliner, - options=None, # pylint: disable=unused-argument + options=None, content=None, -): # pylint: disable=unused-argument +): """ A role that allows you to include a list of template fields in the middle of the text. This is especially useful when writing guides describing how to use the operator. 
@@ -109,7 +109,7 @@ def template_field_role( def setup(app): """Sets the extension up""" - from docutils.parsers.rst import roles # pylint: disable=wrong-import-order + from docutils.parsers.rst import roles roles.register_local_role("template-fields", partial(template_field_role, app)) diff --git a/docs/exts/docs_build/__init__.py b/docs/exts/docs_build/__init__.py index dd26d4ff9adea..3f1ba525dab98 100644 --- a/docs/exts/docs_build/__init__.py +++ b/docs/exts/docs_build/__init__.py @@ -1,4 +1,3 @@ -# flake8: noqa # Disable Flake8 because of all the sphinx imports # # Licensed to the Apache Software Foundation (ASF) under one diff --git a/docs/exts/docs_build/dev_index_generator.py b/docs/exts/docs_build/dev_index_generator.py index 800f4de48a927..a26ec5d8e7576 100644 --- a/docs/exts/docs_build/dev_index_generator.py +++ b/docs/exts/docs_build/dev_index_generator.py @@ -22,7 +22,7 @@ import jinja2 -from docs.exts.provider_yaml_utils import load_package_data # pylint: disable=no-name-in-module +from docs.exts.provider_yaml_utils import load_package_data CURRENT_DIR = os.path.abspath(os.path.dirname(__file__)) DOCS_DIR = os.path.abspath(os.path.join(CURRENT_DIR, os.pardir, os.pardir)) diff --git a/docs/exts/docs_build/docs_builder.py b/docs/exts/docs_build/docs_builder.py index 16370f76daead..de92880a07915 100644 --- a/docs/exts/docs_build/docs_builder.py +++ b/docs/exts/docs_build/docs_builder.py @@ -34,12 +34,8 @@ pretty_format_path, ) from docs.exts.docs_build.errors import DocBuildError, parse_sphinx_warnings - -# pylint: disable=no-name-in-module from docs.exts.docs_build.spelling_checks import SpellingError, parse_spelling_warnings -# pylint: enable=no-name-in-module - console = Console(force_terminal=True, color_system="standard", width=CONSOLE_WIDTH) @@ -164,7 +160,7 @@ def check_spelling(self, verbose: bool) -> List[SpellingError]: ) console.print(f"[blue]{self.package_name:60}:[/] The output is hidden until an error occurs.") with open(self.log_spelling_filename, "wt") as output: - completed_proc = run( # pylint: disable=subprocess-run-check + completed_proc = run( build_cmd, cwd=self._src_dir, env=env, @@ -243,7 +239,7 @@ def build_sphinx_docs(self, verbose: bool) -> List[DocBuildError]: f"The output is hidden until an error occurs." ) with open(self.log_build_filename, "wt") as output: - completed_proc = run( # pylint: disable=subprocess-run-check + completed_proc = run( build_cmd, cwd=self._src_dir, env=env, diff --git a/docs/exts/docs_build/errors.py b/docs/exts/docs_build/errors.py index e334d0800e3c4..b654fc6621748 100644 --- a/docs/exts/docs_build/errors.py +++ b/docs/exts/docs_build/errors.py @@ -104,7 +104,7 @@ def parse_sphinx_warnings(warning_text: str, docs_dir: str) -> List[DocBuildErro message=warning_parts[2], ) ) - except Exception: # noqa pylint: disable=broad-except + except Exception: # If an exception occurred while parsing the warning message, display the raw warning message. 
sphinx_build_errors.append( DocBuildError(file_path=None, line_no=None, message=sphinx_warning) diff --git a/docs/exts/docs_build/fetch_inventories.py b/docs/exts/docs_build/fetch_inventories.py index 4eb42295ac2f9..d55e28ae14547 100644 --- a/docs/exts/docs_build/fetch_inventories.py +++ b/docs/exts/docs_build/fetch_inventories.py @@ -30,12 +30,8 @@ from requests.adapters import DEFAULT_POOLSIZE from airflow.utils.helpers import partition -from docs.exts.docs_build.docs_builder import ( # pylint: disable=no-name-in-module - get_available_providers_packages, -) -from docs.exts.docs_build.third_party_inventories import ( # pylint: disable=no-name-in-module - THIRD_PARTY_INDEXES, -) +from docs.exts.docs_build.docs_builder import get_available_providers_packages +from docs.exts.docs_build.third_party_inventories import THIRD_PARTY_INDEXES CURRENT_DIR = os.path.dirname(__file__) ROOT_DIR = os.path.abspath(os.path.join(CURRENT_DIR, os.pardir, os.pardir, os.pardir)) diff --git a/docs/exts/docs_build/lint_checks.py b/docs/exts/docs_build/lint_checks.py index 7b4e7b0a7c047..c45139877a61f 100644 --- a/docs/exts/docs_build/lint_checks.py +++ b/docs/exts/docs_build/lint_checks.py @@ -22,8 +22,8 @@ from itertools import chain from typing import Iterable, List, Optional, Set -from docs.exts.docs_build.docs_builder import ALL_PROVIDER_YAMLS # pylint: disable=no-name-in-module -from docs.exts.docs_build.errors import DocBuildError # pylint: disable=no-name-in-module +from docs.exts.docs_build.docs_builder import ALL_PROVIDER_YAMLS +from docs.exts.docs_build.errors import DocBuildError ROOT_PROJECT_DIR = os.path.abspath( os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir, os.pardir) diff --git a/docs/exts/docs_build/spelling_checks.py b/docs/exts/docs_build/spelling_checks.py index 4da70c11894ba..62d4baeccb23f 100644 --- a/docs/exts/docs_build/spelling_checks.py +++ b/docs/exts/docs_build/spelling_checks.py @@ -108,7 +108,7 @@ def parse_spelling_warnings(warning_text: str, docs_dir: str) -> List[SpellingEr message=sphinx_warning, ) ) - except Exception: # noqa pylint: disable=broad-except + except Exception: # If an exception occurred while parsing the warning message, display the raw warning message. sphinx_spelling_errors.append( SpellingError( diff --git a/docs/exts/exampleinclude.py b/docs/exts/exampleinclude.py index 815eb21578a24..8bf395d7ff307 100644 --- a/docs/exts/exampleinclude.py +++ b/docs/exts/exampleinclude.py @@ -33,7 +33,7 @@ from sphinx.util.nodes import set_source_info try: - import sphinx_airflow_theme # pylint: disable=unused-import + import sphinx_airflow_theme airflow_theme_is_available = True except ImportError: @@ -42,7 +42,7 @@ logger = logging.getLogger(__name__) -class ExampleHeader(nodes.reference, nodes.FixedTextElement): # pylint: disable=too-many-ancestors +class ExampleHeader(nodes.reference, nodes.FixedTextElement): """Header for examples.""" @@ -124,11 +124,10 @@ def run(self): retnode = container_node return [retnode] - except Exception as exc: # pylint: disable=broad-except + except Exception as exc: return [document.reporter.warning(str(exc), line=self.lineno)] -# pylint: disable=protected-access def register_source(app, env, modname): """ Registers source code. @@ -147,7 +146,7 @@ def register_source(app, env, modname): if code_tags is None: try: analyzer = ModuleAnalyzer.for_module(modname) - except Exception as ex: # pylint: disable=broad-except + except Exception as ex: logger.info( "Module \"%s\" could not be loaded. 
Full source will not be available. \"%s\"", modname, ex ) @@ -172,9 +171,6 @@ def register_source(app, env, modname): return True -# pylint: enable=protected-access - - def create_node(env, relative_path, show_button): """ Creates documentation node for example include. @@ -208,7 +204,6 @@ def create_node(env, relative_path, show_button): return paragraph -# pylint: disable=protected-access def doctree_read(app, doctree): """ Reads documentation tree for the application and register sources in the generated documentation. @@ -238,9 +233,6 @@ def doctree_read(app, doctree): objnode.replace_self(onlynode) -# pylint: enable=protected-access - - def setup(app): """ Sets the plugin up and returns configuration of the plugin. diff --git a/docs/exts/operators_and_hooks_ref.py b/docs/exts/operators_and_hooks_ref.py index fccc99197f055..91824b39fc0f4 100644 --- a/docs/exts/operators_and_hooks_ref.py +++ b/docs/exts/operators_and_hooks_ref.py @@ -24,10 +24,7 @@ from docutils.nodes import Element from docutils.parsers.rst import Directive, directives from docutils.statemachine import StringList -from provider_yaml_utils import ( # pylint: disable=no-name-in-module - get_provider_yaml_paths, - load_package_data, -) +from provider_yaml_utils import get_provider_yaml_paths, load_package_data from sphinx.util import nested_parse_with_titles from sphinx.util.docutils import switch_source_input diff --git a/docs/exts/providers_packages_ref.py b/docs/exts/providers_packages_ref.py index 10ea9d645055e..d0c420c1b6c99 100644 --- a/docs/exts/providers_packages_ref.py +++ b/docs/exts/providers_packages_ref.py @@ -15,7 +15,7 @@ # specific language governing permissions and limitations # under the License. -from provider_yaml_utils import load_package_data # pylint: disable=no-name-in-module +from provider_yaml_utils import load_package_data from sphinx.application import Sphinx diff --git a/docs/exts/removemarktransform.py b/docs/exts/removemarktransform.py index b8857ddc7c9df..865d08f08259d 100644 --- a/docs/exts/removemarktransform.py +++ b/docs/exts/removemarktransform.py @@ -1,4 +1,3 @@ -# flake8: noqa # Disable Flake8 because of all the sphinx imports # # Licensed to the Apache Software Foundation (ASF) under one @@ -22,7 +21,7 @@ import re from docutils import nodes -from pygments.lexers import Python3Lexer, PythonLexer, guess_lexer # noqa pylint: disable=no-name-in-module +from pygments.lexers import Python3Lexer, PythonLexer, guess_lexer from sphinx.transforms import SphinxTransform from sphinx.transforms.post_transforms.code import TrimDoctestFlagsTransform @@ -61,7 +60,7 @@ def is_pycode(node: nodes.literal_block) -> bool: try: lexer = guess_lexer(node.rawsource) return isinstance(lexer, (PythonLexer, Python3Lexer)) - except Exception: # noqa pylint: disable=broad-except + except Exception: pass return False diff --git a/docs/exts/substitution_extensions.py b/docs/exts/substitution_extensions.py index 1cf3c6d6a28bf..801ec0e624394 100644 --- a/docs/exts/substitution_extensions.py +++ b/docs/exts/substitution_extensions.py @@ -30,7 +30,7 @@ LOGGER = logging.getLogger(__name__) -OriginalCodeBlock: Directive = directives._directives['code-block'] # pylint: disable=protected-access +OriginalCodeBlock: Directive = directives._directives['code-block'] _SUBSTITUTION_OPTION_NAME = 'substitutions' diff --git a/docs/publish_docs.py b/docs/publish_docs.py index 0be5f61cd093d..92e5406c93868 100755 --- a/docs/publish_docs.py +++ b/docs/publish_docs.py @@ -20,13 +20,10 @@ import argparse import os -# pylint: 
disable=no-name-in-module from docs.exts.docs_build.docs_builder import AirflowDocsBuilder from docs.exts.docs_build.package_filter import process_package_filters from docs.exts.provider_yaml_utils import load_package_data -# pylint: enable=no-name-in-module - AIRFLOW_SITE_DIR = os.environ.get('AIRFLOW_SITE_DIRECTORY') if __name__ != "__main__": diff --git a/kubernetes_tests/test_kubernetes_pod_operator.py b/kubernetes_tests/test_kubernetes_pod_operator.py index 64b6344ef441e..2da5da0618b41 100644 --- a/kubernetes_tests/test_kubernetes_pod_operator.py +++ b/kubernetes_tests/test_kubernetes_pod_operator.py @@ -1,4 +1,3 @@ -# pylint: disable=unused-argument # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information @@ -69,7 +68,7 @@ def get_current_task_name(self): return "_" + unittest.TestCase.id(self).replace(".", "_")[::-1] def setUp(self): - self.maxDiff = None # pylint: disable=invalid-name + self.maxDiff = None self.api_client = ApiClient() self.expected_pod = { 'apiVersion': 'v1', @@ -568,7 +567,7 @@ def test_xcom_push(self): volume_mount = self.api_client.sanitize_for_serialization(PodDefaults.VOLUME_MOUNT) container = self.api_client.sanitize_for_serialization(PodDefaults.SIDECAR_CONTAINER) self.expected_pod['spec']['containers'][0]['args'] = args - self.expected_pod['spec']['containers'][0]['volumeMounts'].insert(0, volume_mount) # noqa + self.expected_pod['spec']['containers'][0]['volumeMounts'].insert(0, volume_mount) self.expected_pod['spec']['volumes'].insert(0, volume) self.expected_pod['spec']['containers'].append(container) assert self.expected_pod == actual_pod @@ -818,9 +817,7 @@ def test_init_container(self): @mock.patch("airflow.providers.cncf.kubernetes.utils.pod_launcher.PodLauncher.start_pod") @mock.patch("airflow.providers.cncf.kubernetes.utils.pod_launcher.PodLauncher.monitor_pod") @mock.patch("airflow.kubernetes.kube_client.get_kube_client") - def test_pod_template_file( - self, mock_client, monitor_mock, start_mock # pylint: disable=unused-argument - ): + def test_pod_template_file(self, mock_client, monitor_mock, start_mock): from airflow.utils.state import State path = sys.path[0] + '/tests/kubernetes/pod.yaml' @@ -904,9 +901,7 @@ def test_pod_template_file( @mock.patch("airflow.providers.cncf.kubernetes.utils.pod_launcher.PodLauncher.start_pod") @mock.patch("airflow.providers.cncf.kubernetes.utils.pod_launcher.PodLauncher.monitor_pod") @mock.patch("airflow.kubernetes.kube_client.get_kube_client") - def test_pod_priority_class_name( - self, mock_client, monitor_mock, start_mock # pylint: disable=unused-argument - ): + def test_pod_priority_class_name(self, mock_client, monitor_mock, start_mock): """Test ability to assign priorityClassName to pod""" from airflow.utils.state import State @@ -947,7 +942,7 @@ def test_pod_name(self): ) @mock.patch("airflow.providers.cncf.kubernetes.utils.pod_launcher.PodLauncher.monitor_pod") - def test_on_kill(self, monitor_mock): # pylint: disable=unused-argument + def test_on_kill(self, monitor_mock): from airflow.utils.state import State client = kube_client.get_kube_client(in_cluster=False) @@ -1018,6 +1013,3 @@ def test_reattach_failing_pod_once(self): create_mock.return_value = ("success", {}, {}) k.execute(context) create_mock.assert_called_once() - - -# pylint: enable=unused-argument diff --git a/kubernetes_tests/test_kubernetes_pod_operator_backcompat.py 
b/kubernetes_tests/test_kubernetes_pod_operator_backcompat.py index e2fc6bccfe8eb..ea9c9ee4d7250 100644 --- a/kubernetes_tests/test_kubernetes_pod_operator_backcompat.py +++ b/kubernetes_tests/test_kubernetes_pod_operator_backcompat.py @@ -1,4 +1,3 @@ -# pylint: disable=unused-argument # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information @@ -67,7 +66,7 @@ def get_current_task_name(self): return "_" + unittest.TestCase.id(self).replace(".", "_")[::-1] def setUp(self): - self.maxDiff = None # pylint: disable=invalid-name + self.maxDiff = None self.api_client = ApiClient() self.expected_pod = { 'apiVersion': 'v1', @@ -443,7 +442,7 @@ def test_xcom_push(self): volume_mount = self.api_client.sanitize_for_serialization(PodDefaults.VOLUME_MOUNT) container = self.api_client.sanitize_for_serialization(PodDefaults.SIDECAR_CONTAINER) self.expected_pod['spec']['containers'][0]['args'] = args - self.expected_pod['spec']['containers'][0]['volumeMounts'].insert(0, volume_mount) # noqa + self.expected_pod['spec']['containers'][0]['volumeMounts'].insert(0, volume_mount) self.expected_pod['spec']['volumes'].insert(0, volume) self.expected_pod['spec']['containers'].append(container) assert self.expected_pod == actual_pod @@ -614,6 +613,3 @@ def test_init_container(self): {'name': 'test-volume', 'persistentVolumeClaim': {'claimName': 'test-volume'}} ] assert self.expected_pod == actual_pod - - -# pylint: enable=unused-argument diff --git a/pylintrc b/pylintrc deleted file mode 100644 index a44a5f9699ea1..0000000000000 --- a/pylintrc +++ /dev/null @@ -1,597 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -[MASTER] - -# Add files or directories to the ignore list. They should be base names, not -# paths. -ignore=CVS - -# Add files or directories matching the regex patterns to the ignore list. The -# regex matches against base names, not paths. -ignore-patterns= - -# Python code to execute, usually for sys.path manipulation such as -# pygtk.require(). -#init-hook= - -# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the -# number of processors available to use. -jobs=0 - -# Control the amount of potential inferred values when inferring a single -# object. This can help the performance when dealing with large functions or -# complex, nested conditions. -limit-inference-results=100 - -# List of plugins (as comma separated values of python modules names) to load, -# usually to register additional checkers. -#load-plugins=tests.airflow_pylint.do_not_use_asserts,tests.airflow_pylint.disable_checks_for_tests - -# Pickle collected data for later comparisons. -persistent=yes - -# Specify a configuration file. 
-#rcfile= - -# When enabled, pylint would attempt to guess common misconfiguration and emit -# user-friendly hints instead of false-positive error messages. -suggestion-mode=yes - -# Allow loading of arbitrary C extensions. Extensions are imported into the -# active Python interpreter and may run arbitrary code. -unsafe-load-any-extension=no - -# Pylint 2.8 changes this to `extension-pkg-allow-list` -extension-pkg-whitelist=cassandra - -[MESSAGES CONTROL] - -# Only show warnings with the listed confidence levels. Leave empty to show -# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED. -confidence= - -# Disable the message, report, category or checker with the given id(s). You -# can either give multiple identifiers separated by comma (,) or put this -# option multiple times (only on the command line, not in the configuration -# file where it should appear only once). You can also use "--disable=all" to -# disable everything first and then re-enable specific checks. For example, if -# you want to run only the similarities checker, you can use "--disable=all -# --enable=similarities". If you want to run only the classes checker, but have -# no Warning level messages displayed, use "--disable=all --enable=classes -# --disable=W". -disable=print-statement, - parameter-unpacking, - unpacking-in-except, - old-raise-syntax, - backtick, - long-suffix, - old-ne-operator, - old-octal-literal, - import-star-module-level, - non-ascii-bytes-literal, - raw-checker-failed, - bad-inline-option, - locally-disabled, - file-ignored, - suppressed-message, - useless-suppression, - use-symbolic-message-instead, - apply-builtin, - basestring-builtin, - buffer-builtin, - cmp-builtin, - coerce-builtin, - execfile-builtin, - file-builtin, - long-builtin, - raw_input-builtin, - reduce-builtin, - standarderror-builtin, - unicode-builtin, - xrange-builtin, - coerce-method, - delslice-method, - getslice-method, - setslice-method, - no-absolute-import, - old-division, - dict-iter-method, - dict-view-method, - next-method-called, - metaclass-assignment, - indexing-exception, - raising-string, - reload-builtin, - oct-method, - hex-method, - nonzero-method, - cmp-method, - input-builtin, - round-builtin, - intern-builtin, - unichr-builtin, - map-builtin-not-iterating, - zip-builtin-not-iterating, - range-builtin-not-iterating, - filter-builtin-not-iterating, - using-cmp-argument, - eq-without-hash, - div-method, - idiv-method, - rdiv-method, - exception-message-attribute, - invalid-str-codec, - sys-max-int, - bad-python3-import, - next-method-defined, - xreadlines-attribute, - exception-escape, - comprehension-escape, - duplicate-code, # deemed unnecessary - abstract-method, # deemed unnecessary - keyword-arg-before-vararg, # deemed unnecessary - no-self-use, # http://pylint-messages.wikidot.com/messages:r0201 - no-else-return, # deemed unnecessary - no-else-raise, # deemed unnecessary - too-many-format-args, # Pylint fails on multiline string format - too-many-lines, # Pylint fails on too many lines and we have several cases of those - cell-var-from-loop, # Raises spurious errors - arguments-differ, # Doesn't always raise valid messages - import-error, # Requires installing Airflow environment in CI task which takes long, therefore ignored. Tests should fail anyways if deps are missing. Possibly un-ignore in the future if we ever use pre-built Docker images for CI. - fixme, # There should be a good reason for adding a TODO - pointless-statement, # Is raised on the bitshift operator. 
Could be disabled only on /example_dags after https://github.com/PyCQA/pylint/projects/1. - ungrouped-imports, # Disabled to avoid conflict with isort import order rules, which is enabled in the project. - missing-module-docstring, - import-outside-toplevel, # We import outside toplevel to avoid cyclic imports - raise-missing-from, # We don't use raise...from - misplaced-comparison-constant - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time (only on the command line, not in the configuration file where -# it should appear only once). See also the "--disable" option for examples. -enable=c-extension-no-member - - -[REPORTS] - -# Python expression which should return a note less than 10 (10 is the highest -# note). You have access to the variables errors warning, statement which -# respectively contain the number of errors / warnings messages and the total -# number of statements analyzed. This is used by the global evaluation report -# (RP0004). -evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) - -# Template used to display messages. This is a python new-style format string -# used to format the message information. See doc for all details. -#msg-template= - -# Set the output format. Available formats are text, parseable, colorized, json -# and msvs (visual studio). You can also give a reporter class, e.g. -# mypackage.mymodule.MyReporterClass. -output-format=text - -# Tells whether to display a full report or only the messages. -reports=no - -# Activate the evaluation score. -score=yes - - -[REFACTORING] - -# Maximum number of nested blocks for function / method body -max-nested-blocks=3 - -# Complete name of functions that never returns. When checking for -# inconsistent-return-statements if a never returning function is called then -# it will be considered as an explicit return statement and no message will be -# printed. -never-returning-functions=sys.exit - - -[LOGGING] - -# Format style used to check logging format string. `old` means using % -# formatting, while `new` is for `{}` formatting. -logging-format-style=old - -# Logging modules to check that the string format arguments are in logging -# function parameter format. -logging-modules=logging - - -[SPELLING] - -# Limits count of emitted suggestions for spelling mistakes. -max-spelling-suggestions=4 - -# Spelling dictionary name. Available dictionaries: none. To make it working -# install python-enchant package.. -spelling-dict= - -# List of comma separated words that should not be checked. -spelling-ignore-words= - -# A path to a file that contains private dictionary; one word per line. -spelling-private-dict-file= - -# Tells whether to store unknown words to indicated private dictionary in -# --spelling-private-dict-file option instead of raising a message. -spelling-store-unknown-words=no - - -[MISCELLANEOUS] - -# List of note tags to take in consideration, separated by a comma. -notes=FIXME, - XXX, - TODO - - -[TYPECHECK] - -# List of decorators that produce context managers, such as -# contextlib.contextmanager. Add to this list to register other decorators that -# produce valid context managers. -contextmanager-decorators=contextlib.contextmanager - -# List of members which are set dynamically and missed by pylint inference -# system, and so shouldn't trigger E1101 when accessed. Python regular -# expressions are accepted. 
-generated-members= - -# Tells whether missing members accessed in mixin class should be ignored. A -# mixin class is detected if its name ends with "mixin" (case insensitive). -ignore-mixin-members=yes - -# Tells whether to warn about missing members when the owner of the attribute -# is inferred to be None. -ignore-none=yes - -# This flag controls whether pylint should warn about no-member and similar -# checks whenever an opaque object is returned when inferring. The inference -# can return multiple potential results while evaluating a Python object, but -# some branches might not be evaluated, which results in partial inference. In -# that case, it might be useful to still emit no-member and other checks for -# the rest of the inferred objects. -ignore-on-opaque-inference=yes - -# List of class names for which member attributes should not be checked (useful -# for classes with dynamically set attributes). This supports the use of -# qualified names. -ignored-classes=optparse.Values,thread._local,_thread._local,sqlalchemy.orm.scoping.scoped_session - -# List of module names for which member attributes should not be checked -# (useful for modules/projects where namespaces are manipulated during runtime -# and thus existing member attributes cannot be deduced by static analysis. It -# supports qualified module names, as well as Unix pattern matching. -ignored-modules=alembic.op,alembic.context - -# Show a hint with possible names when a member name was not found. The aspect -# of finding the hint is based on edit distance. -missing-member-hint=yes - -# The minimum edit distance a name should have in order to be considered a -# similar match for a missing member name. -missing-member-hint-distance=1 - -# The total number of similar names that should be taken in consideration when -# showing a hint for a missing member. -missing-member-max-choices=1 - - -[VARIABLES] - -# List of additional names supposed to be defined in builtins. Remember that -# you should avoid defining new builtins when possible. -additional-builtins= - -# Tells whether unused global variables should be treated as a violation. -allow-global-unused-variables=yes - -# List of strings which can identify a callback function by name. A callback -# name must start or end with one of those strings. -callbacks=cb_, - _cb - -# A regular expression matching the name of dummy variables (i.e. expected to -# not be used). -dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_|args|kwargs - -# Argument names that match this expression will be ignored. Default to name -# with leading underscore. -ignored-argument-names=_.*|^ignored_|^unused_|^kwargs|^args|^mock_.+ - -# Tells whether we should check for unused import in __init__ files. -init-import=no - -# List of qualified module names which can have objects that can redefine -# builtins. -redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io - - -[FORMAT] - -# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. -expected-line-ending-format= - -# Regexp for a line that is allowed to be longer than the limit. -ignore-long-lines=^\s*(# )??$ - -# Number of spaces of indent required inside a hanging or continued line. -indent-after-paren=4 - -# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 -# tab). -indent-string=' ' - -# Maximum number of characters on a single line. -max-line-length=110 - -# Maximum number of lines in a module. 
-max-module-lines=1000 - -# List of optional constructs for which whitespace checking is disabled. `dict- -# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. -# `trailing-comma` allows a space between comma and closing bracket: (a, ). -# `empty-line` allows space-only lines. -no-space-check=trailing-comma, - dict-separator - -# Allow the body of a class to be on the same line as the declaration if body -# contains single statement. -single-line-class-stmt=no - -# Allow the body of an if to be on the same line as the test if there is no -# else. -single-line-if-stmt=no - - -[SIMILARITIES] - -# Ignore comments when computing similarities. -ignore-comments=yes - -# Ignore docstrings when computing similarities. -ignore-docstrings=yes - -# Ignore imports when computing similarities. -ignore-imports=no - -# Minimum lines number of a similarity. -min-similarity-lines=4 - - -[BASIC] - -# Naming style matching correct argument names. -#argument-naming-style=snake_case - -# Regular expression matching correct argument names. Overrides argument- -# naming-style. -# BasPH: Kept snake_case but allow for 1-char argument names. -argument-rgx=[a-z_][a-z0-9_]{0,30}$ - -# Naming style matching correct attribute names. -attr-naming-style=snake_case - -# Regular expression matching correct attribute names. Overrides attr-naming- -# style. -#attr-rgx= - -# Bad variable names which should always be refused, separated by a comma. -bad-names=toto, - tutu, - tata - -# Naming style matching correct class attribute names. -class-attribute-naming-style=any - -# Regular expression matching correct class attribute names. Overrides class- -# attribute-naming-style. -#class-attribute-rgx= - -# Naming style matching correct class names. -class-naming-style=PascalCase - -# Regular expression matching correct class names. Overrides class-naming- -# style. -#class-rgx= - -# Naming style matching correct constant names. -const-naming-style=any - -# Regular expression matching correct constant names. Overrides const-naming- -# style. -#const-rgx= - -# Minimum line length for functions/classes that require docstrings, shorter -# ones are exempt. -docstring-min-length=-1 - -# Naming style matching correct function names. -function-naming-style=snake_case - -# Regular expression matching correct function names. Overrides function- -# naming-style. -#function-rgx= - -# Good variable names which should always be accepted, separated by a comma. -good-names=e, - ex, - i, - j, - k, - v, # Commonly used when iterating dict.items() - _, - ti, # Commonly used in Airflow as shorthand for taskinstance - op, # Commonly used in Airflow as shorthand for operator - dr, # Commonly used in Airflow as shorthand for dag run - f, # Commonly used as shorthand for file - db, # Commonly used as shorthand for database - df, # Commonly used as shorthand for DataFrame - cm, # Commonly used as shorthand for context manager - ds, # Used in Airflow templates - ts, # Used in Airflow templates - id # Commonly used as shorthand for identifier - -# Include a hint for the correct naming format with invalid-name. -include-naming-hint=no - -# Naming style matching correct inline iteration names. -inlinevar-naming-style=any - -# Regular expression matching correct inline iteration names. Overrides -# inlinevar-naming-style. -#inlinevar-rgx= - -# Naming style matching correct method names. -method-naming-style=snake_case - -# Regular expression matching correct method names. Overrides method-naming- -# style. 
-#method-rgx= - -# Naming style matching correct module names. -module-naming-style=snake_case - -# Regular expression matching correct module names. Overrides module-naming-style. -# Default regex changes: -# - allow modules beginning with 0-9 (used in airflow/migrations) -# - allow up to 60 chars (airflow/migrations contains quite long names) -module-rgx=[a-z0-9_][a-z0-9_]{2,59}$ - -# Colon-delimited sets of names that determine each other's naming style when -# the name regexes allow several styles. -name-group= - -# Regular expression which should only match function or class names that do -# not require a docstring. -no-docstring-rgx=^_ - -# List of decorators that produce properties, such as abc.abstractproperty. Add -# to this list to register other decorators that produce valid properties. -# These decorators are taken in consideration only for invalid-name. -property-classes=abc.abstractproperty - -# Naming style matching correct variable names. -variable-naming-style=snake_case - -# Regular expression matching correct variable names. Overrides variable- -# naming-style. -#variable-rgx= - - -[IMPORTS] - -# Allow wildcard imports from modules that define __all__. -allow-wildcard-with-all=no - -# Analyse import fallback blocks. This can be used to support both Python 2 and -# 3 compatible code, which means that the block might have code that exists -# only in one or another interpreter, leading to false positives when analysed. -analyse-fallback-blocks=no - -# Deprecated modules which should not be used, separated by a comma. -deprecated-modules=optparse,tkinter.tix,imp - -# Create a graph of external dependencies in the given file (report RP0402 must -# not be disabled). -ext-import-graph= - -# Create a graph of every (i.e. internal and external) dependencies in the -# given file (report RP0402 must not be disabled). -import-graph= - -# Create a graph of internal dependencies in the given file (report RP0402 must -# not be disabled). -int-import-graph= - -# Force import order to recognize a module as part of the standard -# compatibility libraries. -known-standard-library= - -# Force import order to recognize a module as part of a third party library. -known-third-party=enchant - - -[CLASSES] - -# List of method names used to declare (i.e. assign) instance attributes. -defining-attr-methods=__init__, - __new__, - setUp - -# List of member names, which should be excluded from the protected access -# warning. -exclude-protected=_asdict, - _fields, - _replace, - _source, - _make - -# List of valid names for the first argument in a class method. -valid-classmethod-first-arg=cls - -# List of valid names for the first argument in a metaclass class method. -valid-metaclass-classmethod-first-arg=cls - - -[DESIGN] - -# Maximum number of arguments for function / method. -# BasPH: choose 10 because this was 80% of the sorted list of number of arguments above 5 (Pylint default) -max-args=10 - -# Maximum number of attributes for a class (see R0902). -# BasPH: choose 15 because this was 80% of the sorted list of number of attributes above 7 (Pylint default) -max-attributes=15 - -# Maximum number of boolean expressions in an if statement. -max-bool-expr=5 - -# Maximum number of branch for function / method body. -# BasPH: choose 22 because this was 80% of the sorted list of number of attributes above 12 (Pylint default) -max-branches=22 - -# Maximum number of locals for function / method body. 
-# BasPH: choose 24 because this was 80% of the sorted list of number of locals above 15 (Pylint default) -max-locals=24 - -# Maximum number of parents for a class (see R0901). -max-parents=8 - -# Maximum number of public methods for a class (see R0904). -# BasPH: choose 27 because this was 50% of the sorted list of 30 number of public methods above 20 (Pylint default) -max-public-methods=27 - -# Maximum number of return / yield for function / method body. -max-returns=6 - -# Maximum number of statements in function / method body. -# BasPH: choose 69 because this was 80% of the sorted list of number of statements above 50 (Pylint default) -max-statements=69 - -# Minimum number of public methods for a class (see R0903). -min-public-methods=0 - - -[EXCEPTIONS] - -# Exceptions that will emit a warning when being caught. Defaults to -# "Exception". -overgeneral-exceptions=Exception diff --git a/pylintrc-tests b/pylintrc-tests deleted file mode 100644 index 841fe0b381fea..0000000000000 --- a/pylintrc-tests +++ /dev/null @@ -1,606 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -[MASTER] - -# Add files or directories to the ignore list. They should be base names, not -# paths. -ignore=CVS - -# Add files or directories matching the regex patterns to the ignore list. The -# regex matches against base names, not paths. -ignore-patterns= - -# Python code to execute, usually for sys.path manipulation such as -# pygtk.require(). -#init-hook= - -# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the -# number of processors available to use. -jobs=0 - -# Control the amount of potential inferred values when inferring a single -# object. This can help the performance when dealing with large functions or -# complex, nested conditions. -limit-inference-results=100 - -# List of plugins (as comma separated values of python modules names) to load, -# usually to register additional checkers. -#load-plugins=tests.airflow_pylint.do_not_use_asserts,tests.airflow_pylint.disable_checks_for_tests - -# Pickle collected data for later comparisons. -persistent=yes - -# Specify a configuration file. -#rcfile= - -# When enabled, pylint would attempt to guess common misconfiguration and emit -# user-friendly hints instead of false-positive error messages. -suggestion-mode=yes - -# Allow loading of arbitrary C extensions. Extensions are imported into the -# active Python interpreter and may run arbitrary code. -unsafe-load-any-extension=no - -# Pylint 2.8 changes this to `extension-pkg-allow-list` -extension-pkg-whitelist=cassandra - -[MESSAGES CONTROL] - -# Only show warnings with the listed confidence levels. Leave empty to show -# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED. 
-confidence= - -# Disable the message, report, category or checker with the given id(s). You -# can either give multiple identifiers separated by comma (,) or put this -# option multiple times (only on the command line, not in the configuration -# file where it should appear only once). You can also use "--disable=all" to -# disable everything first and then re-enable specific checks. For example, if -# you want to run only the similarities checker, you can use "--disable=all -# --enable=similarities". If you want to run only the classes checker, but have -# no Warning level messages displayed, use "--disable=all --enable=classes -# --disable=W". -disable=print-statement, - parameter-unpacking, - unpacking-in-except, - old-raise-syntax, - backtick, - long-suffix, - old-ne-operator, - old-octal-literal, - import-star-module-level, - non-ascii-bytes-literal, - raw-checker-failed, - bad-inline-option, - locally-disabled, - file-ignored, - suppressed-message, - useless-suppression, - use-symbolic-message-instead, - apply-builtin, - basestring-builtin, - buffer-builtin, - cmp-builtin, - coerce-builtin, - execfile-builtin, - file-builtin, - long-builtin, - raw_input-builtin, - reduce-builtin, - standarderror-builtin, - unicode-builtin, - xrange-builtin, - coerce-method, - delslice-method, - getslice-method, - setslice-method, - no-absolute-import, - old-division, - dict-iter-method, - dict-view-method, - next-method-called, - metaclass-assignment, - indexing-exception, - raising-string, - reload-builtin, - oct-method, - hex-method, - nonzero-method, - cmp-method, - input-builtin, - round-builtin, - intern-builtin, - unichr-builtin, - map-builtin-not-iterating, - zip-builtin-not-iterating, - range-builtin-not-iterating, - filter-builtin-not-iterating, - using-cmp-argument, - eq-without-hash, - div-method, - idiv-method, - rdiv-method, - exception-message-attribute, - invalid-str-codec, - sys-max-int, - bad-python3-import, - next-method-defined, - xreadlines-attribute, - exception-escape, - comprehension-escape, - duplicate-code, # deemed unnecessary - abstract-method, # deemed unnecessary - keyword-arg-before-vararg, # deemed unnecessary - no-self-use, # http://pylint-messages.wikidot.com/messages:r0201 - no-else-return, # deemed unnecessary - no-else-raise, # deemed unnecessary - too-many-format-args, # Pylint fails on multiline string format - too-many-lines, # Pylint fails on too many lines and we have several cases of those - cell-var-from-loop, # Raises spurious errors - arguments-differ, # Doesn't always raise valid messages - import-error, # Requires installing Airflow environment in CI task which takes long, therefore ignored. Tests should fail anyways if deps are missing. Possibly un-ignore in the future if we ever use pre-built Docker images for CI. - fixme, # There should be a good reason for adding a TODO - pointless-statement, # Is raised on the bitshift operator. Could be disabled only on /example_dags after https://github.com/PyCQA/pylint/projects/1. - ungrouped-imports, # Disabled to avoid conflict with isort import order rules, which is enabled in the project. 
- missing-module-docstring, - import-outside-toplevel, # We import outside toplevel to avoid cyclic imports - raise-missing-from, # We don't use raise...from - misplaced-comparison-constant, - # Those checks are disabled for tests only - missing-docstring, - no-self-use, - too-many-public-methods, - protected-access, - redefined-outer-name, # Gets confused about file-scoped pytest fixtures - attribute-defined-outside-init, - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time (only on the command line, not in the configuration file where -# it should appear only once). See also the "--disable" option for examples. -enable=c-extension-no-member - - -[REPORTS] - -# Python expression which should return a note less than 10 (10 is the highest -# note). You have access to the variables errors warning, statement which -# respectively contain the number of errors / warnings messages and the total -# number of statements analyzed. This is used by the global evaluation report -# (RP0004). -evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) - -# Template used to display messages. This is a python new-style format string -# used to format the message information. See doc for all details. -#msg-template= - -# Set the output format. Available formats are text, parseable, colorized, json -# and msvs (visual studio). You can also give a reporter class, e.g. -# mypackage.mymodule.MyReporterClass. -output-format=text - -# Tells whether to display a full report or only the messages. -reports=no - -# Activate the evaluation score. -score=yes - - -[REFACTORING] - -# Maximum number of nested blocks for function / method body -max-nested-blocks=3 - -# Complete name of functions that never returns. When checking for -# inconsistent-return-statements if a never returning function is called then -# it will be considered as an explicit return statement and no message will be -# printed. -never-returning-functions=sys.exit - - -[LOGGING] - -# Format style used to check logging format string. `old` means using % -# formatting, while `new` is for `{}` formatting. -logging-format-style=old - -# Logging modules to check that the string format arguments are in logging -# function parameter format. -logging-modules=logging - - -[SPELLING] - -# Limits count of emitted suggestions for spelling mistakes. -max-spelling-suggestions=4 - -# Spelling dictionary name. Available dictionaries: none. To make it working -# install python-enchant package.. -spelling-dict= - -# List of comma separated words that should not be checked. -spelling-ignore-words= - -# A path to a file that contains private dictionary; one word per line. -spelling-private-dict-file= - -# Tells whether to store unknown words to indicated private dictionary in -# --spelling-private-dict-file option instead of raising a message. -spelling-store-unknown-words=no - - -[MISCELLANEOUS] - -# List of note tags to take in consideration, separated by a comma. -notes=FIXME, - XXX, - TODO - - -[TYPECHECK] - -# List of decorators that produce context managers, such as -# contextlib.contextmanager. Add to this list to register other decorators that -# produce valid context managers. -contextmanager-decorators=contextlib.contextmanager - -# List of members which are set dynamically and missed by pylint inference -# system, and so shouldn't trigger E1101 when accessed. Python regular -# expressions are accepted. 
-generated-members= - -# Tells whether missing members accessed in mixin class should be ignored. A -# mixin class is detected if its name ends with "mixin" (case insensitive). -ignore-mixin-members=yes - -# Tells whether to warn about missing members when the owner of the attribute -# is inferred to be None. -ignore-none=yes - -# This flag controls whether pylint should warn about no-member and similar -# checks whenever an opaque object is returned when inferring. The inference -# can return multiple potential results while evaluating a Python object, but -# some branches might not be evaluated, which results in partial inference. In -# that case, it might be useful to still emit no-member and other checks for -# the rest of the inferred objects. -ignore-on-opaque-inference=yes - -# List of class names for which member attributes should not be checked (useful -# for classes with dynamically set attributes). This supports the use of -# qualified names. -ignored-classes=optparse.Values,thread._local,_thread._local,sqlalchemy.orm.scoping.scoped_session - -# List of module names for which member attributes should not be checked -# (useful for modules/projects where namespaces are manipulated during runtime -# and thus existing member attributes cannot be deduced by static analysis. It -# supports qualified module names, as well as Unix pattern matching. -ignored-modules=alembic.op,alembic.context - -# Show a hint with possible names when a member name was not found. The aspect -# of finding the hint is based on edit distance. -missing-member-hint=yes - -# The minimum edit distance a name should have in order to be considered a -# similar match for a missing member name. -missing-member-hint-distance=1 - -# The total number of similar names that should be taken in consideration when -# showing a hint for a missing member. -missing-member-max-choices=1 - - -[VARIABLES] - -# List of additional names supposed to be defined in builtins. Remember that -# you should avoid defining new builtins when possible. -additional-builtins= - -# Tells whether unused global variables should be treated as a violation. -allow-global-unused-variables=yes - -# List of strings which can identify a callback function by name. A callback -# name must start or end with one of those strings. -callbacks=cb_, - _cb - -# A regular expression matching the name of dummy variables (i.e. expected to -# not be used). -dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_|args|kwargs - -# Argument names that match this expression will be ignored. Default to name -# with leading underscore. -ignored-argument-names=_.*|^ignored_|^unused_|^kwargs|^args|^mock_.+ - -# Tells whether we should check for unused import in __init__ files. -init-import=no - -# List of qualified module names which can have objects that can redefine -# builtins. -redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io - - -[FORMAT] - -# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. -expected-line-ending-format= - -# Regexp for a line that is allowed to be longer than the limit. -ignore-long-lines=^\s*(# )??$ - -# Number of spaces of indent required inside a hanging or continued line. -indent-after-paren=4 - -# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 -# tab). -indent-string=' ' - -# Maximum number of characters on a single line. -max-line-length=110 - -# Maximum number of lines in a module. 
-max-module-lines=1000 - -# List of optional constructs for which whitespace checking is disabled. `dict- -# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. -# `trailing-comma` allows a space between comma and closing bracket: (a, ). -# `empty-line` allows space-only lines. -no-space-check=trailing-comma, - dict-separator - -# Allow the body of a class to be on the same line as the declaration if body -# contains single statement. -single-line-class-stmt=no - -# Allow the body of an if to be on the same line as the test if there is no -# else. -single-line-if-stmt=no - - -[SIMILARITIES] - -# Ignore comments when computing similarities. -ignore-comments=yes - -# Ignore docstrings when computing similarities. -ignore-docstrings=yes - -# Ignore imports when computing similarities. -ignore-imports=no - -# Minimum lines number of a similarity. -min-similarity-lines=4 - - -[BASIC] - -# Naming style matching correct argument names. -#argument-naming-style=snake_case - -# Regular expression matching correct argument names. Overrides argument- -# naming-style. -# BasPH: Kept snake_case but allow for 1-char argument names. -argument-rgx=[a-z_][a-z0-9_]{0,32}$ - -# Naming style matching correct attribute names. -attr-naming-style=snake_case - -# Regular expression matching correct attribute names. Overrides attr-naming- -# style. -#attr-rgx= - -# Bad variable names which should always be refused, separated by a comma. -bad-names=toto, - tutu, - tata - -# Naming style matching correct class attribute names. -class-attribute-naming-style=any - -# Regular expression matching correct class attribute names. Overrides class- -# attribute-naming-style. -#class-attribute-rgx= - -# Naming style matching correct class names. -class-naming-style=PascalCase - -# Regular expression matching correct class names. Overrides class-naming- -# style. -#class-rgx= - -# Naming style matching correct constant names. -const-naming-style=any - -# Regular expression matching correct constant names. Overrides const-naming- -# style. -#const-rgx= - -# Minimum line length for functions/classes that require docstrings, shorter -# ones are exempt. -docstring-min-length=-1 - -# Naming style matching correct function names. -function-naming-style=snake_case - -# Regular expression matching correct function names. Overrides function- -# naming-style. -#function-rgx= - -# Good variable names which should always be accepted, separated by a comma. -good-names=e, - ex, - i, - j, - k, - n, - v, # Commonly used when iterating dict.items() - _, - ti, # Commonly used in Airflow as shorthand for taskinstance - op, # Commonly used in Airflow as shorthand for operator - dr, # Commonly used in Airflow as shorthand for dag run - f, # Commonly used as shorthand for file - db, # Commonly used as shorthand for database - df, # Commonly used as shorthand for DataFrame - cm, # Commonly used as shorthand for context manager - ds, # Used in Airflow templates - ts, # Used in Airflow templates - id, # Commonly used as shorthand for identifier - fd, # aka "file-descriptor" -- common in socket code - -# Include a hint for the correct naming format with invalid-name. -include-naming-hint=no - -# Naming style matching correct inline iteration names. -inlinevar-naming-style=any - -# Regular expression matching correct inline iteration names. Overrides -# inlinevar-naming-style. -#inlinevar-rgx= - -# Naming style matching correct method names. -method-naming-style=snake_case - -# Regular expression matching correct method names. 
Overrides method-naming- -# style. -#method-rgx= - -# Naming style matching correct module names. -module-naming-style=snake_case - -# Regular expression matching correct module names. Overrides module-naming-style. -# Default regex changes: -# - allow modules beginning with 0-9 (used in airflow/migrations) -# - allow up to 60 chars (airflow/migrations contains quite long names) -module-rgx=[a-z0-9_][a-z0-9_]{2,59}$ - -# Colon-delimited sets of names that determine each other's naming style when -# the name regexes allow several styles. -name-group= - -# Regular expression which should only match function or class names that do -# not require a docstring. -no-docstring-rgx=^_ - -# List of decorators that produce properties, such as abc.abstractproperty. Add -# to this list to register other decorators that produce valid properties. -# These decorators are taken in consideration only for invalid-name. -property-classes=abc.abstractproperty - -# Naming style matching correct variable names. -variable-naming-style=snake_case - -# Regular expression matching correct variable names. Overrides variable- -# naming-style. -#variable-rgx= - - -[IMPORTS] - -# Allow wildcard imports from modules that define __all__. -allow-wildcard-with-all=no - -# Analyse import fallback blocks. This can be used to support both Python 2 and -# 3 compatible code, which means that the block might have code that exists -# only in one or another interpreter, leading to false positives when analysed. -analyse-fallback-blocks=no - -# Deprecated modules which should not be used, separated by a comma. -deprecated-modules=optparse,tkinter.tix,imp - -# Create a graph of external dependencies in the given file (report RP0402 must -# not be disabled). -ext-import-graph= - -# Create a graph of every (i.e. internal and external) dependencies in the -# given file (report RP0402 must not be disabled). -import-graph= - -# Create a graph of internal dependencies in the given file (report RP0402 must -# not be disabled). -int-import-graph= - -# Force import order to recognize a module as part of the standard -# compatibility libraries. -known-standard-library= - -# Force import order to recognize a module as part of a third party library. -known-third-party=enchant - - -[CLASSES] - -# List of method names used to declare (i.e. assign) instance attributes. -defining-attr-methods=__init__, - __new__, - setUp - -# List of member names, which should be excluded from the protected access -# warning. -exclude-protected=_asdict, - _fields, - _replace, - _source, - _make - -# List of valid names for the first argument in a class method. -valid-classmethod-first-arg=cls - -# List of valid names for the first argument in a metaclass class method. -valid-metaclass-classmethod-first-arg=cls - - -[DESIGN] - -# Maximum number of arguments for function / method. -# BasPH: choose 10 because this was 80% of the sorted list of number of arguments above 5 (Pylint default) -max-args=10 - -# Maximum number of attributes for a class (see R0902). -# BasPH: choose 15 because this was 80% of the sorted list of number of attributes above 7 (Pylint default) -max-attributes=15 - -# Maximum number of boolean expressions in an if statement. -max-bool-expr=5 - -# Maximum number of branch for function / method body. -# BasPH: choose 22 because this was 80% of the sorted list of number of attributes above 12 (Pylint default) -max-branches=22 - -# Maximum number of locals for function / method body. 
-# BasPH: choose 24 because this was 80% of the sorted list of number of locals above 15 (Pylint default) -max-locals=24 - -# Maximum number of parents for a class (see R0901). -max-parents=8 - -# Maximum number of public methods for a class (see R0904). -# BasPH: choose 27 because this was 50% of the sorted list of 30 number of public methods above 20 (Pylint default) -max-public-methods=27 - -# Maximum number of return / yield for function / method body. -max-returns=6 - -# Maximum number of statements in function / method body. -# BasPH: choose 69 because this was 80% of the sorted list of number of statements above 50 (Pylint default) -max-statements=69 - -# Minimum number of public methods for a class (see R0903). -min-public-methods=0 - - -[EXCEPTIONS] - -# Exceptions that will emit a warning when being caught. Defaults to -# "Exception". -overgeneral-exceptions=Exception diff --git a/scripts/ci/docker-compose/local.yml b/scripts/ci/docker-compose/local.yml index 56a06f1a6d21e..fb19ee6eecfd9 100644 --- a/scripts/ci/docker-compose/local.yml +++ b/scripts/ci/docker-compose/local.yml @@ -46,8 +46,6 @@ services: - ../../../docs:/opt/airflow/docs:cached - ../../../hooks:/opt/airflow/hooks:cached - ../../../logs:/root/airflow/logs:cached - - ../../../pylintrc:/opt/airflow/pylintrc:cached - - ../../../pylintrc-tests:/opt/airflow/pylintrc-tests:cached - ../../../pyproject.toml:/opt/airflow/pyproject.toml:cached - ../../../pytest.ini:/opt/airflow/pytest.ini:cached - ../../../scripts:/opt/airflow/scripts:cached diff --git a/scripts/ci/libraries/_all_libs.sh b/scripts/ci/libraries/_all_libs.sh index 24be4749a8ee9..b2ccbed784bd5 100755 --- a/scripts/ci/libraries/_all_libs.sh +++ b/scripts/ci/libraries/_all_libs.sh @@ -50,8 +50,6 @@ readonly SCRIPTS_CI_DIR . "${LIBRARIES_DIR}"/_permissions.sh # shellcheck source=scripts/ci/libraries/_push_pull_remove_images.sh . "${LIBRARIES_DIR}"/_push_pull_remove_images.sh -# shellcheck source=scripts/ci/libraries/_pylint.sh -. "${LIBRARIES_DIR}"/_pylint.sh # shellcheck source=scripts/ci/libraries/_runs.sh . "${LIBRARIES_DIR}"/_runs.sh # shellcheck source=scripts/ci/libraries/_spinner.sh diff --git a/scripts/ci/libraries/_build_images.sh b/scripts/ci/libraries/_build_images.sh index 598d944549550..d9c77aab6eaf5 100644 --- a/scripts/ci/libraries/_build_images.sh +++ b/scripts/ci/libraries/_build_images.sh @@ -131,7 +131,7 @@ function build_images::confirm_image_rebuild() { fi if [[ -f "${LAST_FORCE_ANSWER_FILE}" ]]; then # set variable from last answered response given in the same pre-commit run - so that it can be - # answered in the first pre-commit check (build) and then used in another (pylint/mypy/flake8 etc). + # answered in the first pre-commit check (build) and then used in another (mypy/flake8 etc). 
# shellcheck disable=SC1090 source "${LAST_FORCE_ANSWER_FILE}" fi diff --git a/scripts/ci/libraries/_local_mounts.sh b/scripts/ci/libraries/_local_mounts.sh index 9f0675243794a..c9cc70957e029 100644 --- a/scripts/ci/libraries/_local_mounts.sh +++ b/scripts/ci/libraries/_local_mounts.sh @@ -42,8 +42,6 @@ function local_mounts::generate_local_mounts_list { "$prefix"docs:/opt/airflow/docs:cached "$prefix"hooks:/opt/airflow/hooks:cached "$prefix"logs:/root/airflow/logs:cached - "$prefix"pylintrc:/opt/airflow/pylintrc:cached - "$prefix"pylintrc-tests:/opt/airflow/pylintrc-tests:cached "$prefix"pyproject.toml:/opt/airflow/pyproject.toml:cached "$prefix"pytest.ini:/opt/airflow/pytest.ini:cached "$prefix"scripts:/opt/airflow/scripts:cached diff --git a/scripts/ci/libraries/_pylint.sh b/scripts/ci/libraries/_pylint.sh deleted file mode 100644 index 6299e7a68aedf..0000000000000 --- a/scripts/ci/libraries/_pylint.sh +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/env bash -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# In case of the pylint checks we filter out some files which are still in pylint_todo.txt list -function pylint::filter_out_files_from_pylint_todo_list() { - FILTERED_FILES=() - set +e - local file - for file in "$@" - do - if [[ ${file} == "airflow/migrations/versions/"* ]]; then - # Skip all generated migration scripts - continue - fi - if ! 
grep -x "./${file}" <"${AIRFLOW_SOURCES}/scripts/ci/pylint_todo.txt" >/dev/null; then - FILTERED_FILES+=("${file}") - fi - done - set -e - export FILTERED_FILES - readonly FILTERED_FILES -} diff --git a/scripts/ci/pre_commit/pre_commit_check_extras_have_providers.py b/scripts/ci/pre_commit/pre_commit_check_extras_have_providers.py index 55ad3d643a346..20088b749ce65 100755 --- a/scripts/ci/pre_commit/pre_commit_check_extras_have_providers.py +++ b/scripts/ci/pre_commit/pre_commit_check_extras_have_providers.py @@ -29,7 +29,7 @@ sys.path.insert(0, AIRFLOW_SOURCES_DIR) # flake8: noqa: F401 -# pylint: disable=wrong-import-position + from setup import ALL_PROVIDERS # noqa sys.path.append(AIRFLOW_SOURCES_DIR) diff --git a/scripts/ci/pre_commit/pre_commit_check_setup_extra_packages_ref.py b/scripts/ci/pre_commit/pre_commit_check_setup_extra_packages_ref.py index 8acf7db52f991..6097ac22512b5 100755 --- a/scripts/ci/pre_commit/pre_commit_check_setup_extra_packages_ref.py +++ b/scripts/ci/pre_commit/pre_commit_check_setup_extra_packages_ref.py @@ -92,7 +92,7 @@ def get_deprecated_extras_from_docs() -> Dict[str, str]: docs_content = get_file_content(DOCS_FILE) deprecated_extras_section_regex = re.compile( - r'\| Deprecated extra \| Extra to be used instead \|\n(.*)\n', re.DOTALL # noqa + r'\| Deprecated extra \| Extra to be used instead \|\n(.*)\n', re.DOTALL ) deprecated_extras_content = deprecated_extras_section_regex.findall(docs_content)[0] diff --git a/scripts/ci/pre_commit/pre_commit_insert_extras.py b/scripts/ci/pre_commit/pre_commit_insert_extras.py index ce6706975ef14..86f612b9edb12 100755 --- a/scripts/ci/pre_commit/pre_commit_insert_extras.py +++ b/scripts/ci/pre_commit/pre_commit_insert_extras.py @@ -25,7 +25,7 @@ sys.path.insert(0, AIRFLOW_SOURCES_DIR) # flake8: noqa: F401 -# pylint: disable=wrong-import-position + from setup import EXTRAS_REQUIREMENTS # isort:skip sys.path.append(AIRFLOW_SOURCES_DIR) diff --git a/scripts/ci/pre_commit/pre_commit_pylint.sh b/scripts/ci/pre_commit/pre_commit_pylint.sh deleted file mode 100755 index 163979a250152..0000000000000 --- a/scripts/ci/pre_commit/pre_commit_pylint.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/env bash -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -export PYTHON_MAJOR_MINOR_VERSION="3.6" -export FORCE_ANSWER_TO_QUESTIONS=${FORCE_ANSWER_TO_QUESTIONS:="quit"} -export REMEMBER_LAST_ANSWER="true" -export PRINT_INFO_FROM_SCRIPTS="false" -export SKIP_CHECK_REMOTE_IMAGE="true" - - -# shellcheck source=scripts/ci/static_checks/pylint.sh -. 
"$( dirname "${BASH_SOURCE[0]}" )/../static_checks/pylint.sh" "${@}" diff --git a/scripts/ci/pre_commit/pre_commit_yaml_to_cfg.py b/scripts/ci/pre_commit/pre_commit_yaml_to_cfg.py index 0480286e749e6..600c5de2411f5 100755 --- a/scripts/ci/pre_commit/pre_commit_yaml_to_cfg.py +++ b/scripts/ci/pre_commit/pre_commit_yaml_to_cfg.py @@ -81,7 +81,7 @@ def write_config(yaml_config_file_path: str, default_cfg_file_path: str): configfile.writelines(FILE_HEADER) config_yaml = read_default_config_yaml(yaml_config_file_path) - for section in config_yaml: # pylint: disable=too-many-nested-blocks + for section in config_yaml: _write_section(configfile, section) diff --git a/scripts/ci/pylint_todo.txt b/scripts/ci/pylint_todo.txt deleted file mode 100644 index 7a6ef29a98c3c..0000000000000 --- a/scripts/ci/pylint_todo.txt +++ /dev/null @@ -1,5 +0,0 @@ -./airflow/models/dag.py -./airflow/models/dagrun.py -./airflow/www/utils.py -./airflow/configuration.py -./airflow/models/taskinstance.py diff --git a/scripts/ci/static_checks/pylint.sh b/scripts/ci/static_checks/pylint.sh deleted file mode 100755 index c69498e3832d2..0000000000000 --- a/scripts/ci/static_checks/pylint.sh +++ /dev/null @@ -1,56 +0,0 @@ -#!/usr/bin/env bash -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -# shellcheck source=scripts/ci/libraries/_script_init.sh -. "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh" - -function run_pylint() { - if [[ "${#@}" == "0" ]]; then - docker_v run "${EXTRA_DOCKER_FLAGS[@]}" \ - --entrypoint "/usr/local/bin/dumb-init" \ - "${AIRFLOW_CI_IMAGE}" \ - "--" "/opt/airflow/scripts/in_container/run_pylint.sh" - else - docker_v run "${EXTRA_DOCKER_FLAGS[@]}" \ - --entrypoint "/usr/local/bin/dumb-init" \ - "${AIRFLOW_CI_IMAGE}" \ - "--" "/opt/airflow/scripts/in_container/run_pylint.sh" "${@}" - fi -} - -build_images::prepare_ci_build - -build_images::rebuild_ci_image_if_needed - -# Bug: Pylint only looks at PYLINTRC if it can't find a file in the _default_ -# locations, meaning we can't use this env var to over-ride it -args=() - -if [[ -n "${PYLINTRC:-}" ]]; then - args=(--rcfile "${PYLINTRC}") -fi - -if [[ "${#@}" != "0" ]]; then - pylint::filter_out_files_from_pylint_todo_list "$@" - - if [[ "${#FILTERED_FILES[@]}" == "0" ]]; then - echo "Filtered out all files. Skipping pylint." - exit 0 - fi - args+=("${FILTERED_FILES[@]}") -fi -run_pylint "${args[@]}" diff --git a/scripts/ci/static_checks/refresh_pylint_todo.sh b/scripts/ci/static_checks/refresh_pylint_todo.sh deleted file mode 100755 index 52474b74e6db0..0000000000000 --- a/scripts/ci/static_checks/refresh_pylint_todo.sh +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env bash -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. 
See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -export FORCE_ANSWER_TO_QUESTIONS="quit" - -# shellcheck source=scripts/ci/libraries/_script_init.sh -. "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh" - -function refresh_pylint_todo() { - docker_v run "${EXTRA_DOCKER_FLAGS[@]}" \ - "${AIRFLOW_CI_IMAGE}" \ - "/opt/airflow/scripts/in_container/refresh_pylint_todo.sh" -} - -build_images::prepare_ci_build - -build_images::rebuild_ci_image_if_needed - -refresh_pylint_todo diff --git a/scripts/in_container/_in_container_utils.sh b/scripts/in_container/_in_container_utils.sh index fdb2d18d1a55d..07a49d93456a6 100644 --- a/scripts/in_container/_in_container_utils.sh +++ b/scripts/in_container/_in_container_utils.sh @@ -141,63 +141,6 @@ function in_container_basic_sanity_check() { in_container_cleanup_pycache } -function in_container_refresh_pylint_todo() { - if [[ ${VERBOSE} == "true" ]]; then - echo - echo "Refreshing list of all non-pylint compliant files. This can take some time." - echo - - echo - echo "Finding list all non-pylint compliant files everywhere except 'tests' folder" - echo - fi - # Using path -prune is much better in the local environment on OSX because we have host - # Files mounted and node_modules is a huge directory which takes many seconds to even scan - # -prune works better than -not path because it skips traversing the whole directory. -not path traverses - # the directory and only excludes it after all of it is scanned - find . 
\ - -path "./airflow/www/node_modules" -prune -o \ - -path "./airflow/ui/node_modules" -prune -o \ - -path "./airflow/migrations/versions" -prune -o \ - -path "./.eggs" -prune -o \ - -path "./docs/_build" -prune -o \ - -path "./build" -prune -o \ - -path "./tests" -prune -o \ - -name "*.py" \ - -not -name 'webserver_config.py' | - grep ".*.py$" | - xargs pylint | tee "${AIRFLOW_SOURCES}/scripts/ci/pylint_todo_main.txt" - - grep -v "\*\*" <"${AIRFLOW_SOURCES}/scripts/ci/pylint_todo_main.txt" | - grep -v "^$" | grep -v "\-\-\-" | grep -v "^Your code has been" | - awk 'BEGIN{FS=":"}{print "./"$1}' | sort | uniq >"${AIRFLOW_SOURCES}/scripts/ci/pylint_todo_new.txt" - - if [[ ${VERBOSE} == "true" ]]; then - echo - echo "So far found $(wc -l <"${AIRFLOW_SOURCES}/scripts/ci/pylint_todo_new.txt") files" - echo - - echo - echo "Finding list of all non-pylint compliant files in 'tests' folder" - echo - fi - find "./tests" -name "*.py" -print0 | - xargs -0 pylint --disable="${DISABLE_CHECKS_FOR_TESTS}" | tee "${AIRFLOW_SOURCES}/scripts/ci/pylint_todo_tests.txt" - - grep -v "\*\*" <"${AIRFLOW_SOURCES}/scripts/ci/pylint_todo_tests.txt" | - grep -v "^$" | grep -v "\-\-\-" | grep -v "^Your code has been" | - awk 'BEGIN{FS=":"}{print "./"$1}' | sort | uniq >>"${AIRFLOW_SOURCES}/scripts/ci/pylint_todo_new.txt" - - rm -fv "${AIRFLOW_SOURCES}/scripts/ci/pylint_todo_main.txt" "${AIRFLOW_SOURCES}/scripts/ci/pylint_todo_tests.txt" - mv -v "${AIRFLOW_SOURCES}/scripts/ci/pylint_todo_new.txt" "${AIRFLOW_SOURCES}/scripts/ci/pylint_todo.txt" - - if [[ ${VERBOSE} == "true" ]]; then - echo - echo "Found $(wc -l <"${AIRFLOW_SOURCES}/scripts/ci/pylint_todo.txt") files" - echo - fi -} - export DISABLE_CHECKS_FOR_TESTS="missing-docstring,no-self-use,too-many-public-methods,protected-access,do-not-use-asserts" function start_output_heartbeat() { diff --git a/scripts/in_container/refresh_pylint_todo.sh b/scripts/in_container/refresh_pylint_todo.sh deleted file mode 100755 index cc4fa8f3e9d8a..0000000000000 --- a/scripts/in_container/refresh_pylint_todo.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env bash -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -# shellcheck source=scripts/in_container/_in_container_script_init.sh -. "$( dirname "${BASH_SOURCE[0]}" )/_in_container_script_init.sh" - -in_container_refresh_pylint_todo diff --git a/scripts/in_container/run_pylint.sh b/scripts/in_container/run_pylint.sh deleted file mode 100755 index b3f56d826fcfd..0000000000000 --- a/scripts/in_container/run_pylint.sh +++ /dev/null @@ -1,49 +0,0 @@ -#!/usr/bin/env bash -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. 
The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -# shellcheck source=scripts/in_container/_in_container_script_init.sh -. "$( dirname "${BASH_SOURCE[0]}" )/_in_container_script_init.sh" - -# Pylint is _very_ unhappy with implicit namespaces, so for this test only, we need to make it not -trap "rm -f airflow/providers/__init__.py" EXIT -touch airflow/providers/__init__.py - - -export PYTHONPATH=${AIRFLOW_SOURCES} -if [[ ${#@} == "0" ]]; then - echo - echo "Running pylint for all sources except 'tests' and 'kubernetes_tests' folder" - echo - - # Using path -prune is much better in the local environment on OSX because we have host - # Files mounted and node_modules is a huge directory which takes many seconds to even scan - # -prune works better than -not path because it skips traversing the whole directory. -not path traverses - # the directory and only excludes it after all of it is scanned - find . \ - -path "./airflow/www/node_modules" -prune -o \ - -path "./airflow/ui/node_modules" -prune -o \ - -path "./airflow/migrations/versions" -prune -o \ - -path "./.eggs" -prune -o \ - -path "./docs/_build" -prune -o \ - -path "./build" -prune -o \ - -name "*.py" \ - -not -name 'webserver_config.py' | \ - grep ".*.py$" | \ - grep -vFf scripts/ci/pylint_todo.txt | sort | xargs pylint -j 0 --output-format=colorized -else - /usr/local/bin/pylint --output-format=colorized "$@" -fi diff --git a/scripts/in_container/update_quarantined_test_status.py b/scripts/in_container/update_quarantined_test_status.py index 825a2d8d878f1..8daede00b3fbd 100755 --- a/scripts/in_container/update_quarantined_test_status.py +++ b/scripts/in_container/update_quarantined_test_status.py @@ -93,7 +93,7 @@ def parse_test_history(line: str) -> Optional[TestHistory]: comment = values[5] if len(values) >= 6 else "" try: states = parse_state_history(values[3]) - except Exception: # noqa + except Exception: states = [] return TestHistory( test_id=the_id, @@ -117,7 +117,7 @@ def parse_body(body: str) -> Dict[str, TestHistory]: break try: status = parse_test_history(line) - except Exception: # noqa + except Exception: continue if status: test_history_map[status.test_id] = status diff --git a/scripts/tools/generate-integrations-json.py b/scripts/tools/generate-integrations-json.py index 9bbc36c2fc042..ba5cc5ac85184 100755 --- a/scripts/tools/generate-integrations-json.py +++ b/scripts/tools/generate-integrations-json.py @@ -20,11 +20,8 @@ import re import shutil -# pylint: disable=no-name-in-module from docs.exts.provider_yaml_utils import load_package_data -# pylint: enable=no-name-in-module - AIRFLOW_SITE_DIR = os.environ.get('AIRFLOW_SITE_DIRECTORY') ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)) DOCS_DIR = os.path.join(ROOT_DIR, 'docs') diff --git a/scripts/tools/list-integrations.py b/scripts/tools/list-integrations.py index 8a6863e9296f4..3df61511a6366 100755 --- a/scripts/tools/list-integrations.py +++ b/scripts/tools/list-integrations.py @@ -95,7 +95,7 @@ def 
_find_clazzes(directory, base_class): sort -n -r """ -parser = argparse.ArgumentParser( # noqa +parser = argparse.ArgumentParser( description=HELP, formatter_class=argparse.RawTextHelpFormatter, epilog=EPILOG ) # argparse handle `-h/--help/` internally diff --git a/setup.py b/setup.py index 922248ea2b0fd..5d6f7528b1da1 100644 --- a/setup.py +++ b/setup.py @@ -74,7 +74,7 @@ def rm_all_files(files: List[str]): for file in files: try: os.remove(file) - except Exception as e: # noqa pylint: disable=broad-except + except Exception as e: logger.warning("Error when removing %s: %s", file, e) def run(self): @@ -104,7 +104,7 @@ def initialize_options(self): def finalize_options(self): """Set final values for options.""" - def run(self): # noqa + def run(self) -> None: """Run a command to compile and build assets.""" subprocess.check_call('./airflow/www/compile_assets.sh') @@ -124,7 +124,7 @@ def initialize_options(self): def finalize_options(self): """Set final values for options.""" - def run(self): # noqa + def run(self) -> None: """List extras.""" print("\n".join(wrap(", ".join(EXTRAS_REQUIREMENTS.keys()), 100))) @@ -507,7 +507,6 @@ def write_version(filename: str = os.path.join(*[my_dir, "airflow", "git_version 'pipdeptree', 'pre-commit', 'pygithub', - 'pylint~=2.8.1', 'pysftp', 'pytest~=6.0', 'pytest-cov', @@ -800,7 +799,7 @@ def sort_extras_requirements() -> Dict[str, List[str]]: Sort both: extras and list of dependencies to make it easier to analyse problems external packages will be first, then if providers are added they are added at the end of the lists. """ - sorted_requirements = dict(sorted(EXTRAS_REQUIREMENTS.items())) # noqa + sorted_requirements = dict(sorted(EXTRAS_REQUIREMENTS.items())) for extra_list in sorted_requirements.values(): extra_list.sort() return sorted_requirements @@ -836,10 +835,8 @@ def get_provider_package_from_package_id(package_id: str): def get_excluded_providers(): """ Returns packages excluded for the current python version. - Currently the only excluded provider is apache hive for Python 3.9. Until https://github.com/dropbox/PyHive/issues/380 is fixed. - """ return ['apache.hive'] if PY39 else [] @@ -855,15 +852,9 @@ def get_all_provider_packages(): class AirflowDistribution(Distribution): - """ - The setuptools.Distribution subclass with Airflow specific behaviour - - The reason for pylint: disable=signature-differs of parse_config_files is explained here: - https://github.com/PyCQA/pylint/issues/3737 - - """ + """The setuptools.Distribution subclass with Airflow specific behaviour""" - def parse_config_files(self, *args, **kwargs): # pylint: disable=signature-differs + def parse_config_files(self, *args, **kwargs) -> None: """ Ensure that when we have been asked to install providers from sources that we don't *also* try to install those providers from PyPI. 
@@ -872,7 +863,7 @@ def parse_config_files(self, *args, **kwargs): # pylint: disable=signature-diff """ super().parse_config_files(*args, **kwargs) if os.getenv(INSTALL_PROVIDERS_FROM_SOURCES) == 'true': - self.install_requires = [ # noqa pylint: disable=attribute-defined-outside-init + self.install_requires = [ req for req in self.install_requires if not req.startswith('apache-airflow-providers-') ] provider_yaml_files = glob.glob("airflow/providers/**/provider.yaml", recursive=True) diff --git a/tests/api/auth/backend/test_basic_auth.py b/tests/api/auth/backend/test_basic_auth.py index 49d7d30a6386f..81ee6d6d56f3d 100644 --- a/tests/api/auth/backend/test_basic_auth.py +++ b/tests/api/auth/backend/test_basic_auth.py @@ -29,7 +29,7 @@ class TestBasicAuth: def set_attrs(self, minimal_app_for_experimental_api): self.app = minimal_app_for_experimental_api - self.appbuilder = self.app.appbuilder # pylint: disable=no-member + self.appbuilder = self.app.appbuilder role_admin = self.appbuilder.sm.find_role("Admin") tester = self.appbuilder.sm.find_user(username="test") if not tester: diff --git a/tests/api/common/experimental/test_mark_tasks.py b/tests/api/common/experimental/test_mark_tasks.py index 82df7c541fe69..4dab57eb96a3d 100644 --- a/tests/api/common/experimental/test_mark_tasks.py +++ b/tests/api/common/experimental/test_mark_tasks.py @@ -98,7 +98,7 @@ def verify_state(self, dag, task_ids, execution_dates, state, old_tis, session=N assert len(tis) > 0 - for ti in tis: # pylint: disable=too-many-nested-blocks + for ti in tis: assert ti.operator == dag.get_task(ti.task_id).task_type if ti.task_id in task_ids and ti.execution_date in execution_dates: assert ti.state == state diff --git a/tests/api_connexion/endpoints/test_dag_endpoint.py b/tests/api_connexion/endpoints/test_dag_endpoint.py index e11ad72d9da0e..ffb61e38cf630 100644 --- a/tests/api_connexion/endpoints/test_dag_endpoint.py +++ b/tests/api_connexion/endpoints/test_dag_endpoint.py @@ -59,11 +59,11 @@ def configured_app(minimal_app_for_api): ) create_user(app, username="test_no_permissions", role_name="TestNoPermissions") # type: ignore create_user(app, username="test_granular_permissions", role_name="TestGranularDag") # type: ignore - app.appbuilder.sm.sync_perm_for_dag( # type: ignore # pylint: disable=no-member + app.appbuilder.sm.sync_perm_for_dag( # type: ignore "TEST_DAG_1", access_control={'TestGranularDag': [permissions.ACTION_CAN_EDIT, permissions.ACTION_CAN_READ]}, ) - app.appbuilder.sm.sync_perm_for_dag( # type: ignore # pylint: disable=no-member + app.appbuilder.sm.sync_perm_for_dag( # type: ignore "TEST_DAG_1", access_control={'TestGranularDag': [permissions.ACTION_CAN_EDIT, permissions.ACTION_CAN_READ]}, ) diff --git a/tests/api_connexion/endpoints/test_dag_run_endpoint.py b/tests/api_connexion/endpoints/test_dag_run_endpoint.py index 482cbeaeedc2f..e51eca8f551fd 100644 --- a/tests/api_connexion/endpoints/test_dag_run_endpoint.py +++ b/tests/api_connexion/endpoints/test_dag_run_endpoint.py @@ -62,7 +62,7 @@ def configured_app(minimal_app_for_api): role_name="TestGranularDag", permissions=[(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_RUN)], ) - app.appbuilder.sm.sync_perm_for_dag( # type: ignore # pylint: disable=no-member + app.appbuilder.sm.sync_perm_for_dag( # type: ignore "TEST_DAG_ID", access_control={'TestGranularDag': [permissions.ACTION_CAN_EDIT, permissions.ACTION_CAN_READ]}, ) diff --git a/tests/api_connexion/endpoints/test_extra_link_endpoint.py 
b/tests/api_connexion/endpoints/test_extra_link_endpoint.py index 20a63e6950345..bec2ed8ebc409 100644 --- a/tests/api_connexion/endpoints/test_extra_link_endpoint.py +++ b/tests/api_connexion/endpoints/test_extra_link_endpoint.py @@ -72,8 +72,8 @@ def setup_attrs(self, configured_app, session) -> None: self.dag = self._create_dag() self.app.dag_bag = DagBag(os.devnull, include_examples=False) - self.app.dag_bag.dags = {self.dag.dag_id: self.dag} # type: ignore # pylint: disable=no-member - self.app.dag_bag.sync_to_db() # type: ignore # pylint: disable=no-member + self.app.dag_bag.dags = {self.dag.dag_id: self.dag} # type: ignore + self.app.dag_bag.sync_to_db() # type: ignore dr = DagRun( dag_id=self.dag.dag_id, diff --git a/tests/api_connexion/endpoints/test_import_error_endpoint.py b/tests/api_connexion/endpoints/test_import_error_endpoint.py index 97be12837b25f..a83c62e849a5e 100644 --- a/tests/api_connexion/endpoints/test_import_error_endpoint.py +++ b/tests/api_connexion/endpoints/test_import_error_endpoint.py @@ -20,7 +20,7 @@ from parameterized import parameterized from airflow.api_connexion.exceptions import EXCEPTIONS_LINK_MAP -from airflow.models.errors import ImportError # pylint: disable=redefined-builtin +from airflow.models.errors import ImportError from airflow.security import permissions from airflow.utils import timezone from airflow.utils.session import provide_session diff --git a/tests/api_connexion/endpoints/test_log_endpoint.py b/tests/api_connexion/endpoints/test_log_endpoint.py index 6189e430fe58d..15caf1995cd89 100644 --- a/tests/api_connexion/endpoints/test_log_endpoint.py +++ b/tests/api_connexion/endpoints/test_log_endpoint.py @@ -66,7 +66,7 @@ class TestGetLog: default_time = "2020-06-10T20:00:00+00:00" @pytest.fixture(autouse=True) - def setup_attrs(self, configured_app, configure_loggers) -> None: # pylint: disable=unused-argument + def setup_attrs(self, configured_app, configure_loggers) -> None: self.app = configured_app self.client = self.app.test_client() # Make sure that the configure_logging is not cached @@ -110,7 +110,7 @@ def configure_loggers(self, tmp_path): logging.config.dictConfig(DEFAULT_LOGGING_CONFIG) def _prepare_db(self): - dagbag = self.app.dag_bag # pylint: disable=no-member + dagbag = self.app.dag_bag dag = DAG(self.DAG_ID, start_date=timezone.parse(self.default_time)) dag.sync_to_db() dagbag.dags.pop(self.DAG_ID, None) @@ -173,7 +173,7 @@ def test_get_logs_of_removed_task(self, session): self._create_dagrun(session) # Recreate DAG without tasks - dagbag = self.app.dag_bag # pylint: disable=no-member + dagbag = self.app.dag_bag dag = DAG(self.DAG_ID, start_date=timezone.parse(self.default_time)) del dagbag.dags[self.DAG_ID] dagbag.bag_dag(dag=dag, root_dag=dag) diff --git a/tests/api_connexion/endpoints/test_task_endpoint.py b/tests/api_connexion/endpoints/test_task_endpoint.py index d7a7c7bc71869..919d115247af6 100644 --- a/tests/api_connexion/endpoints/test_task_endpoint.py +++ b/tests/api_connexion/endpoints/test_task_endpoint.py @@ -75,7 +75,7 @@ def clean_db(): clear_db_serialized_dags() @pytest.fixture(autouse=True) - def setup_attrs(self, configured_app, setup_dag) -> None: # pylint: disable=unused-argument + def setup_attrs(self, configured_app, setup_dag) -> None: self.clean_db() self.app = configured_app self.client = self.app.test_client() # type:ignore diff --git a/tests/api_connexion/endpoints/test_task_instance_endpoint.py b/tests/api_connexion/endpoints/test_task_instance_endpoint.py index b28330e48d0bb..79ff113e6823b 
100644 --- a/tests/api_connexion/endpoints/test_task_instance_endpoint.py +++ b/tests/api_connexion/endpoints/test_task_instance_endpoint.py @@ -880,7 +880,7 @@ def test_should_respond_200( update_extras=False, single_dag_run=False, ) - self.app.dag_bag.sync_to_db() # pylint: disable=no-member + self.app.dag_bag.sync_to_db() response = self.client.post( f"/api/v1/dags/{request_dag}/clearTaskInstances", environ_overrides={"REMOTE_USER": "test"}, @@ -932,9 +932,7 @@ def test_should_respond_200_with_reset_dag_run(self, session): json=payload, ) - failed_dag_runs = ( - session.query(DagRun).filter(DagRun.state == "failed").count() # pylint: disable=W0143 - ) + failed_dag_runs = session.query(DagRun).filter(DagRun.state == "failed").count() assert 200 == response.status_code expected_response = [ { @@ -1024,7 +1022,7 @@ def test_should_raise_400_for_naive_and_bad_datetime(self, payload, expected, se update_extras=False, single_dag_run=False, ) - self.app.dag_bag.sync_to_db() # pylint: disable=no-member + self.app.dag_bag.sync_to_db() response = self.client.post( "/api/v1/dags/example_python_operator/clearTaskInstances", environ_overrides={"REMOTE_USER": "test"}, @@ -1067,7 +1065,7 @@ def test_should_assert_call_mocked_api(self, mock_set_state, session): ] } - dag = self.app.dag_bag.dags['example_python_operator'] # pylint: disable=no-member + dag = self.app.dag_bag.dags['example_python_operator'] task = dag.task_dict['print_the_context'] mock_set_state.assert_called_once_with( commit=False, diff --git a/tests/api_connexion/endpoints/test_xcom_endpoint.py b/tests/api_connexion/endpoints/test_xcom_endpoint.py index 6496b59914ea7..b49fcb2e2389e 100644 --- a/tests/api_connexion/endpoints/test_xcom_endpoint.py +++ b/tests/api_connexion/endpoints/test_xcom_endpoint.py @@ -53,7 +53,7 @@ def configured_app(minimal_app_for_api): (permissions.ACTION_CAN_READ, permissions.RESOURCE_XCOM), ], ) - app.appbuilder.sm.sync_perm_for_dag( # type: ignore # pylint: disable=no-member + app.appbuilder.sm.sync_perm_for_dag( # type: ignore "test-dag-id-1", access_control={'TestGranularDag': [permissions.ACTION_CAN_EDIT, permissions.ACTION_CAN_READ]}, ) diff --git a/tests/api_connexion/schemas/test_error_schema.py b/tests/api_connexion/schemas/test_error_schema.py index 02d574f10403f..e960bdf984aa0 100644 --- a/tests/api_connexion/schemas/test_error_schema.py +++ b/tests/api_connexion/schemas/test_error_schema.py @@ -21,7 +21,7 @@ import_error_collection_schema, import_error_schema, ) -from airflow.models.errors import ImportError # pylint: disable=redefined-builtin +from airflow.models.errors import ImportError from airflow.utils import timezone from airflow.utils.session import provide_session from tests.test_utils.db import clear_db_import_errors diff --git a/tests/api_connexion/test_basic_auth.py b/tests/api_connexion/test_basic_auth.py index f45d879a5c99e..4b01aeb329756 100644 --- a/tests/api_connexion/test_basic_auth.py +++ b/tests/api_connexion/test_basic_auth.py @@ -31,7 +31,7 @@ class TestBasicAuth: def set_attrs(self, minimal_app_for_api): self.app = minimal_app_for_api - sm = self.app.appbuilder.sm # pylint: disable=no-member,invalid-name + sm = self.app.appbuilder.sm tester = sm.find_user(username="test") if not tester: role_admin = sm.find_role("Admin") diff --git a/tests/build_provider_packages_dependencies.py b/tests/build_provider_packages_dependencies.py index 1e7bdbcc53b8e..2236f5b83f5ce 100644 --- a/tests/build_provider_packages_dependencies.py +++ b/tests/build_provider_packages_dependencies.py @@ 
-123,7 +123,7 @@ def __init__(self, filename): def process_import(self, import_name: str): self.imports.append(import_name) - def get_import_name_from_import_from(self, node: ImportFrom) -> List[str]: # noqa + def get_import_name_from_import_from(self, node: ImportFrom) -> List[str]: """ Retrieves import name from the "from" import. :param node: ImportFrom name @@ -136,11 +136,11 @@ def get_import_name_from_import_from(self, node: ImportFrom) -> List[str]: # no import_names.append(fullname) return import_names - def visit_Import(self, node: Import): # pylint: disable=invalid-name + def visit_Import(self, node: Import): for alias in node.names: self.process_import(alias.name) - def visit_ImportFrom(self, node: ImportFrom): # pylint: disable=invalid-name + def visit_ImportFrom(self, node: ImportFrom): if node.module == '__future__': return for fullname in self.get_import_name_from_import_from(node): diff --git a/tests/cli/commands/test_celery_command.py b/tests/cli/commands/test_celery_command.py index 4c5d6da8c6129..e2c16685979c1 100644 --- a/tests/cli/commands/test_celery_command.py +++ b/tests/cli/commands/test_celery_command.py @@ -184,7 +184,7 @@ def test_worker_started_with_required_arguments(self, mock_worker, mock_popen, m mock_worker.worker.return_value.run.assert_called_once_with( pool='prefork', optimization='fair', - O='fair', # noqa + O='fair', queues=queues, pidfile=pid_file, concurrency=int(concurrency), diff --git a/tests/cli/commands/test_info_command.py b/tests/cli/commands/test_info_command.py index ffcd6fc317efb..b4482046c70c0 100644 --- a/tests/cli/commands/test_info_command.py +++ b/tests/cli/commands/test_info_command.py @@ -76,7 +76,7 @@ def test_should_remove_pii_from_url(self, before, after): class TestAirflowInfo: @classmethod def setup_class(cls): - # pylint: disable=attribute-defined-outside-init + cls.parser = cli_parser.get_parser() @classmethod diff --git a/tests/cli/commands/test_role_command.py b/tests/cli/commands/test_role_command.py index 8148ede9db5b0..3a00ebded79b8 100644 --- a/tests/cli/commands/test_role_command.py +++ b/tests/cli/commands/test_role_command.py @@ -33,7 +33,7 @@ def _set_attrs(self, app, dagbag, parser): self.app = app self.dagbag = dagbag self.parser = parser - self.appbuilder = self.app.appbuilder # pylint: disable=no-member + self.appbuilder = self.app.appbuilder self.clear_roles_and_roles() yield self.clear_roles_and_roles() diff --git a/tests/cli/commands/test_user_command.py b/tests/cli/commands/test_user_command.py index 221d6d92107f8..8fc4ed360d98f 100644 --- a/tests/cli/commands/test_user_command.py +++ b/tests/cli/commands/test_user_command.py @@ -44,7 +44,7 @@ def _set_attrs(self, app, dagbag, parser): self.app = app self.dagbag = dagbag self.parser = parser - self.appbuilder = self.app.appbuilder # pylint: disable=no-member + self.appbuilder = self.app.appbuilder self.clear_roles_and_roles() yield self.clear_roles_and_roles() diff --git a/tests/cli/commands/test_webserver_command.py b/tests/cli/commands/test_webserver_command.py index 5e44c39f5ea92..a7e581f4120ca 100644 --- a/tests/cli/commands/test_webserver_command.py +++ b/tests/cli/commands/test_webserver_command.py @@ -56,18 +56,18 @@ def test_should_wait_for_workers_to_start(self, mock_sleep): self.monitor._get_num_ready_workers_running.return_value = 0 self.monitor._get_num_workers_running.return_value = 4 self.monitor._check_workers() - self.monitor._spawn_new_workers.assert_not_called() # pylint: disable=no-member - 
self.monitor._kill_old_workers.assert_not_called() # pylint: disable=no-member - self.monitor._reload_gunicorn.assert_not_called() # pylint: disable=no-member + self.monitor._spawn_new_workers.assert_not_called() + self.monitor._kill_old_workers.assert_not_called() + self.monitor._reload_gunicorn.assert_not_called() @mock.patch('airflow.cli.commands.webserver_command.sleep') def test_should_kill_excess_workers(self, mock_sleep): self.monitor._get_num_ready_workers_running.return_value = 10 self.monitor._get_num_workers_running.return_value = 10 self.monitor._check_workers() - self.monitor._spawn_new_workers.assert_not_called() # pylint: disable=no-member - self.monitor._kill_old_workers.assert_called_once_with(2) # pylint: disable=no-member - self.monitor._reload_gunicorn.assert_not_called() # pylint: disable=no-member + self.monitor._spawn_new_workers.assert_not_called() + self.monitor._kill_old_workers.assert_called_once_with(2) + self.monitor._reload_gunicorn.assert_not_called() @mock.patch('airflow.cli.commands.webserver_command.sleep') def test_should_start_new_workers_when_missing(self, mock_sleep): @@ -75,9 +75,9 @@ def test_should_start_new_workers_when_missing(self, mock_sleep): self.monitor._get_num_workers_running.return_value = 3 self.monitor._check_workers() # missing one worker, starting just 1 - self.monitor._spawn_new_workers.assert_called_once_with(1) # pylint: disable=no-member - self.monitor._kill_old_workers.assert_not_called() # pylint: disable=no-member - self.monitor._reload_gunicorn.assert_not_called() # pylint: disable=no-member + self.monitor._spawn_new_workers.assert_called_once_with(1) + self.monitor._kill_old_workers.assert_not_called() + self.monitor._reload_gunicorn.assert_not_called() @mock.patch('airflow.cli.commands.webserver_command.sleep') def test_should_start_new_batch_when_missing_many_workers(self, mock_sleep): @@ -85,17 +85,17 @@ def test_should_start_new_batch_when_missing_many_workers(self, mock_sleep): self.monitor._get_num_workers_running.return_value = 1 self.monitor._check_workers() # missing 3 workers, but starting single batch (2) - self.monitor._spawn_new_workers.assert_called_once_with(2) # pylint: disable=no-member - self.monitor._kill_old_workers.assert_not_called() # pylint: disable=no-member - self.monitor._reload_gunicorn.assert_not_called() # pylint: disable=no-member + self.monitor._spawn_new_workers.assert_called_once_with(2) + self.monitor._kill_old_workers.assert_not_called() + self.monitor._reload_gunicorn.assert_not_called() @mock.patch('airflow.cli.commands.webserver_command.sleep') def test_should_start_new_workers_when_refresh_interval_has_passed(self, mock_sleep): self.monitor._last_refresh_time -= 200 self.monitor._check_workers() - self.monitor._spawn_new_workers.assert_called_once_with(2) # pylint: disable=no-member - self.monitor._kill_old_workers.assert_not_called() # pylint: disable=no-member - self.monitor._reload_gunicorn.assert_not_called() # pylint: disable=no-member + self.monitor._spawn_new_workers.assert_called_once_with(2) + self.monitor._kill_old_workers.assert_not_called() + self.monitor._reload_gunicorn.assert_not_called() assert abs(self.monitor._last_refresh_time - time.monotonic()) < 5 @mock.patch('airflow.cli.commands.webserver_command.sleep') @@ -104,25 +104,25 @@ def test_should_reload_when_plugin_has_been_changed(self, mock_sleep): self.monitor._check_workers() - self.monitor._spawn_new_workers.assert_not_called() # pylint: disable=no-member - self.monitor._kill_old_workers.assert_not_called() # 
pylint: disable=no-member - self.monitor._reload_gunicorn.assert_not_called() # pylint: disable=no-member + self.monitor._spawn_new_workers.assert_not_called() + self.monitor._kill_old_workers.assert_not_called() + self.monitor._reload_gunicorn.assert_not_called() self.monitor._generate_plugin_state.return_value = {'AA': 32} self.monitor._check_workers() - self.monitor._spawn_new_workers.assert_not_called() # pylint: disable=no-member - self.monitor._kill_old_workers.assert_not_called() # pylint: disable=no-member - self.monitor._reload_gunicorn.assert_not_called() # pylint: disable=no-member + self.monitor._spawn_new_workers.assert_not_called() + self.monitor._kill_old_workers.assert_not_called() + self.monitor._reload_gunicorn.assert_not_called() self.monitor._generate_plugin_state.return_value = {'AA': 32} self.monitor._check_workers() - self.monitor._spawn_new_workers.assert_not_called() # pylint: disable=no-member - self.monitor._kill_old_workers.assert_not_called() # pylint: disable=no-member - self.monitor._reload_gunicorn.assert_called_once_with() # pylint: disable=no-member + self.monitor._spawn_new_workers.assert_not_called() + self.monitor._kill_old_workers.assert_not_called() + self.monitor._reload_gunicorn.assert_called_once_with() assert abs(self.monitor._last_refresh_time - time.monotonic()) < 5 @@ -274,7 +274,7 @@ def _wait_pidfile(self, pidfile): try: with open(pidfile) as file: return int(file.read()) - except Exception: # pylint: disable=broad-except + except Exception: if start_time - time.monotonic() > 60: raise time.sleep(1) @@ -287,7 +287,7 @@ def test_cli_webserver_foreground(self): AIRFLOW__WEBSERVER__WORKERS="1", ): # Run webserver in foreground and terminate it. - # pylint: disable=consider-using-with + proc = subprocess.Popen(["airflow", "webserver"]) assert proc.poll() is None @@ -310,7 +310,7 @@ def test_cli_webserver_foreground_with_pid(self): AIRFLOW__CORE__LOAD_EXAMPLES="False", AIRFLOW__WEBSERVER__WORKERS="1", ): - # pylint: disable=consider-using-with + proc = subprocess.Popen(["airflow", "webserver", "--pid", pidfile]) assert proc.poll() is None @@ -336,7 +336,7 @@ def test_cli_webserver_background(self): logfile = f"{tmpdir}/airflow-webserver.log" try: # Run webserver as daemon in background. Note that the wait method is not called. - # pylint: disable=consider-using-with + proc = subprocess.Popen( [ "airflow", @@ -412,7 +412,7 @@ def test_cli_webserver_access_log_format(self): ): access_logfile = f"{tmpdir}/access.log" # Run webserver in foreground and terminate it. 
- # pylint: disable=consider-using-with + proc = subprocess.Popen( [ "airflow", @@ -428,7 +428,6 @@ def test_cli_webserver_access_log_format(self): # Wait for webserver process time.sleep(10) - # pylint: disable=consider-using-with proc2 = subprocess.Popen(["curl", "http://localhost:8080"]) proc2.wait(10) try: diff --git a/tests/conftest.py b/tests/conftest.py index a5af5baf71093..55e15934a76e0 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -33,7 +33,7 @@ os.environ["AWS_DEFAULT_REGION"] = os.environ.get("AWS_DEFAULT_REGION") or "us-east-1" os.environ["CREDENTIALS_DIR"] = os.environ.get('CREDENTIALS_DIR') or "/files/airflow-breeze-config/keys" -from tests.test_utils.perf.perf_kit.sqlalchemy import ( # noqa isort:skip # pylint: disable=wrong-import-position +from tests.test_utils.perf.perf_kit.sqlalchemy import ( # noqa isort:skip count_queries, trace_queries, ) @@ -96,9 +96,9 @@ def pytest_print(text): if columns == ['num']: # It is very unlikely that the user wants to display only numbers, but probably # the user just wants to count the queries. - exit_stack.enter_context(count_queries(print_fn=pytest_print)) # pylint: disable=no-member + exit_stack.enter_context(count_queries(print_fn=pytest_print)) elif any(c for c in ['time', 'trace', 'sql', 'parameters']): - exit_stack.enter_context( # pylint: disable=no-member + exit_stack.enter_context( trace_queries( display_num='num' in columns, display_time='time' in columns, diff --git a/tests/core/test_core.py b/tests/core/test_core.py index ee2df3c541767..c4a4bc557f615 100644 --- a/tests/core/test_core.py +++ b/tests/core/test_core.py @@ -254,10 +254,10 @@ def test_template_non_bool(self): """ class NonBoolObject: - def __len__(self): # pylint: disable=invalid-length-returned + def __len__(self): return NotImplemented - def __bool__(self): # pylint: disable=invalid-bool-returned, bad-option-value + def __bool__(self): return NotImplemented op = OperatorSubclass( @@ -367,11 +367,11 @@ def test_task_fail_duration(self): session = settings.Session() try: op1.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True) - except Exception: # pylint: disable=broad-except + except Exception: pass try: op2.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True) - except Exception: # pylint: disable=broad-except + except Exception: pass op1_fails = ( session.query(TaskFail) diff --git a/tests/core/test_logging_config.py b/tests/core/test_logging_config.py index 5e0c7de01741d..d1e5b9f323c1c 100644 --- a/tests/core/test_logging_config.py +++ b/tests/core/test_logging_config.py @@ -99,12 +99,9 @@ def reset_logging(): manager = logging.root.manager manager.disabled = logging.NOTSET airflow_loggers = [ - # pylint: disable=no-member - logger - for logger_name, logger in manager.loggerDict.items() - if logger_name.startswith('airflow') + logger for logger_name, logger in manager.loggerDict.items() if logger_name.startswith('airflow') ] - for logger in airflow_loggers: # pylint: disable=too-many-nested-blocks + for logger in airflow_loggers: if isinstance(logger, logging.Logger): logger.setLevel(logging.NOTSET) logger.propagate = True diff --git a/tests/core/test_settings.py b/tests/core/test_settings.py index 2c7bb9f2a7e86..0afdb1d7dd9e4 100644 --- a/tests/core/test_settings.py +++ b/tests/core/test_settings.py @@ -115,7 +115,7 @@ def test_import_with_dunder_all_not_specified(self): settings.import_local_settings() with pytest.raises(AttributeError): - settings.not_policy() # pylint: disable=no-member + 
settings.not_policy() def test_import_with_dunder_all(self): """ @@ -128,7 +128,7 @@ def test_import_with_dunder_all(self): settings.import_local_settings() task_instance = MagicMock() - settings.test_policy(task_instance) # pylint: disable=no-member + settings.test_policy(task_instance) assert task_instance.run_as_user == "myself" @@ -154,7 +154,7 @@ def test_policy_function(self): settings.import_local_settings() task_instance = MagicMock() - settings.test_policy(task_instance) # pylint: disable=no-member + settings.test_policy(task_instance) assert task_instance.run_as_user == "myself" @@ -182,7 +182,7 @@ def test_custom_policy(self): task_instance = MagicMock() task_instance.owner = 'airflow' with pytest.raises(AirflowClusterPolicyViolation): - settings.task_must_have_owners(task_instance) # pylint: disable=no-member + settings.task_must_have_owners(task_instance) class TestUpdatedConfigNames(unittest.TestCase): diff --git a/tests/dags/subdir1/test_ignore_this.py b/tests/dags/subdir1/test_ignore_this.py index 1e5b97165422c..72a8da4d7b98c 100644 --- a/tests/dags/subdir1/test_ignore_this.py +++ b/tests/dags/subdir1/test_ignore_this.py @@ -17,6 +17,6 @@ # under the License. # needed to work against airflow "safe mode" parsing -from airflow.models import DAG # noqa # pylint: disable=unused-import +from airflow.models import DAG # noqa raise Exception("This dag file should have been ignored!") diff --git a/tests/dags/test_mark_success.py b/tests/dags/test_mark_success.py index 759b39f33457b..d5c05d752c27a 100644 --- a/tests/dags/test_mark_success.py +++ b/tests/dags/test_mark_success.py @@ -30,6 +30,4 @@ dag = DAG(dag_id='test_mark_success', default_args=args) -task = PythonOperator( - task_id='task1', python_callable=lambda x: sleep(x), op_args=[600], dag=dag # pylint: disable=W0108 -) +task = PythonOperator(task_id='task1', python_callable=lambda x: sleep(x), op_args=[600], dag=dag) diff --git a/tests/dags/test_on_failure_callback.py b/tests/dags/test_on_failure_callback.py index 9a166fef2aaa1..9ff14bd205194 100644 --- a/tests/dags/test_on_failure_callback.py +++ b/tests/dags/test_on_failure_callback.py @@ -31,7 +31,7 @@ dag = DAG(dag_id='test_om_failure_callback_dag', default_args=args) -def write_data_to_callback(*arg, **kwargs): # pylint: disable=unused-argument +def write_data_to_callback(*arg, **kwargs): with open(os.environ.get('AIRFLOW_CALLBACK_FILE'), "w+") as f: f.write("Callback fired") diff --git a/tests/dags/test_subdag.py b/tests/dags/test_subdag.py index f2227dc00966c..35daa88d7b4ff 100644 --- a/tests/dags/test_subdag.py +++ b/tests/dags/test_subdag.py @@ -78,4 +78,4 @@ def subdag(parent_dag_name, child_dag_name, args): task_id='some-other-task', ) - start >> section_1 >> some_other_task # pylint: disable=W0104 + start >> section_1 >> some_other_task diff --git a/tests/decorators/test_python.py b/tests/decorators/test_python.py index 59849fc01c3f6..25077fccda93b 100644 --- a/tests/decorators/test_python.py +++ b/tests/decorators/test_python.py @@ -117,25 +117,25 @@ def test_infer_multiple_outputs_using_typing(self): def identity_dict(x: int, y: int) -> Dict[str, int]: return {"x": x, "y": y} - assert identity_dict(5, 5).operator.multiple_outputs is True # pylint: disable=maybe-no-member + assert identity_dict(5, 5).operator.multiple_outputs is True @task_decorator def identity_tuple(x: int, y: int) -> Tuple[int, int]: return x, y - assert identity_tuple(5, 5).operator.multiple_outputs is False # pylint: disable=maybe-no-member + assert identity_tuple(5, 
5).operator.multiple_outputs is False @task_decorator def identity_int(x: int) -> int: return x - assert identity_int(5).operator.multiple_outputs is False # pylint: disable=maybe-no-member + assert identity_int(5).operator.multiple_outputs is False @task_decorator def identity_notyping(x: int): return x - assert identity_notyping(5).operator.multiple_outputs is False # pylint: disable=maybe-no-member + assert identity_notyping(5).operator.multiple_outputs is False def test_manual_multiple_outputs_false_with_typings(self): @task_decorator(multiple_outputs=False) @@ -152,12 +152,12 @@ def identity2(x: int, y: int) -> Dict[int, int]: state=State.RUNNING, ) - res.operator.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE) # pylint: disable=maybe-no-member + res.operator.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE) ti = dr.get_task_instances()[0] - assert res.operator.multiple_outputs is False # pylint: disable=maybe-no-member - assert ti.xcom_pull() == [8, 4] # pylint: disable=maybe-no-member + assert res.operator.multiple_outputs is False + assert ti.xcom_pull() == [8, 4] assert ti.xcom_pull(key="return_value_0") is None assert ti.xcom_pull(key="return_value_1") is None @@ -176,11 +176,11 @@ def identity_tuple(x: int, y: int) -> Tuple[int, int]: state=State.RUNNING, ) - ident.operator.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE) # pylint: disable=maybe-no-member + ident.operator.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE) ti = dr.get_task_instances()[0] - assert not ident.operator.multiple_outputs # pylint: disable=maybe-no-member + assert not ident.operator.multiple_outputs assert ti.xcom_pull() == [35, 36] assert ti.xcom_pull(key="return_value_0") is None assert ti.xcom_pull(key="return_value_1") is None @@ -193,10 +193,10 @@ def add_number(num: int) -> int: return num + 2 with pytest.raises(TypeError): - add_number(2, 3) # pylint: disable=too-many-function-args + add_number(2, 3) with pytest.raises(TypeError): - add_number() # pylint: disable=no-value-for-parameter - add_number('test') # pylint: disable=no-value-for-parameter + add_number() + add_number('test') def test_fail_method(self): """Tests that @task will fail if signature is not binding.""" @@ -227,7 +227,7 @@ def add_number(num: int): ) with pytest.raises(AirflowException): - # pylint: disable=maybe-no-member + ret.operator.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE) def test_fail_multiple_outputs_no_dict(self): @@ -245,7 +245,7 @@ def add_number(num: int): ) with pytest.raises(AirflowException): - # pylint: disable=maybe-no-member + ret.operator.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE) def test_python_callable_arguments_are_templatized(self): @@ -271,7 +271,7 @@ def test_python_callable_arguments_are_templatized(self): start_date=DEFAULT_DATE, state=State.RUNNING, ) - ret.operator.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE) # pylint: disable=maybe-no-member + ret.operator.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE) ds_templated = DEFAULT_DATE.date().isoformat() assert len(recorded_calls) == 1 @@ -302,7 +302,7 @@ def test_python_callable_keyword_arguments_are_templatized(self): start_date=DEFAULT_DATE, state=State.RUNNING, ) - ret.operator.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE) # pylint: disable=maybe-no-member + ret.operator.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE) assert len(recorded_calls) == 1 self._assert_calls_equal( @@ -341,8 +341,8 @@ def do_run(): do_run_2 = do_run() assert ['do_run', 'do_run__1', 'do_run__2'] == self.dag.task_ids - assert 
do_run_1.operator.task_id == 'do_run__1' # pylint: disable=maybe-no-member - assert do_run_2.operator.task_id == 'do_run__2' # pylint: disable=maybe-no-member + assert do_run_1.operator.task_id == 'do_run__1' + assert do_run_2.operator.task_id == 'do_run__2' def test_multiple_calls_in_task_group(self): """Test calling task multiple times in a TaskGroup""" @@ -393,7 +393,7 @@ def return_dict(number: int): state=State.RUNNING, ) - ret.operator.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE) # pylint: disable=maybe-no-member + ret.operator.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE) ti = dr.get_task_instances()[0] assert ti.xcom_pull(key='number') == test_number + 1 @@ -409,7 +409,7 @@ def do_run(): with self.dag: ret = do_run() - assert ret.operator.owner == 'airflow' # pylint: disable=maybe-no-member + assert ret.operator.owner == 'airflow' @task_decorator def test_apply_default_raise(unknow): @@ -417,14 +417,14 @@ def test_apply_default_raise(unknow): with pytest.raises(TypeError): with self.dag: - test_apply_default_raise() # pylint: disable=no-value-for-parameter + test_apply_default_raise() @task_decorator def test_apply_default(owner): return owner with self.dag: - ret = test_apply_default() # pylint: disable=no-value-for-parameter + ret = test_apply_default() assert 'owner' in ret.operator.op_kwargs def test_xcom_arg(self): @@ -442,7 +442,7 @@ def add_num(number: int, num2: int = 2): with self.dag: bigger_number = add_2(test_number) - ret = add_num(bigger_number, XComArg(bigger_number.operator)) # pylint: disable=maybe-no-member + ret = add_num(bigger_number, XComArg(bigger_number.operator)) dr = self.dag.create_dagrun( run_id=DagRunType.MANUAL, @@ -451,12 +451,11 @@ def add_num(number: int, num2: int = 2): state=State.RUNNING, ) - bigger_number.operator.run( # pylint: disable=maybe-no-member - start_date=DEFAULT_DATE, end_date=DEFAULT_DATE - ) - ret.operator.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE) # pylint: disable=maybe-no-member + bigger_number.operator.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE) + + ret.operator.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE) ti_add_num = [ti for ti in dr.get_task_instances() if ti.task_id == 'add_num'][0] - assert ti_add_num.xcom_pull(key=ret.key) == (test_number + 2) * 2 # pylint: disable=maybe-no-member + assert ti_add_num.xcom_pull(key=ret.key) == (test_number + 2) * 2 def test_dag_task(self): """Tests dag.task property to generate task""" @@ -498,4 +497,4 @@ def add_2(number: int): with self.dag: ret = add_2(test_number) - assert ret.operator.doc_md.strip(), "Adds 2 to number." # pylint: disable=maybe-no-member + assert ret.operator.doc_md.strip(), "Adds 2 to number." 
diff --git a/tests/decorators/test_python_virtualenv.py b/tests/decorators/test_python_virtualenv.py index 5ef4bad1b8549..13e7be0ca730f 100644 --- a/tests/decorators/test_python_virtualenv.py +++ b/tests/decorators/test_python_virtualenv.py @@ -70,7 +70,7 @@ def test_no_system_site_packages(self): @task.virtualenv(system_site_packages=False, python_version=PYTHON_VERSION, use_dill=True) def f(): try: - import funcsigs # noqa: F401 # pylint: disable=redefined-outer-name,reimported,unused-import + import funcsigs # noqa: F401 except ImportError: return True raise Exception @@ -78,7 +78,7 @@ def f(): with self.dag: ret = f() - ret.operator.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE) # pylint: disable=no-member + ret.operator.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE) def test_system_site_packages(self): @task.virtualenv( @@ -88,7 +88,7 @@ def test_system_site_packages(self): use_dill=True, ) def f(): - import funcsigs # noqa: F401 # pylint: disable=redefined-outer-name,reimported,unused-import + import funcsigs # noqa: F401 with self.dag: ret = f() @@ -103,7 +103,7 @@ def test_with_requirements_pinned(self): use_dill=True, ) def f(): - import funcsigs # noqa: F401 # pylint: disable=redefined-outer-name,reimported + import funcsigs if funcsigs.__version__ != '0.4': raise Exception @@ -121,7 +121,7 @@ def test_unpinned_requirements(self): use_dill=True, ) def f(): - import funcsigs # noqa: F401 # pylint: disable=redefined-outer-name,reimported,unused-import + import funcsigs # noqa: F401 with self.dag: ret = f() @@ -142,7 +142,7 @@ def f(): def test_python_2(self): @task.virtualenv(python_version=2, requirements=['dill']) def f(): - {}.iteritems() # pylint: disable=no-member + {}.iteritems() with self.dag: ret = f() @@ -152,22 +152,22 @@ def f(): def test_python_2_7(self): @task.virtualenv(python_version='2.7', requirements=['dill']) def f(): - {}.iteritems() # pylint: disable=no-member + {}.iteritems() return True with self.dag: ret = f() - ret.operator.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE) # pylint: disable=no-member + ret.operator.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE) def test_python_3(self): @task.virtualenv(python_version=3, use_dill=False, requirements=['dill']) def f(): - import sys # pylint: disable=reimported,unused-import,redefined-outer-name + import sys print(sys.version) try: - {}.iteritems() # pylint: disable=no-member + {}.iteritems() except AttributeError: return raise Exception @@ -187,9 +187,9 @@ def _invert_python_major_version(): def test_string_args(self): @task.virtualenv(python_version=self._invert_python_major_version(), string_args=[1, 2, 1]) def f(): - global virtualenv_string_args # pylint: disable=global-statement,global-variable-not-assigned - print(virtualenv_string_args) # pylint: disable=undefined-variable - if virtualenv_string_args[0] != virtualenv_string_args[2]: # pylint: disable=undefined-variable + global virtualenv_string_args + print(virtualenv_string_args) + if virtualenv_string_args[0] != virtualenv_string_args[2]: raise Exception with self.dag: @@ -208,7 +208,7 @@ def f(a, b, c=False, d=False): with self.dag: ret = f(0, 1, c=True) - ret.operator.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE) # pylint: disable=no-member + ret.operator.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE) def test_return_none(self): @task.virtualenv diff --git a/tests/executors/test_celery_executor.py b/tests/executors/test_celery_executor.py index d15ca9a77175e..e24a3dd9779a9 100644 --- 
a/tests/executors/test_celery_executor.py +++ b/tests/executors/test_celery_executor.py @@ -25,10 +25,10 @@ from unittest import mock # leave this it is used by the test worker -import celery.contrib.testing.tasks # noqa: F401 pylint: disable=unused-import +import celery.contrib.testing.tasks # noqa: F401 import pytest from celery import Celery -from celery.backends.base import BaseBackend, BaseKeyValueStoreBackend # noqa +from celery.backends.base import BaseBackend, BaseKeyValueStoreBackend from celery.backends.database import DatabaseBackend from celery.contrib.testing.worker import start_worker from celery.result import AsyncResult @@ -141,7 +141,7 @@ def fake_execute_command(command): ] # "Enqueue" them. We don't have a real SimpleTaskInstance, so directly edit the dict - for (key, simple_ti, command, queue, task) in task_tuples_to_send: # pylint: disable=W0612 + for (key, simple_ti, command, queue, task) in task_tuples_to_send: executor.queued_tasks[key] = (command, 1, queue, simple_ti) executor.task_publish_retries[key] = 1 @@ -447,7 +447,7 @@ def test_should_support_db_backend(self, mock_session): with mock.patch.object(celery_executor.app, 'backend', mock_backend), self.assertLogs( "airflow.executors.celery_executor.BulkStateFetcher", level="DEBUG" ) as cm: - mock_session = mock_backend.ResultSession.return_value # pylint: disable=no-member + mock_session = mock_backend.ResultSession.return_value mock_session.query.return_value.filter.return_value.all.return_value = [ mock.MagicMock(**{"to_dict.return_value": {"status": "SUCCESS", "task_id": "123"}}) ] @@ -524,7 +524,8 @@ def register_signals(): signal.signal(signal.SIGUSR2, orig_sigusr2) -def test_send_tasks_to_celery_hang(register_signals): # pylint: disable=unused-argument +@pytest.mark.quarantined +def test_send_tasks_to_celery_hang(register_signals): """ Test that celery_executor does not hang after many runs. 
""" diff --git a/tests/executors/test_kubernetes_executor.py b/tests/executors/test_kubernetes_executor.py index 57b5af3dd81e1..312f71fd41252 100644 --- a/tests/executors/test_kubernetes_executor.py +++ b/tests/executors/test_kubernetes_executor.py @@ -47,7 +47,6 @@ AirflowKubernetesScheduler = None # type: ignore -# pylint: disable=unused-argument class TestAirflowKubernetesScheduler(unittest.TestCase): @staticmethod def _gen_random_string(seed, str_len): @@ -123,9 +122,7 @@ def test_execution_date_serialize_deserialize(self): @mock.patch('airflow.executors.kubernetes_executor.get_kube_client') @mock.patch('airflow.executors.kubernetes_executor.client') @mock.patch('airflow.executors.kubernetes_executor.KubernetesJobWatcher') - def test_delete_pod_successfully( - self, mock_watcher, mock_client, mock_kube_client - ): # pylint: disable=unused-argument + def test_delete_pod_successfully(self, mock_watcher, mock_client, mock_kube_client): pod_id = "my-pod-1" namespace = "my-namespace-1" @@ -143,9 +140,7 @@ def test_delete_pod_successfully( @mock.patch('airflow.executors.kubernetes_executor.get_kube_client') @mock.patch('airflow.executors.kubernetes_executor.client') @mock.patch('airflow.executors.kubernetes_executor.KubernetesJobWatcher') - def test_delete_pod_raises_404( - self, mock_watcher, mock_client, mock_kube_client - ): # pylint: disable=unused-argument + def test_delete_pod_raises_404(self, mock_watcher, mock_client, mock_kube_client): pod_id = "my-pod-1" namespace = "my-namespace-2" @@ -166,9 +161,7 @@ def test_delete_pod_raises_404( @mock.patch('airflow.executors.kubernetes_executor.get_kube_client') @mock.patch('airflow.executors.kubernetes_executor.client') @mock.patch('airflow.executors.kubernetes_executor.KubernetesJobWatcher') - def test_delete_pod_404_not_raised( - self, mock_watcher, mock_client, mock_kube_client - ): # pylint: disable=unused-argument + def test_delete_pod_404_not_raised(self, mock_watcher, mock_client, mock_kube_client): pod_id = "my-pod-1" namespace = "my-namespace-3" @@ -405,8 +398,6 @@ def test_change_state_failed_no_deletion( assert executor.event_buffer[key][0] == State.FAILED mock_delete_pod.assert_not_called() - # pylint: enable=unused-argument - @mock.patch('airflow.executors.kubernetes_executor.KubernetesJobWatcher') @mock.patch('airflow.executors.kubernetes_executor.get_kube_client') @mock.patch('airflow.executors.kubernetes_executor.AirflowKubernetesScheduler.delete_pod') diff --git a/tests/executors/test_local_executor.py b/tests/executors/test_local_executor.py index 92ddd026afa90..dfb0d51ae88d9 100644 --- a/tests/executors/test_local_executor.py +++ b/tests/executors/test_local_executor.py @@ -35,7 +35,7 @@ def execution_parallelism_subprocess(self, mock_check_call, parallelism=0): success_command = ['airflow', 'tasks', 'run', 'true', 'some_parameter', '2020-10-07'] fail_command = ['airflow', 'tasks', 'run', 'false', 'task_id', '2020-10-07'] - def fake_execute_command(command, close_fds=True): # pylint: disable=unused-argument + def fake_execute_command(command, close_fds=True): if command != success_command: raise subprocess.CalledProcessError(returncode=1, cmd=command) else: @@ -95,22 +95,22 @@ def test_execution_subprocess_unlimited_parallelism(self): settings, 'EXECUTE_TASKS_NEW_PYTHON_INTERPRETER', new_callable=mock.PropertyMock ) as option: option.return_value = True - self.execution_parallelism_subprocess(parallelism=0) # pylint: disable=no-value-for-parameter + self.execution_parallelism_subprocess(parallelism=0) def 
test_execution_subprocess_limited_parallelism(self): with mock.patch.object( settings, 'EXECUTE_TASKS_NEW_PYTHON_INTERPRETER', new_callable=mock.PropertyMock ) as option: option.return_value = True - self.execution_parallelism_subprocess(parallelism=2) # pylint: disable=no-value-for-parameter + self.execution_parallelism_subprocess(parallelism=2) @mock.patch.object(settings, 'EXECUTE_TASKS_NEW_PYTHON_INTERPRETER', False) def test_execution_unlimited_parallelism_fork(self): - self.execution_parallelism_fork(parallelism=0) # pylint: disable=no-value-for-parameter + self.execution_parallelism_fork(parallelism=0) @mock.patch.object(settings, 'EXECUTE_TASKS_NEW_PYTHON_INTERPRETER', False) def test_execution_limited_parallelism_fork(self): - self.execution_parallelism_fork(parallelism=2) # pylint: disable=no-value-for-parameter + self.execution_parallelism_fork(parallelism=2) @mock.patch('airflow.executors.local_executor.LocalExecutor.sync') @mock.patch('airflow.executors.base_executor.BaseExecutor.trigger_tasks') diff --git a/tests/hooks/test_subprocess.py b/tests/hooks/test_subprocess.py index 336ef91ab8af0..4693da730417a 100644 --- a/tests/hooks/test_subprocess.py +++ b/tests/hooks/test_subprocess.py @@ -37,7 +37,7 @@ class TestSubprocessHook(unittest.TestCase): ('empty env', {}, {OS_ENV_KEY: ''}), ('no env', None, {OS_ENV_KEY: OS_ENV_VAL}), ] - ) # pylint: disable=unused-argument + ) def test_env(self, name, env, expected): """ Test that env variables are exported correctly to the command environment. diff --git a/tests/jobs/test_backfill_job.py b/tests/jobs/test_backfill_job.py index 7b772dfbf06aa..154b485c89427 100644 --- a/tests/jobs/test_backfill_job.py +++ b/tests/jobs/test_backfill_job.py @@ -1168,7 +1168,6 @@ def test_backfill_execute_subdag(self): session.query(DagRun) .filter(DagRun.dag_id == subdag.dag_id) .filter(DagRun.execution_date == start_date) - # pylint: disable=comparison-with-callable .filter(DagRun.state == State.SUCCESS) .count() ) diff --git a/tests/jobs/test_local_task_job.py b/tests/jobs/test_local_task_job.py index 1eaad46dd0992..2a50d3569d3e0 100644 --- a/tests/jobs/test_local_task_job.py +++ b/tests/jobs/test_local_task_job.py @@ -122,7 +122,7 @@ def test_localtaskjob_heartbeat(self): job1.task_runner = StandardTaskRunner(job1) job1.task_runner.process = mock.Mock() with pytest.raises(AirflowException): - job1.heartbeat_callback() # pylint: disable=no-value-for-parameter + job1.heartbeat_callback() job1.task_runner.process.pid = 1 ti.state = State.RUNNING @@ -135,7 +135,7 @@ def test_localtaskjob_heartbeat(self): job1.task_runner.process.pid = 2 with pytest.raises(AirflowException): - job1.heartbeat_callback() # pylint: disable=no-value-for-parameter + job1.heartbeat_callback() @mock.patch('airflow.jobs.local_task_job.psutil') def test_localtaskjob_heartbeat_with_run_as_user(self, psutil_mock): @@ -509,7 +509,7 @@ def success_callback(context): dag = DAG(dag_id='test_mark_success', start_date=DEFAULT_DATE, default_args={'owner': 'owner1'}) def task_function(ti): - # pylint: disable=unused-argument + time.sleep(60) # This should not happen -- the state change should be noticed and the task should get killed with shared_mem_lock: @@ -581,7 +581,7 @@ def failure_callback(context): dag = DAG(dag_id='test_mark_failure', start_date=DEFAULT_DATE, default_args={'owner': 'owner1'}) def task_function(ti): - # pylint: disable=unused-argument + time.sleep(60) # This should not happen -- the state change should be noticed and the task should get killed with shared_mem_lock: 
diff --git a/tests/jobs/test_scheduler_job.py b/tests/jobs/test_scheduler_job.py index 36bc1b584a191..0d1f5306e2f56 100644 --- a/tests/jobs/test_scheduler_job.py +++ b/tests/jobs/test_scheduler_job.py @@ -16,7 +16,7 @@ # specific language governing permissions and limitations # under the License. # -# pylint: disable=attribute-defined-outside-init + import datetime import os import shutil @@ -1588,7 +1588,7 @@ def test_find_executable_task_instances_concurrency_queued(self): session.rollback() # TODO: This is a hack, I think I need to just remove the setting and have it on always - def test_find_executable_task_instances_task_concurrency(self): # pylint: disable=too-many-statements + def test_find_executable_task_instances_task_concurrency(self): dag_id = 'SchedulerJobTest.test_find_executable_task_instances_task_concurrency' task_id_1 = 'dummy' task_id_2 = 'dummy2' @@ -2495,7 +2495,7 @@ def evaluate_dagrun( run_kwargs=None, advance_execution_date=False, session=None, - ): # pylint: disable=unused-argument + ): """ Helper for testing DagRun states with simple two-task DAGS. @@ -3224,7 +3224,7 @@ def do_schedule(mock_dagbag): self.scheduler_job.heartrate = 0 self.scheduler_job.run() - do_schedule() # pylint: disable=no-value-for-parameter + do_schedule() with create_session() as session: ti = ( session.query(TaskInstance) @@ -3259,7 +3259,7 @@ def run_with_error(ti, ignore_ti_state=False): # To verify that task does get re-queued. executor.do_update = True - do_schedule() # pylint: disable=no-value-for-parameter + do_schedule() ti.refresh_from_db() assert ti.state == State.SUCCESS @@ -3332,7 +3332,7 @@ def test_dag_get_active_runs(self): try: running_date = running_dates[0] - except Exception: # pylint: disable=broad-except + except Exception: running_date = 'Except' assert execution_date == running_date, 'Running Date must match Execution Date' @@ -3593,7 +3593,7 @@ def test_list_py_file_paths(self): 'test_zip_invalid_cron.zip', 'test_ignore_this.py', } - for root, _, files in os.walk(TEST_DAG_FOLDER): # pylint: disable=too-many-nested-blocks + for root, _, files in os.walk(TEST_DAG_FOLDER): for file_name in files: if file_name.endswith('.py') or file_name.endswith('.zip'): if file_name not in ignored_files: @@ -3606,7 +3606,7 @@ def test_list_py_file_paths(self): 'helper.py', } example_dag_folder = airflow.example_dags.__path__[0] - for root, _, files in os.walk(example_dag_folder): # pylint: disable=too-many-nested-blocks + for root, _, files in os.walk(example_dag_folder): for file_name in files: if file_name.endswith('.py') or file_name.endswith('.zip'): if file_name not in ['__init__.py'] and file_name not in ignored_files: @@ -4427,11 +4427,11 @@ def tearDown(self): [ # expected, dag_count, task_count # One DAG with one task per DAG file - (24, 1, 1), # noqa + (24, 1, 1), # One DAG with five tasks per DAG file - (28, 1, 5), # noqa + (28, 1, 5), # 10 DAGs with 10 tasks per DAG file - (195, 10, 10), # noqa + (195, 10, 10), ] ) def test_execute_queries_count_with_harvested_dags(self, expected_query_count, dag_count, task_count): @@ -4490,35 +4490,35 @@ def test_execute_queries_count_with_harvested_dags(self, expected_query_count, d [ # expected, dag_count, task_count, start_ago, schedule_interval, shape # One DAG with one task per DAG file - ([9, 9, 9, 9], 1, 1, "1d", "None", "no_structure"), # noqa - ([9, 9, 9, 9], 1, 1, "1d", "None", "linear"), # noqa - ([21, 12, 12, 12], 1, 1, "1d", "@once", "no_structure"), # noqa - ([21, 12, 12, 12], 1, 1, "1d", "@once", "linear"), # noqa - ([21, 
22, 24, 26], 1, 1, "1d", "30m", "no_structure"), # noqa - ([21, 22, 24, 26], 1, 1, "1d", "30m", "linear"), # noqa - ([21, 22, 24, 26], 1, 1, "1d", "30m", "binary_tree"), # noqa - ([21, 22, 24, 26], 1, 1, "1d", "30m", "star"), # noqa - ([21, 22, 24, 26], 1, 1, "1d", "30m", "grid"), # noqa + ([9, 9, 9, 9], 1, 1, "1d", "None", "no_structure"), + ([9, 9, 9, 9], 1, 1, "1d", "None", "linear"), + ([21, 12, 12, 12], 1, 1, "1d", "@once", "no_structure"), + ([21, 12, 12, 12], 1, 1, "1d", "@once", "linear"), + ([21, 22, 24, 26], 1, 1, "1d", "30m", "no_structure"), + ([21, 22, 24, 26], 1, 1, "1d", "30m", "linear"), + ([21, 22, 24, 26], 1, 1, "1d", "30m", "binary_tree"), + ([21, 22, 24, 26], 1, 1, "1d", "30m", "star"), + ([21, 22, 24, 26], 1, 1, "1d", "30m", "grid"), # One DAG with five tasks per DAG file - ([9, 9, 9, 9], 1, 5, "1d", "None", "no_structure"), # noqa - ([9, 9, 9, 9], 1, 5, "1d", "None", "linear"), # noqa - ([21, 12, 12, 12], 1, 5, "1d", "@once", "no_structure"), # noqa - ([22, 13, 13, 13], 1, 5, "1d", "@once", "linear"), # noqa - ([21, 22, 24, 26], 1, 5, "1d", "30m", "no_structure"), # noqa - ([22, 24, 27, 30], 1, 5, "1d", "30m", "linear"), # noqa - ([22, 24, 27, 30], 1, 5, "1d", "30m", "binary_tree"), # noqa - ([22, 24, 27, 30], 1, 5, "1d", "30m", "star"), # noqa - ([22, 24, 27, 30], 1, 5, "1d", "30m", "grid"), # noqa + ([9, 9, 9, 9], 1, 5, "1d", "None", "no_structure"), + ([9, 9, 9, 9], 1, 5, "1d", "None", "linear"), + ([21, 12, 12, 12], 1, 5, "1d", "@once", "no_structure"), + ([22, 13, 13, 13], 1, 5, "1d", "@once", "linear"), + ([21, 22, 24, 26], 1, 5, "1d", "30m", "no_structure"), + ([22, 24, 27, 30], 1, 5, "1d", "30m", "linear"), + ([22, 24, 27, 30], 1, 5, "1d", "30m", "binary_tree"), + ([22, 24, 27, 30], 1, 5, "1d", "30m", "star"), + ([22, 24, 27, 30], 1, 5, "1d", "30m", "grid"), # 10 DAGs with 10 tasks per DAG file - ([9, 9, 9, 9], 10, 10, "1d", "None", "no_structure"), # noqa - ([9, 9, 9, 9], 10, 10, "1d", "None", "linear"), # noqa - ([84, 27, 27, 27], 10, 10, "1d", "@once", "no_structure"), # noqa - ([94, 40, 40, 40], 10, 10, "1d", "@once", "linear"), # noqa - ([84, 88, 88, 88], 10, 10, "1d", "30m", "no_structure"), # noqa - ([94, 114, 114, 114], 10, 10, "1d", "30m", "linear"), # noqa - ([94, 108, 108, 108], 10, 10, "1d", "30m", "binary_tree"), # noqa - ([94, 108, 108, 108], 10, 10, "1d", "30m", "star"), # noqa - ([94, 108, 108, 108], 10, 10, "1d", "30m", "grid"), # noqa + ([9, 9, 9, 9], 10, 10, "1d", "None", "no_structure"), + ([9, 9, 9, 9], 10, 10, "1d", "None", "linear"), + ([84, 27, 27, 27], 10, 10, "1d", "@once", "no_structure"), + ([94, 40, 40, 40], 10, 10, "1d", "@once", "linear"), + ([84, 88, 88, 88], 10, 10, "1d", "30m", "no_structure"), + ([94, 114, 114, 114], 10, 10, "1d", "30m", "linear"), + ([94, 108, 108, 108], 10, 10, "1d", "30m", "binary_tree"), + ([94, 108, 108, 108], 10, 10, "1d", "30m", "star"), + ([94, 108, 108, 108], 10, 10, "1d", "30m", "grid"), ] ) def test_process_dags_queries_count( diff --git a/tests/models/test_baseoperator.py b/tests/models/test_baseoperator.py index e030c4e289ddf..fa02b4eea2e51 100644 --- a/tests/models/test_baseoperator.py +++ b/tests/models/test_baseoperator.py @@ -61,7 +61,7 @@ def __ne__(self, other): # Essentially similar to airflow.models.baseoperator.BaseOperator class DummyClass(metaclass=BaseOperatorMeta): - def __init__(self, test_param, params=None, default_args=None): # pylint: disable=unused-argument + def __init__(self, test_param, params=None, default_args=None): self.test_param = test_param def 
set_xcomargs_dependencies(self): @@ -84,11 +84,11 @@ def test_apply(self): def test_default_args(self): default_args = {'test_param': True} - dummy_class = DummyClass(default_args=default_args) # pylint: disable=no-value-for-parameter + dummy_class = DummyClass(default_args=default_args) assert dummy_class.test_param default_args = {'test_param': True, 'test_sub_param': True} - dummy_subclass = DummySubClass(default_args=default_args) # pylint: disable=no-value-for-parameter + dummy_subclass = DummySubClass(default_args=default_args) assert dummy_class.test_param assert dummy_subclass.test_sub_param @@ -98,16 +98,16 @@ def test_default_args(self): assert dummy_subclass.test_sub_param with pytest.raises(AirflowException, match='Argument.*test_sub_param.*required'): - DummySubClass(default_args=default_args) # pylint: disable=no-value-for-parameter + DummySubClass(default_args=default_args) def test_incorrect_default_args(self): default_args = {'test_param': True, 'extra_param': True} - dummy_class = DummyClass(default_args=default_args) # pylint: disable=no-value-for-parameter + dummy_class = DummyClass(default_args=default_args) assert dummy_class.test_param default_args = {'random_params': True} with pytest.raises(AirflowException, match='Argument.*test_param.*required'): - DummyClass(default_args=default_args) # pylint: disable=no-value-for-parameter + DummyClass(default_args=default_args) @parameterized.expand( [ @@ -392,7 +392,7 @@ def test_chain_not_support_type(self): dag = DAG(dag_id='test_chain', start_date=datetime.now()) [op1, op2] = [DummyOperator(task_id=f't{i}', dag=dag) for i in range(1, 3)] with pytest.raises(TypeError): - chain([op1, op2], 1) # noqa + chain([op1, op2], 1) def test_chain_different_length_iterable(self): dag = DAG(dag_id='test_chain', start_date=datetime.now()) diff --git a/tests/models/test_connection.py b/tests/models/test_connection.py index 29eed1c5be0eb..6990940de6cf8 100644 --- a/tests/models/test_connection.py +++ b/tests/models/test_connection.py @@ -185,7 +185,7 @@ def test_connection_extra_with_encryption_rotate_fernet_key(self): ), UriTestCaseConfig( test_conn_uri='scheme://user:password@host%2Flocation:1234/schema?' 
- '__extra__=%7B%22my_val%22%3A+%5B%22list%22%2C+%22of%22%2C+%22values%22%5D%2C+%22extra%22%3A+%7B%22nested%22%3A+%7B%22json%22%3A+%22val%22%7D%7D%7D', # noqa: E501 # pylint: disable=C0301 + '__extra__=%7B%22my_val%22%3A+%5B%22list%22%2C+%22of%22%2C+%22values%22%5D%2C+%22extra%22%3A+%7B%22nested%22%3A+%7B%22json%22%3A+%22val%22%7D%7D%7D', # noqa: E501 test_conn_attributes=dict( conn_type='scheme', host='host/location', @@ -350,7 +350,6 @@ def test_connection_extra_with_encryption_rotate_fernet_key(self): ), ] - # pylint: disable=undefined-variable @parameterized.expand([(x,) for x in test_from_uri_params], UriTestCaseConfig.uri_test_name) def test_connection_from_uri(self, test_config: UriTestCaseConfig): @@ -373,7 +372,6 @@ def test_connection_from_uri(self, test_config: UriTestCaseConfig): self.mask_secret.assert_has_calls(expected_calls) - # pylint: disable=undefined-variable @parameterized.expand([(x,) for x in test_from_uri_params], UriTestCaseConfig.uri_test_name) def test_connection_get_uri_from_uri(self, test_config: UriTestCaseConfig): """ @@ -395,7 +393,6 @@ def test_connection_get_uri_from_uri(self, test_config: UriTestCaseConfig): assert connection.schema == new_conn.schema assert connection.extra_dejson == new_conn.extra_dejson - # pylint: disable=undefined-variable @parameterized.expand([(x,) for x in test_from_uri_params], UriTestCaseConfig.uri_test_name) def test_connection_get_uri_from_conn(self, test_config: UriTestCaseConfig): """ diff --git a/tests/models/test_dag.py b/tests/models/test_dag.py index bd9fdb7155d62..939baf4931cc6 100644 --- a/tests/models/test_dag.py +++ b/tests/models/test_dag.py @@ -482,7 +482,7 @@ def jinja_udf(name): jinja_env = dag.get_template_env() assert 'hello' in jinja_env.filters - assert jinja_env.filters['hello'] == jinja_udf # pylint: disable=comparison-with-callable + assert jinja_env.filters['hello'] == jinja_udf def test_resolve_template_files_value(self): @@ -908,7 +908,6 @@ def test_dag_is_deactivated_upon_dagfile_deletion(self): orm_dag = session.query(DagModel).filter(DagModel.dag_id == dag_id).one() assert not orm_dag.is_active - # pylint: disable=no-member session.execute(DagModel.__table__.delete().where(DagModel.dag_id == dag_id)) session.close() @@ -1166,7 +1165,7 @@ class DAGsubclass(DAG): dag_.last_loaded = dag.last_loaded # test identity equality - assert dag == dag # pylint: disable=comparison-with-itself + assert dag == dag # test dag (in)equality based on _comps assert dag_eq == dag @@ -1868,7 +1867,7 @@ def return_num(num): # Test that if arg is not passed it raises a type error as expected. 
with pytest.raises(TypeError): - noop_pipeline() # pylint: disable=no-value-for-parameter + noop_pipeline() def test_dag_param_resolves(self): """Test that dag param is correctly resolved by operator""" @@ -1880,7 +1879,7 @@ def return_num(num): return num xcom_arg = return_num(value) - self.operator = xcom_arg.operator # pylint: disable=maybe-no-member + self.operator = xcom_arg.operator dag = xcom_pass_to_op() @@ -1907,7 +1906,7 @@ def return_num(num): assert isinstance(value, DagParam) xcom_arg = return_num(value) - self.operator = xcom_arg.operator # pylint: disable=maybe-no-member + self.operator = xcom_arg.operator dag = xcom_pass_to_op() new_value = 52 @@ -1933,7 +1932,7 @@ def return_num(num): return num xcom_arg = return_num(value) - self.operator = xcom_arg.operator # pylint: disable=maybe-no-member + self.operator = xcom_arg.operator dag = xcom_pass_to_op() assert dag.params['value'] == self.VALUE diff --git a/tests/models/test_dagbag.py b/tests/models/test_dagbag.py index 0c52c49c99247..b4edc0c7a4f0e 100644 --- a/tests/models/test_dagbag.py +++ b/tests/models/test_dagbag.py @@ -149,7 +149,7 @@ def create_dag(): def my_flow(): pass - my_dag = my_flow() # noqa # pylint: disable=unused-variable + my_dag = my_flow() # noqa source_lines = [line[12:] for line in inspect.getsource(create_dag).splitlines(keepends=True)[1:]] with NamedTemporaryFile("w+", encoding="utf8") as tf_1, NamedTemporaryFile( @@ -380,7 +380,7 @@ def validate_dags(self, expected_parent_dag, actual_found_dags, actual_dagbag, s def test_load_subdags(self): # Define Dag to load def standard_subdag(): - import datetime # pylint: disable=redefined-outer-name,reimported + import datetime from airflow.models import DAG from airflow.operators.dummy import DummyOperator @@ -431,7 +431,7 @@ def subdag_1(): # Define Dag to load def nested_subdags(): - import datetime # pylint: disable=redefined-outer-name,reimported + import datetime from airflow.models import DAG from airflow.operators.dummy import DummyOperator @@ -521,7 +521,7 @@ def test_skip_cycle_dags(self): # Define Dag to load def basic_cycle(): - import datetime # pylint: disable=redefined-outer-name,reimported + import datetime from airflow.models import DAG from airflow.operators.dummy import DummyOperator @@ -551,7 +551,7 @@ def basic_cycle(): # Define Dag to load def nested_subdag_cycle(): - import datetime # pylint: disable=redefined-outer-name,reimported + import datetime from airflow.models import DAG from airflow.operators.dummy import DummyOperator diff --git a/tests/models/test_dagparam.py b/tests/models/test_dagparam.py index 618eb888530e1..0d9ffd6fa870e 100644 --- a/tests/models/test_dagparam.py +++ b/tests/models/test_dagparam.py @@ -60,7 +60,6 @@ def return_num(num): state=State.RUNNING, ) - # pylint: disable=maybe-no-member xcom_arg.operator.run(start_date=self.DEFAULT_DATE, end_date=self.DEFAULT_DATE) ti = dr.get_task_instances()[0] @@ -87,7 +86,6 @@ def return_num(num): conf={'value': new_value}, ) - # pylint: disable=maybe-no-member xcom_arg.operator.run(start_date=self.DEFAULT_DATE, end_date=self.DEFAULT_DATE) ti = dr.get_task_instances()[0] @@ -113,7 +111,6 @@ def return_num(num): state=State.RUNNING, ) - # pylint: disable=maybe-no-member xcom_arg.operator.run(start_date=self.DEFAULT_DATE, end_date=self.DEFAULT_DATE) ti = dr.get_task_instances()[0] diff --git a/tests/models/test_pool.py b/tests/models/test_pool.py index 7981e230f2af7..60137dd96973a 100644 --- a/tests/models/test_pool.py +++ b/tests/models/test_pool.py @@ -59,10 +59,10 @@ def 
test_open_slots(self): session.commit() session.close() - assert 3 == pool.open_slots() # pylint: disable=no-value-for-parameter - assert 1 == pool.running_slots() # pylint: disable=no-value-for-parameter - assert 1 == pool.queued_slots() # pylint: disable=no-value-for-parameter - assert 2 == pool.occupied_slots() # pylint: disable=no-value-for-parameter + assert 3 == pool.open_slots() + assert 1 == pool.running_slots() + assert 1 == pool.queued_slots() + assert 2 == pool.occupied_slots() assert { "default_pool": { "open": 128, @@ -98,10 +98,10 @@ def test_infinite_slots(self): session.commit() session.close() - assert float('inf') == pool.open_slots() # pylint: disable=no-value-for-parameter - assert 1 == pool.running_slots() # pylint: disable=no-value-for-parameter - assert 1 == pool.queued_slots() # pylint: disable=no-value-for-parameter - assert 2 == pool.occupied_slots() # pylint: disable=no-value-for-parameter + assert float('inf') == pool.open_slots() + assert 1 == pool.running_slots() + assert 1 == pool.queued_slots() + assert 2 == pool.occupied_slots() assert { "default_pool": { "open": 128, diff --git a/tests/models/test_taskinstance.py b/tests/models/test_taskinstance.py index 35c00fe8368cf..b6cd8b8ff0b29 100644 --- a/tests/models/test_taskinstance.py +++ b/tests/models/test_taskinstance.py @@ -80,7 +80,7 @@ def wrap_task_instance(self, ti): self.task_state_in_callback = "" self.callback_ran = False - def success_handler(self, context): # pylint: disable=unused-argument + def success_handler(self, context): self.callback_ran = True session = settings.Session() temp_instance = ( @@ -737,7 +737,7 @@ def run_ti_and_assert( assert ti.start_date == expected_start_date assert ti.end_date == expected_end_date assert ti.duration == expected_duration - trs = TaskReschedule.find_for_task_instance(ti) # pylint: disable=no-value-for-parameter + trs = TaskReschedule.find_for_task_instance(ti) assert len(trs) == expected_task_reschedule_count date1 = timezone.utcnow() @@ -838,7 +838,7 @@ def run_ti_and_assert( assert ti.start_date == expected_start_date assert ti.end_date == expected_end_date assert ti.duration == expected_duration - trs = TaskReschedule.find_for_task_instance(ti) # pylint: disable=no-value-for-parameter + trs = TaskReschedule.find_for_task_instance(ti) assert len(trs) == expected_task_reschedule_count date1 = timezone.utcnow() @@ -852,7 +852,7 @@ def run_ti_and_assert( assert ti.state == State.NONE assert ti._try_number == 0 # Check that reschedules for ti have also been cleared. 
- trs = TaskReschedule.find_for_task_instance(ti) # pylint: disable=no-value-for-parameter + trs = TaskReschedule.find_for_task_instance(ti) assert not trs def test_depends_on_past(self): @@ -958,7 +958,7 @@ def test_check_task_dependencies( run_date = task.start_date + datetime.timedelta(days=5) ti = TI(downstream, run_date) - dep_results = TriggerRuleDep()._evaluate_trigger_rule( # pylint: disable=no-value-for-parameter + dep_results = TriggerRuleDep()._evaluate_trigger_rule( ti=ti, successes=successes, skipped=skipped, diff --git a/tests/models/test_xcom.py b/tests/models/test_xcom.py index 0339edbb90fec..4cf87aa4ad5ea 100644 --- a/tests/models/test_xcom.py +++ b/tests/models/test_xcom.py @@ -247,7 +247,7 @@ def test_xcom_get_many(self): @mock.patch("airflow.models.xcom.XCom.orm_deserialize_value") def test_xcom_init_on_load_uses_orm_deserialize_value(self, mock_orm_deserialize): - # pylint: disable=unexpected-keyword-arg + instance = BaseXCom( key="key", value="value", @@ -256,6 +256,6 @@ def test_xcom_init_on_load_uses_orm_deserialize_value(self, mock_orm_deserialize task_id="task_id", dag_id="dag_id", ) - # pylint: enable=unexpected-keyword-arg + instance.init_on_load() mock_orm_deserialize.assert_called_once_with() diff --git a/tests/operators/test_email.py b/tests/operators/test_email.py index 465cddefccb1a..5419796fbadbf 100644 --- a/tests/operators/test_email.py +++ b/tests/operators/test_email.py @@ -59,5 +59,5 @@ def test_execute(self): with conf_vars({('email', 'email_backend'): 'tests.operators.test_email.send_email_test'}): self._run_as_operator() assert send_email_test.call_count == 1 - resulting_files = send_email_test.call_args[1]['files'] # pylint: disable=unsubscriptable-object + resulting_files = send_email_test.call_args[1]['files'] assert resulting_files[0] == '/tmp/Report-A-2016-01-01.csv' diff --git a/tests/operators/test_python.py b/tests/operators/test_python.py index eae84b1de7f6a..3b1ad920076e3 100644 --- a/tests/operators/test_python.py +++ b/tests/operators/test_python.py @@ -789,7 +789,7 @@ def f(): def test_no_system_site_packages(self): def f(): try: - import funcsigs # noqa: F401 # pylint: disable=redefined-outer-name,reimported,unused-import + import funcsigs # noqa: F401 except ImportError: return True raise Exception @@ -798,13 +798,13 @@ def f(): def test_system_site_packages(self): def f(): - import funcsigs # noqa: F401 # pylint: disable=redefined-outer-name,reimported,unused-import + import funcsigs # noqa: F401 self._run_as_operator(f, requirements=['funcsigs'], system_site_packages=True) def test_with_requirements_pinned(self): def f(): - import funcsigs # noqa: F401 # pylint: disable=redefined-outer-name,reimported + import funcsigs if funcsigs.__version__ != '0.4': raise Exception @@ -813,13 +813,13 @@ def f(): def test_unpinned_requirements(self): def f(): - import funcsigs # noqa: F401 # pylint: disable=redefined-outer-name,reimported,unused-import + import funcsigs # noqa: F401 self._run_as_operator(f, requirements=['funcsigs', 'dill'], system_site_packages=False) def test_range_requirements(self): def f(): - import funcsigs # noqa: F401 # pylint: disable=redefined-outer-name,reimported,unused-import + import funcsigs # noqa: F401 self._run_as_operator(f, requirements=['funcsigs>1.0', 'dill'], system_site_packages=False) @@ -832,24 +832,24 @@ def f(): def test_python_2(self): def f(): - {}.iteritems() # pylint: disable=no-member + {}.iteritems() self._run_as_operator(f, python_version=2, requirements=['dill']) def test_python_2_7(self): def 
f(): - {}.iteritems() # pylint: disable=no-member + {}.iteritems() return True self._run_as_operator(f, python_version='2.7', requirements=['dill']) def test_python_3(self): def f(): - import sys # pylint: disable=reimported,unused-import,redefined-outer-name + import sys print(sys.version) try: - {}.iteritems() # pylint: disable=no-member + {}.iteritems() except AttributeError: return raise Exception @@ -883,7 +883,7 @@ def f(a): def test_string_args(self): def f(): - global virtualenv_string_args # pylint: disable=global-statement + global virtualenv_string_args print(virtualenv_string_args) if virtualenv_string_args[0] != virtualenv_string_args[2]: raise Exception @@ -956,7 +956,7 @@ def f( task, # other **context, - ): # pylint: disable=unused-argument,too-many-arguments,too-many-locals + ): pass self._run_as_operator(f, use_dill=True, system_site_packages=True, requirements=None) @@ -990,7 +990,7 @@ def f( prev_start_date_success, # other **context, - ): # pylint: disable=unused-argument,too-many-arguments,too-many-locals + ): pass self._run_as_operator( @@ -1020,7 +1020,7 @@ def f( yesterday_ds_nodash, # other **context, - ): # pylint: disable=unused-argument,too-many-arguments,too-many-locals + ): pass self._run_as_operator(f, use_dill=True, system_site_packages=False, requirements=None) @@ -1084,7 +1084,7 @@ def test_nested_context(self): new_context = {"ContextId": i} # Like 15 nested with statements ctx_obj = set_current_context(new_context) - ctx_obj.__enter__() # pylint: disable=E1101 + ctx_obj.__enter__() ctx_list.append(ctx_obj) for i in reversed(range(max_stack_depth)): # Iterate over contexts in reverse order - stack is LIFO diff --git a/tests/operators/test_sql.py b/tests/operators/test_sql.py index 623b667c1ab21..3706fd6d4a0c1 100644 --- a/tests/operators/test_sql.py +++ b/tests/operators/test_sql.py @@ -72,7 +72,6 @@ return_value=Connection(conn_id='sql_default', conn_type='postgres'), ) class TestSQLCheckOperatorDbHook: - # pylint: disable=attribute-defined-outside-init def setup_method(self): self.task_id = "test_task" self.conn_id = "sql_default" diff --git a/tests/operators/test_weekday.py b/tests/operators/test_weekday.py index 1a16518ccb6af..a1ba54edb6393 100644 --- a/tests/operators/test_weekday.py +++ b/tests/operators/test_weekday.py @@ -198,7 +198,7 @@ def test_branch_follow_false(self): def test_branch_with_no_weekday(self): """Check if BranchDayOfWeekOperator raises exception on missing weekday""" with self.assertRaises(AirflowException): - BranchDayOfWeekOperator( # pylint: disable=missing-kwoa + BranchDayOfWeekOperator( task_id="make_choice", follow_task_ids_if_true="branch_1", follow_task_ids_if_false="branch_2", diff --git a/tests/plugins/test_plugins_manager.py b/tests/plugins/test_plugins_manager.py index 23640a8367312..778f2b78f579f 100644 --- a/tests/plugins/test_plugins_manager.py +++ b/tests/plugins/test_plugins_manager.py @@ -64,7 +64,7 @@ class TestPluginsRBAC: @pytest.fixture(autouse=True) def _set_attrs(self, app): self.app = app - self.appbuilder = app.appbuilder # pylint: disable=no-member + self.appbuilder = app.appbuilder def test_flaskappbuilder_views(self): from tests.plugins.test_plugin import v_appbuilder_package @@ -135,7 +135,7 @@ class AirflowNoMenuViewsPlugin(AirflowPlugin): appbuilder_class_name = str(v_nomenu_appbuilder_package['view'].__class__.__name__) with mock_plugin_manager(plugins=[AirflowNoMenuViewsPlugin()]): - appbuilder = application.create_app(testing=True).appbuilder # pylint: disable=no-member + appbuilder = 
application.create_app(testing=True).appbuilder plugin_views = [view for view in appbuilder.baseviews if view.blueprint.name == appbuilder_class_name] diff --git a/tests/providers/amazon/aws/hooks/conftest.py b/tests/providers/amazon/aws/hooks/conftest.py index 7932c5dd2c5fc..77c948511177e 100644 --- a/tests/providers/amazon/aws/hooks/conftest.py +++ b/tests/providers/amazon/aws/hooks/conftest.py @@ -15,7 +15,6 @@ # specific language governing permissions and limitations # under the License. -# pylint: disable=redefined-outer-name import boto3 import pytest diff --git a/tests/providers/amazon/aws/hooks/test_base_aws.py b/tests/providers/amazon/aws/hooks/test_base_aws.py index 383880d1fae8c..c5343ad04bafb 100644 --- a/tests/providers/amazon/aws/hooks/test_base_aws.py +++ b/tests/providers/amazon/aws/hooks/test_base_aws.py @@ -55,7 +55,7 @@ def test_get_resource_type_returns_a_boto3_resource_of_the_requested_type(self): resource_from_hook = hook.get_resource_type('dynamodb') # this table needs to be created in production - table = resource_from_hook.create_table( # pylint: disable=no-member + table = resource_from_hook.create_table( TableName='test_airflow', KeySchema=[ {'AttributeName': 'id', 'KeyType': 'HASH'}, @@ -74,7 +74,7 @@ def test_get_session_returns_a_boto3_session(self): hook = AwsBaseHook(aws_conn_id='aws_default', resource_type='dynamodb') session_from_hook = hook.get_session() resource_from_session = session_from_hook.resource('dynamodb') - table = resource_from_session.create_table( # pylint: disable=no-member + table = resource_from_session.create_table( TableName='test_airflow', KeySchema=[ {'AttributeName': 'id', 'KeyType': 'HASH'}, @@ -290,7 +290,7 @@ def __call__(self): return True -def _always_true_predicate(e: Exception): # pylint: disable=unused-argument +def _always_true_predicate(e: Exception): return True @@ -299,7 +299,7 @@ def _retryable_test(thing): return thing() -def _always_false_predicate(e: Exception): # pylint: disable=unused-argument +def _always_false_predicate(e: Exception): return False diff --git a/tests/providers/amazon/aws/hooks/test_batch_client.py b/tests/providers/amazon/aws/hooks/test_batch_client.py index 6f331fd8d2902..dc931426b599f 100644 --- a/tests/providers/amazon/aws/hooks/test_batch_client.py +++ b/tests/providers/amazon/aws/hooks/test_batch_client.py @@ -16,7 +16,6 @@ # specific language governing permissions and limitations # under the License. -# pylint: disable=missing-docstring import unittest from unittest import mock diff --git a/tests/providers/amazon/aws/hooks/test_batch_waiters.py b/tests/providers/amazon/aws/hooks/test_batch_waiters.py index b852c2e9f140c..31c12d3794b42 100644 --- a/tests/providers/amazon/aws/hooks/test_batch_waiters.py +++ b/tests/providers/amazon/aws/hooks/test_batch_waiters.py @@ -16,8 +16,6 @@ # specific language governing permissions and limitations # under the License. 
-# pylint: disable=missing-docstring, redefined-outer-name - """ Test AwsBatchWaiters diff --git a/tests/providers/amazon/aws/hooks/test_s3.py b/tests/providers/amazon/aws/hooks/test_s3.py index b56d4fca7bf1e..2e435f2bb84bd 100644 --- a/tests/providers/amazon/aws/hooks/test_s3.py +++ b/tests/providers/amazon/aws/hooks/test_s3.py @@ -234,13 +234,13 @@ def test_get_wildcard_key(self, s3_bucket): def test_load_string(self, s3_bucket): hook = S3Hook() hook.load_string("Contént", "my_key", s3_bucket) - resource = boto3.resource('s3').Object(s3_bucket, 'my_key') # pylint: disable=no-member + resource = boto3.resource('s3').Object(s3_bucket, 'my_key') assert resource.get()['Body'].read() == b'Cont\xC3\xA9nt' def test_load_string_compress(self, s3_bucket): hook = S3Hook() hook.load_string("Contént", "my_key", s3_bucket, compression='gzip') - resource = boto3.resource('s3').Object(s3_bucket, 'my_key') # pylint: disable=no-member + resource = boto3.resource('s3').Object(s3_bucket, 'my_key') data = gz.decompress(resource.get()['Body'].read()) assert data == b'Cont\xC3\xA9nt' @@ -260,7 +260,7 @@ def test_load_string_acl(self, s3_bucket): def test_load_bytes(self, s3_bucket): hook = S3Hook() hook.load_bytes(b"Content", "my_key", s3_bucket) - resource = boto3.resource('s3').Object(s3_bucket, 'my_key') # pylint: disable=no-member + resource = boto3.resource('s3').Object(s3_bucket, 'my_key') assert resource.get()['Body'].read() == b'Content' def test_load_bytes_acl(self, s3_bucket): @@ -277,7 +277,7 @@ def test_load_fileobj(self, s3_bucket): temp_file.write(b"Content") temp_file.seek(0) hook.load_file_obj(temp_file, "my_key", s3_bucket) - resource = boto3.resource('s3').Object(s3_bucket, 'my_key') # pylint: disable=no-member + resource = boto3.resource('s3').Object(s3_bucket, 'my_key') assert resource.get()['Body'].read() == b'Content' def test_load_fileobj_acl(self, s3_bucket): @@ -288,7 +288,7 @@ def test_load_fileobj_acl(self, s3_bucket): hook.load_file_obj(temp_file, "my_key", s3_bucket, acl_policy='public-read') response = boto3.client('s3').get_object_acl( Bucket=s3_bucket, Key="my_key", RequestPayer='requester' - ) # pylint: disable=no-member # noqa: E501 # pylint: disable=C0301 + ) assert (response['Grants'][1]['Permission'] == 'READ') and ( response['Grants'][0]['Permission'] == 'FULL_CONTROL' ) @@ -299,7 +299,7 @@ def test_load_file_gzip(self, s3_bucket): temp_file.write(b"Content") temp_file.seek(0) hook.load_file(temp_file.name, "my_key", s3_bucket, gzip=True) - resource = boto3.resource('s3').Object(s3_bucket, 'my_key') # pylint: disable=no-member + resource = boto3.resource('s3').Object(s3_bucket, 'my_key') assert gz.decompress(resource.get()['Body'].read()) == b'Content' os.unlink(temp_file.name) @@ -311,7 +311,7 @@ def test_load_file_acl(self, s3_bucket): hook.load_file(temp_file.name, "my_key", s3_bucket, gzip=True, acl_policy='public-read') response = boto3.client('s3').get_object_acl( Bucket=s3_bucket, Key="my_key", RequestPayer='requester' - ) # pylint: disable=no-member # noqa: E501 # pylint: disable=C0301 + ) assert (response['Grants'][1]['Permission'] == 'READ') and ( response['Grants'][0]['Permission'] == 'FULL_CONTROL' ) @@ -326,7 +326,7 @@ def test_copy_object_acl(self, s3_bucket): hook.copy_object("my_key", "my_key", s3_bucket, s3_bucket) response = boto3.client('s3').get_object_acl( Bucket=s3_bucket, Key="my_key", RequestPayer='requester' - ) # pylint: disable=no-member # noqa: E501 # pylint: disable=C0301 + ) assert (response['Grants'][0]['Permission'] == 'FULL_CONTROL') and 
(len(response['Grants']) == 1) @mock_s3 @@ -461,7 +461,7 @@ def test_should_pass_extra_args(self, s3_bucket): temp_file.write(b"Content") temp_file.seek(0) hook.load_file_obj(temp_file, "my_key", s3_bucket, acl_policy='public-read') - resource = boto3.resource('s3').Object(s3_bucket, 'my_key') # pylint: disable=no-member + resource = boto3.resource('s3').Object(s3_bucket, 'my_key') assert resource.get()['ContentLanguage'] == "value" @mock_s3 diff --git a/tests/providers/amazon/aws/log/test_s3_task_handler.py b/tests/providers/amazon/aws/log/test_s3_task_handler.py index 067a248b3f635..323bef4f4e636 100644 --- a/tests/providers/amazon/aws/log/test_s3_task_handler.py +++ b/tests/providers/amazon/aws/log/test_s3_task_handler.py @@ -75,7 +75,7 @@ def tearDown(self): if self.s3_task_handler.handler: try: os.remove(self.s3_task_handler.handler.baseFilename) - except Exception: # pylint: disable=broad-except + except Exception: pass def test_hook(self): @@ -179,24 +179,14 @@ def test_write(self): self.s3_task_handler.s3_write('text', self.remote_log_location) # We shouldn't expect any error logs in the default working case. mock_error.assert_not_called() - body = ( - boto3.resource('s3') - .Object('bucket', self.remote_log_key) # pylint: disable=no-member - .get()['Body'] - .read() - ) + body = boto3.resource('s3').Object('bucket', self.remote_log_key).get()['Body'].read() assert body == b'text' def test_write_existing(self): self.conn.put_object(Bucket='bucket', Key=self.remote_log_key, Body=b'previous ') self.s3_task_handler.s3_write('text', self.remote_log_location) - body = ( - boto3.resource('s3') - .Object('bucket', self.remote_log_key) # pylint: disable=no-member - .get()['Body'] - .read() - ) + body = boto3.resource('s3').Object('bucket', self.remote_log_key).get()['Body'].read() assert body == b'previous \ntext' @@ -213,7 +203,7 @@ def test_close(self): self.s3_task_handler.close() # Should not raise - boto3.resource('s3').Object('bucket', self.remote_log_key).get() # pylint: disable=no-member + boto3.resource('s3').Object('bucket', self.remote_log_key).get() def test_close_no_upload(self): self.ti.raw = True @@ -222,4 +212,4 @@ def test_close_no_upload(self): self.s3_task_handler.close() with pytest.raises(ClientError): - boto3.resource('s3').Object('bucket', self.remote_log_key).get() # pylint: disable=no-member + boto3.resource('s3').Object('bucket', self.remote_log_key).get() diff --git a/tests/providers/amazon/aws/operators/test_athena.py b/tests/providers/amazon/aws/operators/test_athena.py index c1dd723447192..263db769cced0 100644 --- a/tests/providers/amazon/aws/operators/test_athena.py +++ b/tests/providers/amazon/aws/operators/test_athena.py @@ -43,7 +43,6 @@ result_configuration = {'OutputLocation': MOCK_DATA['outputLocation']} -# pylint: disable=unused-argument class TestAWSAthenaOperator(unittest.TestCase): def setUp(self): args = { @@ -211,6 +210,3 @@ def test_xcom_push_and_pull(self, mock_conn, mock_run_query, mock_check_query_st ti.run() assert ti.xcom_pull(task_ids='test_aws_athena_operator') == ATHENA_QUERY_ID - - -# pylint: enable=unused-argument diff --git a/tests/providers/amazon/aws/operators/test_batch.py b/tests/providers/amazon/aws/operators/test_batch.py index 2c5b9c83e715d..af3e0d7a1afd8 100644 --- a/tests/providers/amazon/aws/operators/test_batch.py +++ b/tests/providers/amazon/aws/operators/test_batch.py @@ -17,7 +17,6 @@ # under the License. 
# -# pylint: disable=missing-docstring import unittest from unittest import mock diff --git a/tests/providers/amazon/aws/operators/test_ecs.py b/tests/providers/amazon/aws/operators/test_ecs.py index 96717c37fe174..3dffb2ecd9fd9 100644 --- a/tests/providers/amazon/aws/operators/test_ecs.py +++ b/tests/providers/amazon/aws/operators/test_ecs.py @@ -79,7 +79,7 @@ def set_up_operator(self, aws_hook_mock, **kwargs): self.ecs.get_hook() def setUp(self): - self.set_up_operator() # pylint: disable=no-value-for-parameter + self.set_up_operator() def test_init(self): assert self.ecs.region_name == 'eu-west-1' @@ -106,7 +106,7 @@ def test_template_fields_overrides(self): @mock.patch.object(ECSOperator, '_check_success_task') def test_execute_without_failures(self, launch_type, tags, check_mock, wait_mock): - self.set_up_operator(launch_type=launch_type, tags=tags) # pylint: disable=no-value-for-parameter + self.set_up_operator(launch_type=launch_type, tags=tags) client_mock = self.aws_hook_mock.return_value.get_conn.return_value client_mock.run_task.return_value = RESPONSE_WITHOUT_FAILURES @@ -238,16 +238,16 @@ def test_host_terminated_raises(self): 'stoppedReason': 'Host EC2 (instance i-1234567890abcdef) terminated.', "containers": [ { - "containerArn": "arn:aws:ecs:us-east-1:012345678910:container/e1ed7aac-d9b2-4315-8726-d2432bf11868", # noqa: E501 # pylint: disable=line-too-long + "containerArn": "arn:aws:ecs:us-east-1:012345678910:container/e1ed7aac-d9b2-4315-8726-d2432bf11868", # noqa: E501 "lastStatus": "RUNNING", "name": "wordpress", - "taskArn": "arn:aws:ecs:us-east-1:012345678910:task/d8c67b3c-ac87-4ffe-a847-4785bc3a8b55", # noqa: E501 # pylint: disable=line-too-long + "taskArn": "arn:aws:ecs:us-east-1:012345678910:task/d8c67b3c-ac87-4ffe-a847-4785bc3a8b55", # noqa: E501 } ], "desiredStatus": "STOPPED", "lastStatus": "STOPPED", - "taskArn": "arn:aws:ecs:us-east-1:012345678910:task/d8c67b3c-ac87-4ffe-a847-4785bc3a8b55", # noqa: E501 # pylint: disable=line-too-long - "taskDefinitionArn": "arn:aws:ecs:us-east-1:012345678910:task-definition/hello_world:11", # noqa: E501 # pylint: disable=line-too-long + "taskArn": "arn:aws:ecs:us-east-1:012345678910:task/d8c67b3c-ac87-4ffe-a847-4785bc3a8b55", # noqa: E501 + "taskDefinitionArn": "arn:aws:ecs:us-east-1:012345678910:task-definition/hello_world:11", # noqa: E501 } ] } @@ -283,7 +283,7 @@ def test_check_success_task_not_raises(self): @mock.patch.object(ECSOperator, '_start_task') def test_reattach_successful(self, launch_type, tags, start_mock, check_mock, wait_mock): - self.set_up_operator(launch_type=launch_type, tags=tags) # pylint: disable=no-value-for-parameter + self.set_up_operator(launch_type=launch_type, tags=tags) client_mock = self.aws_hook_mock.return_value.get_conn.return_value client_mock.describe_task_definition.return_value = {'taskDefinition': {'family': 'f'}} client_mock.list_tasks.return_value = { diff --git a/tests/providers/amazon/aws/operators/test_glacier_system.py b/tests/providers/amazon/aws/operators/test_glacier_system.py index 99a5315504b25..c9a237ee978be 100644 --- a/tests/providers/amazon/aws/operators/test_glacier_system.py +++ b/tests/providers/amazon/aws/operators/test_glacier_system.py @@ -31,7 +31,7 @@ def setUp(self): GoogleSystemTest.create_gcs_bucket(BUCKET) def tearDown(self): - GoogleSystemTest.delete_gcs_bucket(BUCKET) # pylint: disable=no-member + GoogleSystemTest.delete_gcs_bucket(BUCKET) def test_run_example_dag(self): self.run_dag(dag_id="example_glacier_to_gcs", dag_folder=AWS_DAG_FOLDER) diff --git 
a/tests/providers/amazon/aws/operators/test_sagemaker_training.py b/tests/providers/amazon/aws/operators/test_sagemaker_training.py index 4aeca8c65e077..f7847b5609eda 100644 --- a/tests/providers/amazon/aws/operators/test_sagemaker_training.py +++ b/tests/providers/amazon/aws/operators/test_sagemaker_training.py @@ -61,7 +61,6 @@ } -# pylint: disable=unused-argument class TestSageMakerTrainingOperator(unittest.TestCase): def setUp(self): self.sagemaker = SageMakerTrainingOperator( diff --git a/tests/providers/amazon/aws/sensors/test_s3_key.py b/tests/providers/amazon/aws/sensors/test_s3_key.py index 32e168cc97c78..a675bdf5325fc 100644 --- a/tests/providers/amazon/aws/sensors/test_s3_key.py +++ b/tests/providers/amazon/aws/sensors/test_s3_key.py @@ -158,7 +158,7 @@ def test_poke(self, paginate_return_value, poke_return_value, mock_hook): mock_paginator = mock.Mock() mock_paginator.paginate.return_value = [] mock_conn = mock.Mock() - # pylint: disable=no-member + mock_conn.return_value.get_paginator.return_value = mock_paginator mock_hook.return_value.get_conn = mock_conn mock_paginator.paginate.return_value = [paginate_return_value] diff --git a/tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py b/tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py index 63246d6b5c84a..2ba0ff6377b9d 100644 --- a/tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py +++ b/tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py @@ -27,7 +27,7 @@ class DynamodbToS3Test(unittest.TestCase): def setUp(self): self.output_queue = [] - def mock_upload_file(self, Filename, Bucket, Key): # pylint: disable=unused-argument,invalid-name + def mock_upload_file(self, Filename, Bucket, Key): with open(Filename) as f: lines = f.readlines() for line in lines: diff --git a/tests/providers/apache/beam/hooks/test_beam.py b/tests/providers/apache/beam/hooks/test_beam.py index d0d713e1129e8..5945e8f2cf6f5 100644 --- a/tests/providers/apache/beam/hooks/test_beam.py +++ b/tests/providers/apache/beam/hooks/test_beam.py @@ -60,7 +60,7 @@ def test_start_python_pipeline(self, mock_runner): wait_for_done = mock_runner.return_value.wait_for_done process_line_callback = MagicMock() - hook.start_python_pipeline( # pylint: disable=no-value-for-parameter + hook.start_python_pipeline( variables=copy.deepcopy(BEAM_VARIABLES_PY), py_file=PY_FILE, py_options=PY_OPTIONS, @@ -92,7 +92,7 @@ def test_start_python_pipeline_with_custom_interpreter(self, _, py_interpreter, wait_for_done = mock_runner.return_value.wait_for_done process_line_callback = MagicMock() - hook.start_python_pipeline( # pylint: disable=no-value-for-parameter + hook.start_python_pipeline( variables=copy.deepcopy(BEAM_VARIABLES_PY), py_file=PY_FILE, py_options=PY_OPTIONS, @@ -128,7 +128,7 @@ def test_start_python_pipeline_with_non_empty_py_requirements_and_without_system mock_virtualenv.return_value = '/dummy_dir/bin/python' process_line_callback = MagicMock() - hook.start_python_pipeline( # pylint: disable=no-value-for-parameter + hook.start_python_pipeline( variables=copy.deepcopy(BEAM_VARIABLES_PY), py_file=PY_FILE, py_options=PY_OPTIONS, @@ -161,7 +161,7 @@ def test_start_python_pipeline_with_empty_py_requirements_and_without_system_pac process_line_callback = MagicMock() with self.assertRaisesRegex(AirflowException, "Invalid method invocation."): - hook.start_python_pipeline( # pylint: disable=no-value-for-parameter + hook.start_python_pipeline( variables=copy.deepcopy(BEAM_VARIABLES_PY), py_file=PY_FILE, py_options=PY_OPTIONS, @@ -178,7 +178,7 @@ 
def test_start_java_pipeline(self, mock_runner): wait_for_done = mock_runner.return_value.wait_for_done process_line_callback = MagicMock() - hook.start_java_pipeline( # pylint: disable=no-value-for-parameter + hook.start_java_pipeline( jar=JAR_FILE, variables=copy.deepcopy(BEAM_VARIABLES_JAVA), process_line_callback=process_line_callback, @@ -201,7 +201,7 @@ def test_start_java_pipeline_with_job_class(self, mock_runner): wait_for_done = mock_runner.return_value.wait_for_done process_line_callback = MagicMock() - hook.start_java_pipeline( # pylint: disable=no-value-for-parameter + hook.start_java_pipeline( jar=JAR_FILE, variables=copy.deepcopy(BEAM_VARIABLES_JAVA), job_class=JOB_CLASS, diff --git a/tests/providers/apache/cassandra/hooks/test_cassandra.py b/tests/providers/apache/cassandra/hooks/test_cassandra.py index 0a3889bf174f4..8b588e737b948 100644 --- a/tests/providers/apache/cassandra/hooks/test_cassandra.py +++ b/tests/providers/apache/cassandra/hooks/test_cassandra.py @@ -163,7 +163,7 @@ def _assert_get_lb_policy( assert isinstance(policy, expected_policy_type) if expected_child_policy_type: assert isinstance(policy._child_policy, expected_child_policy_type) - except Exception: # pylint: disable=broad-except + except Exception: thrown = True assert should_throw == thrown diff --git a/tests/providers/apache/druid/hooks/test_druid.py b/tests/providers/apache/druid/hooks/test_druid.py index fca5d064cfa53..b3637cf67493a 100644 --- a/tests/providers/apache/druid/hooks/test_druid.py +++ b/tests/providers/apache/druid/hooks/test_druid.py @@ -242,7 +242,7 @@ def test_get_pandas_df(self): df = self.db_hook().get_pandas_df(statement) assert column == df.columns[0] - for i in range(len(result_sets)): # pylint: disable=consider-using-enumerate + for i in range(len(result_sets)): assert result_sets[i][0] == df.values.tolist()[i][0] assert self.conn.close.call_count == 1 assert self.cur.close.call_count == 1 diff --git a/tests/providers/apache/hive/hooks/test_hive.py b/tests/providers/apache/hive/hooks/test_hive.py index 94ff8b7240683..fe2f0045009a9 100644 --- a/tests/providers/apache/hive/hooks/test_hive.py +++ b/tests/providers/apache/hive/hooks/test_hive.py @@ -483,7 +483,7 @@ def test_get_tables(self): # static_babynames_partitioned self.hook.metastore.__enter__().get_tables.assert_called_with( db_name='airflow', pattern='static_babynames_partitioned*' ) - # pylint: disable=no-member + self.hook.metastore.__enter__().get_table_objects_by_name.assert_called_with( 'airflow', ['static_babynames_partitioned'] ) diff --git a/tests/providers/apache/hive/transfers/test_mssql_to_hive.py b/tests/providers/apache/hive/transfers/test_mssql_to_hive.py index 881ea137465e3..3daee5082a73d 100644 --- a/tests/providers/apache/hive/transfers/test_mssql_to_hive.py +++ b/tests/providers/apache/hive/transfers/test_mssql_to_hive.py @@ -15,7 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
-# pylint: disable=no-member + import unittest from collections import OrderedDict @@ -49,19 +49,19 @@ def setUp(self): self.kwargs = dict(sql='sql', hive_table='table', task_id='test_mssql_to_hive', dag=None) def test_type_map_binary(self): - # pylint: disable=c-extension-no-member, no-member + mapped_type = MsSqlToHiveOperator(**self.kwargs).type_map(pymssql.BINARY.value) assert mapped_type == 'INT' def test_type_map_decimal(self): - # pylint: disable=c-extension-no-member, no-member + mapped_type = MsSqlToHiveOperator(**self.kwargs).type_map(pymssql.DECIMAL.value) assert mapped_type == 'FLOAT' def test_type_map_number(self): - # pylint: disable=c-extension-no-member, no-member + mapped_type = MsSqlToHiveOperator(**self.kwargs).type_map(pymssql.NUMBER.value) assert mapped_type == 'INT' diff --git a/tests/providers/apache/livy/hooks/test_livy.py b/tests/providers/apache/livy/hooks/test_livy.py index 86b7acaa6fc39..d3ebe8a1c0d7f 100644 --- a/tests/providers/apache/livy/hooks/test_livy.py +++ b/tests/providers/apache/livy/hooks/test_livy.py @@ -93,7 +93,7 @@ def test_build_body(self): driver_cores=2, driver_memory='1M', executor_memory='1m', - executor_cores='1', # noqa + executor_cores='1', num_executors='10', ) @@ -154,10 +154,10 @@ def test_validate_size_format(self): with self.subTest('numeric'): with pytest.raises(ValueError): - LivyHook._validate_size_format(1) # noqa + LivyHook._validate_size_format(1) with self.subTest('None'): - assert LivyHook._validate_size_format(None) # noqa + assert LivyHook._validate_size_format(None) def test_validate_list_of_stringables(self): with self.subTest('valid list'): @@ -192,11 +192,11 @@ def test_validate_list_of_stringables(self): with self.subTest('None'): with pytest.raises(ValueError): - LivyHook._validate_list_of_stringables(None) # noqa + LivyHook._validate_list_of_stringables(None) with self.subTest('int'): with pytest.raises(ValueError): - LivyHook._validate_list_of_stringables(1) # noqa + LivyHook._validate_list_of_stringables(1) with self.subTest('string'): with pytest.raises(ValueError): @@ -217,17 +217,17 @@ def test_validate_extra_conf(self): with self.subTest('none'): try: - LivyHook._validate_extra_conf(None) # noqa + LivyHook._validate_extra_conf(None) except ValueError: self.fail("Exception raised") with self.subTest('not a dict 1'): with pytest.raises(ValueError): - LivyHook._validate_extra_conf('k1=v1') # noqa + LivyHook._validate_extra_conf('k1=v1') with self.subTest('not a dict 2'): with pytest.raises(ValueError): - LivyHook._validate_extra_conf([('k1', 'v1'), ('k2', 0)]) # noqa + LivyHook._validate_extra_conf([('k1', 'v1'), ('k2', 0)]) with self.subTest('nested dict'): with pytest.raises(ValueError): @@ -441,7 +441,7 @@ def test_check_session_id(self): with self.subTest('None'): with pytest.raises(TypeError): - LivyHook._validate_session_id(None) # noqa + LivyHook._validate_session_id(None) with self.subTest('random string'): with pytest.raises(TypeError): diff --git a/tests/providers/apache/pinot/hooks/test_pinot.py b/tests/providers/apache/pinot/hooks/test_pinot.py index 763704fc73c13..5a26d35f550aa 100644 --- a/tests/providers/apache/pinot/hooks/test_pinot.py +++ b/tests/providers/apache/pinot/hooks/test_pinot.py @@ -263,7 +263,7 @@ def test_get_pandas_df(self): self.cur.fetchall.return_value = result_sets df = self.db_hook().get_pandas_df(statement) assert column == df.columns[0] - for i in range(len(result_sets)): # pylint: disable=consider-using-enumerate + for i in range(len(result_sets)): assert result_sets[i][0] == 
df.values.tolist()[i][0] diff --git a/tests/providers/asana/hooks/test_asana.py b/tests/providers/asana/hooks/test_asana.py index 220b2395a0d3f..c0b4307ebd7f2 100644 --- a/tests/providers/asana/hooks/test_asana.py +++ b/tests/providers/asana/hooks/test_asana.py @@ -61,9 +61,7 @@ def test_merge_create_task_parameters_default_project(self): with patch.object(AsanaHook, "get_connection", return_value=conn): hook = AsanaHook() expected_merged_params = {"name": "test", "projects": ["1"]} - self.assertEqual( - expected_merged_params, hook._merge_create_task_parameters("test", {}) # pylint: disable=W0212 - ) + self.assertEqual(expected_merged_params, hook._merge_create_task_parameters("test", {})) def test_merge_create_task_parameters_specified_project(self): """ @@ -77,7 +75,7 @@ def test_merge_create_task_parameters_specified_project(self): expected_merged_params = {"name": "test", "projects": ["1", "2"]} self.assertEqual( expected_merged_params, - hook._merge_create_task_parameters("test", {"projects": ["1", "2"]}), # pylint: disable=W0212 + hook._merge_create_task_parameters("test", {"projects": ["1", "2"]}), ) def test_merge_create_task_parameters_specified_workspace(self): @@ -90,9 +88,7 @@ def test_merge_create_task_parameters_specified_workspace(self): with patch.object(AsanaHook, "get_connection", return_value=conn): hook = AsanaHook() expected_merged_params = {"name": "test", "workspace": "1"} - self.assertEqual( - expected_merged_params, hook._merge_create_task_parameters("test", {}) # pylint: disable=W0212 - ) + self.assertEqual(expected_merged_params, hook._merge_create_task_parameters("test", {})) def test_merge_create_task_parameters_default_project_overrides_default_workspace(self): """ @@ -108,9 +104,7 @@ def test_merge_create_task_parameters_default_project_overrides_default_workspac with patch.object(AsanaHook, "get_connection", return_value=conn): hook = AsanaHook() expected_merged_params = {"name": "test", "projects": ["1"]} - self.assertEqual( - expected_merged_params, hook._merge_create_task_parameters("test", {}) # pylint: disable=W0212 - ) + self.assertEqual(expected_merged_params, hook._merge_create_task_parameters("test", {})) def test_merge_create_task_parameters_specified_project_overrides_default_workspace(self): """ @@ -128,7 +122,7 @@ def test_merge_create_task_parameters_specified_project_overrides_default_worksp expected_merged_params = {"name": "test", "projects": ["2"]} self.assertEqual( expected_merged_params, - hook._merge_create_task_parameters("test", {"projects": ["2"]}), # pylint: disable=W0212 + hook._merge_create_task_parameters("test", {"projects": ["2"]}), ) def test_merge_find_task_parameters_default_project(self): @@ -141,9 +135,7 @@ def test_merge_find_task_parameters_default_project(self): with patch.object(AsanaHook, "get_connection", return_value=conn): hook = AsanaHook() expected_merged_params = {"project": "1"} - self.assertEqual( - expected_merged_params, hook._merge_find_task_parameters({}) # pylint: disable=W0212 - ) + self.assertEqual(expected_merged_params, hook._merge_find_task_parameters({})) def test_merge_find_task_parameters_specified_project(self): """ @@ -157,7 +149,7 @@ def test_merge_find_task_parameters_specified_project(self): expected_merged_params = {"project": "2"} self.assertEqual( expected_merged_params, - hook._merge_find_task_parameters({"project": "2"}), # pylint: disable=W0212 + hook._merge_find_task_parameters({"project": "2"}), ) def test_merge_find_task_parameters_default_workspace(self): @@ -172,7 +164,7 @@ def 
test_merge_find_task_parameters_default_workspace(self): expected_merged_params = {"workspace": "1", "assignee": "1"} self.assertEqual( expected_merged_params, - hook._merge_find_task_parameters({"assignee": "1"}), # pylint: disable=W0212 + hook._merge_find_task_parameters({"assignee": "1"}), ) def test_merge_find_task_parameters_specified_workspace(self): @@ -187,7 +179,7 @@ def test_merge_find_task_parameters_specified_workspace(self): expected_merged_params = {"workspace": "2", "assignee": "1"} self.assertEqual( expected_merged_params, - hook._merge_find_task_parameters({"workspace": "2", "assignee": "1"}), # pylint: disable=W0212 + hook._merge_find_task_parameters({"workspace": "2", "assignee": "1"}), ) def test_merge_find_task_parameters_default_project_overrides_workspace(self): @@ -203,9 +195,7 @@ def test_merge_find_task_parameters_default_project_overrides_workspace(self): with patch.object(AsanaHook, "get_connection", return_value=conn): hook = AsanaHook() expected_merged_params = {"project": "1"} - self.assertEqual( - expected_merged_params, hook._merge_find_task_parameters({}) # pylint: disable=W0212 - ) + self.assertEqual(expected_merged_params, hook._merge_find_task_parameters({})) def test_merge_find_task_parameters_specified_project_overrides_workspace(self): """ @@ -223,7 +213,7 @@ def test_merge_find_task_parameters_specified_project_overrides_workspace(self): expected_merged_params = {"project": "2"} self.assertEqual( expected_merged_params, - hook._merge_find_task_parameters({"project": "2"}), # pylint: disable=W0212 + hook._merge_find_task_parameters({"project": "2"}), ) def test_merge_project_parameters(self): @@ -235,9 +225,7 @@ def test_merge_project_parameters(self): with patch.object(AsanaHook, "get_connection", return_value=conn): hook = AsanaHook() expected_merged_params = {"workspace": "1", "name": "name"} - self.assertEqual( - expected_merged_params, hook._merge_project_parameters({"name": "name"}) # pylint: disable=W0212 - ) + self.assertEqual(expected_merged_params, hook._merge_project_parameters({"name": "name"})) def test_merge_project_parameters_override(self): """ @@ -250,5 +238,5 @@ def test_merge_project_parameters_override(self): expected_merged_params = {"workspace": "2"} self.assertEqual( expected_merged_params, - hook._merge_project_parameters({"workspace": "2"}), # pylint: disable=W0212 + hook._merge_project_parameters({"workspace": "2"}), ) diff --git a/tests/providers/cncf/kubernetes/operators/test_kubernetes_pod.py b/tests/providers/cncf/kubernetes/operators/test_kubernetes_pod.py index cbd0080ae8fc1..4b7937dffca8c 100644 --- a/tests/providers/cncf/kubernetes/operators/test_kubernetes_pod.py +++ b/tests/providers/cncf/kubernetes/operators/test_kubernetes_pod.py @@ -1,4 +1,3 @@ -# pylint: disable=unused-argument # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. 
See the NOTICE file # distributed with this work for additional information diff --git a/tests/providers/docker/hooks/test_docker.py b/tests/providers/docker/hooks/test_docker.py index da4995c24d6da..2fe08b23d82f2 100644 --- a/tests/providers/docker/hooks/test_docker.py +++ b/tests/providers/docker/hooks/test_docker.py @@ -82,7 +82,7 @@ def test_get_conn_with_standard_config(self, _): ) client = hook.get_conn() assert client is not None - except Exception: # pylint: disable=broad-except + except Exception: self.fail('Could not get connection from Airflow') def test_get_conn_with_extra_config(self, _): @@ -92,7 +92,7 @@ def test_get_conn_with_extra_config(self, _): ) client = hook.get_conn() assert client is not None - except Exception: # pylint: disable=broad-except + except Exception: self.fail('Could not get connection from Airflow') def test_conn_with_standard_config_passes_parameters(self, _): @@ -100,7 +100,7 @@ def test_conn_with_standard_config_passes_parameters(self, _): docker_conn_id='docker_default', base_url='unix://var/run/docker.sock', version='auto' ) client = hook.get_conn() - client.login.assert_called_once_with( # pylint: disable=no-member + client.login.assert_called_once_with( username='some_user', password='some_p4$$w0rd', registry='some.docker.registry.com', @@ -113,7 +113,7 @@ def test_conn_with_extra_config_passes_parameters(self, _): docker_conn_id='docker_with_extras', base_url='unix://var/run/docker.sock', version='auto' ) client = hook.get_conn() - client.login.assert_called_once_with( # pylint: disable=no-member + client.login.assert_called_once_with( username='some_user', password='some_p4$$w0rd', registry='another.docker.registry.com:9876', diff --git a/tests/providers/docker/operators/test_docker.py b/tests/providers/docker/operators/test_docker.py index a2442b4b1d155..84dd9c1ca3818 100644 --- a/tests/providers/docker/operators/test_docker.py +++ b/tests/providers/docker/operators/test_docker.py @@ -158,7 +158,7 @@ def test_execute_tls(self, tls_class_mock): def test_execute_unicode_logs(self): self.client_mock.attach.return_value = ['unicode container log 😁'] - originalRaiseExceptions = logging.raiseExceptions # pylint: disable=invalid-name + originalRaiseExceptions = logging.raiseExceptions logging.raiseExceptions = True operator = DockerOperator(image='ubuntu', owner='unittest', task_id='unittest') diff --git a/tests/providers/elasticsearch/hooks/test_elasticsearch.py b/tests/providers/elasticsearch/hooks/test_elasticsearch.py index 06057b057535a..8c1251221ddce 100644 --- a/tests/providers/elasticsearch/hooks/test_elasticsearch.py +++ b/tests/providers/elasticsearch/hooks/test_elasticsearch.py @@ -39,7 +39,7 @@ class UnitTestElasticsearchHook(ElasticsearchHook): @mock.patch('airflow.providers.elasticsearch.hooks.elasticsearch.connect') def test_get_conn(self, mock_connect): - self.db_hook.test_conn_id = 'non_default' # pylint: disable=attribute-defined-outside-init + self.db_hook.test_conn_id = 'non_default' self.db_hook.get_conn() mock_connect.assert_called_with(host='localhost', port=9200, scheme='http', user=None, password=None) diff --git a/tests/providers/elasticsearch/log/elasticmock/__init__.py b/tests/providers/elasticsearch/log/elasticmock/__init__.py index c2a9080ab3a92..e60eef3d230a0 100644 --- a/tests/providers/elasticsearch/log/elasticmock/__init__.py +++ b/tests/providers/elasticsearch/log/elasticmock/__init__.py @@ -49,7 +49,7 @@ ELASTIC_INSTANCES = {} # type: Dict[str, FakeElasticsearch] -def _get_elasticmock(hosts=None, *args, **kwargs): 
# pylint: disable=unused-argument +def _get_elasticmock(hosts=None, *args, **kwargs): host = _normalize_hosts(hosts)[0] elastic_key = f"{host.get('host', 'localhost')}:{host.get('port', 9200)}" diff --git a/tests/providers/elasticsearch/log/elasticmock/fake_elasticsearch.py b/tests/providers/elasticsearch/log/elasticmock/fake_elasticsearch.py index cf81a1a79e259..d8498c540775a 100644 --- a/tests/providers/elasticsearch/log/elasticmock/fake_elasticsearch.py +++ b/tests/providers/elasticsearch/log/elasticmock/fake_elasticsearch.py @@ -47,7 +47,6 @@ from .utilities import get_random_id -# pylint: disable=redefined-builtin,unused-argument class FakeElasticsearch(Elasticsearch): __documents_dict = None @@ -332,7 +331,7 @@ def suggest(self, body, index=None): ] return result_dict - def _find_match(self, index, doc_type, body): # pylint: disable=unused-argument + def _find_match(self, index, doc_type, body): searchable_indexes = self._normalize_index_to_list(index) searchable_doc_types = self._normalize_doc_type_to_list(doc_type) @@ -396,6 +395,3 @@ def _normalize_doc_type_to_list(doc_type): raise ValueError("Invalid param 'index'") return searchable_doc_types - - -# pylint: enable=redefined-builtin diff --git a/tests/providers/elasticsearch/log/test_es_task_handler.py b/tests/providers/elasticsearch/log/test_es_task_handler.py index 40257221d12e4..9a5ec3b0ef1c8 100644 --- a/tests/providers/elasticsearch/log/test_es_task_handler.py +++ b/tests/providers/elasticsearch/log/test_es_task_handler.py @@ -38,7 +38,7 @@ from .elasticmock import elasticmock -class TestElasticsearchTaskHandler(unittest.TestCase): # pylint: disable=too-many-instance-attributes +class TestElasticsearchTaskHandler(unittest.TestCase): DAG_ID = 'dag_for_testing_file_task_handler' TASK_ID = 'task_for_testing_file_log_handler' EXECUTION_DATE = datetime(2016, 1, 1) @@ -68,9 +68,7 @@ def setUp(self): self.offset_field, ) - self.es = elasticsearch.Elasticsearch( # pylint: disable=invalid-name - hosts=[{'host': 'localhost', 'port': 9200}] - ) + self.es = elasticsearch.Elasticsearch(hosts=[{'host': 'localhost', 'port': 9200}]) self.index_name = 'test_index' self.doc_type = 'log' self.test_message = 'some random stuff' diff --git a/tests/providers/google/cloud/_internal_client/test_secret_manager_client.py b/tests/providers/google/cloud/_internal_client/test_secret_manager_client.py index f28b0d2393f0e..4e4b5d87e351f 100644 --- a/tests/providers/google/cloud/_internal_client/test_secret_manager_client.py +++ b/tests/providers/google/cloud/_internal_client/test_secret_manager_client.py @@ -1,4 +1,3 @@ -# pylint: disable=no-member # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. 
See the NOTICE file # distributed with this work for additional information @@ -21,7 +20,7 @@ from google.api_core.exceptions import NotFound, PermissionDenied from google.cloud.secretmanager_v1.types import AccessSecretVersionResponse -from airflow.providers.google.cloud._internal_client.secret_manager_client import _SecretManagerClient # noqa +from airflow.providers.google.cloud._internal_client.secret_manager_client import _SecretManagerClient from airflow.version import version INTERNAL_CLIENT_MODULE = "airflow.providers.google.cloud._internal_client.secret_manager_client" diff --git a/tests/providers/google/cloud/hooks/test_automl.py b/tests/providers/google/cloud/hooks/test_automl.py index c9de712b520b3..8685fa2391391 100644 --- a/tests/providers/google/cloud/hooks/test_automl.py +++ b/tests/providers/google/cloud/hooks/test_automl.py @@ -73,7 +73,7 @@ def test_get_conn(self, mock_automl_client, mock_client_info): ) @mock.patch("airflow.providers.google.cloud.hooks.automl.PredictionServiceClient") def test_prediction_client(self, mock_prediction_client, mock_client_info): - client = self.hook.prediction_client # pylint: disable=unused-variable # noqa + client = self.hook.prediction_client # noqa mock_prediction_client.assert_called_once_with(credentials=CREDENTIALS, client_info=CLIENT_INFO) @mock.patch("airflow.providers.google.cloud.hooks.automl.AutoMlClient.create_model") diff --git a/tests/providers/google/cloud/hooks/test_bigquery.py b/tests/providers/google/cloud/hooks/test_bigquery.py index 79057419b0cf6..8500694e6d04d 100644 --- a/tests/providers/google/cloud/hooks/test_bigquery.py +++ b/tests/providers/google/cloud/hooks/test_bigquery.py @@ -15,7 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
-# pylint: disable=not-callable + import re import unittest @@ -1234,7 +1234,7 @@ def test_description(self, mock_get_service): @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_service") def test_close(self, mock_get_service): bq_cursor = self.hook.get_cursor() - result = bq_cursor.close() # pylint: disable=assignment-from-no-return + result = bq_cursor.close() assert result is None @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_service") @@ -1745,7 +1745,6 @@ def test_create_empty_table_with_kms(self, mock_bq_client, mock_table): retry=DEFAULT_RETRY, ) - # pylint: disable=too-many-locals @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.create_empty_table") def test_create_external_table_with_kms(self, mock_create): external_project_dataset_table = f"{PROJECT_ID}.{DATASET_ID}.{TABLE_ID}" diff --git a/tests/providers/google/cloud/hooks/test_cloud_build.py b/tests/providers/google/cloud/hooks/test_cloud_build.py index ebf7163206757..a66ae6be2c24e 100644 --- a/tests/providers/google/cloud/hooks/test_cloud_build.py +++ b/tests/providers/google/cloud/hooks/test_cloud_build.py @@ -129,7 +129,7 @@ def test_error_operation(self, _, get_conn_mock, mock_project_id): execute_mock = mock.Mock(**{"side_effect": [TEST_WAITING_OPERATION, TEST_ERROR_OPERATION]}) service_mock.operations.return_value.get.return_value.execute = execute_mock with pytest.raises(AirflowException, match="error"): - self.hook.create_build(body={}) # pylint: disable=no-value-for-parameter + self.hook.create_build(body={}) class TestGcpComputeHookWithDefaultProjectIdFromConnection(unittest.TestCase): @@ -171,7 +171,7 @@ def test_build_immediately_complete(self, get_conn_mock, mock_project_id): service_mock.operations.return_value.get.return_value.execute.return_value = TEST_DONE_OPERATION - result = self.hook.create_build(body={}) # pylint: disable=no-value-for-parameter + result = self.hook.create_build(body={}) service_mock.projects.return_value.builds.return_value.create.assert_called_once_with( body={}, projectId='example-project' @@ -202,7 +202,7 @@ def test_waiting_operation(self, _, get_conn_mock, mock_project_id): ) service_mock.operations.return_value.get.return_value.execute = execute_mock - result = self.hook.create_build(body={}) # pylint: disable=no-value-for-parameter + result = self.hook.create_build(body={}) assert result == TEST_BUILD @@ -223,7 +223,7 @@ def test_error_operation(self, _, get_conn_mock, mock_project_id): execute_mock = mock.Mock(**{"side_effect": [TEST_WAITING_OPERATION, TEST_ERROR_OPERATION]}) service_mock.operations.return_value.get.return_value.execute = execute_mock with pytest.raises(AirflowException, match="error"): - self.hook.create_build(body={}) # pylint: disable=no-value-for-parameter + self.hook.create_build(body={}) class TestCloudBuildHookWithoutProjectId(unittest.TestCase): @@ -254,7 +254,7 @@ def test_cloud_build_client_creation(self, mock_build, mock_authorize): @mock.patch("airflow.providers.google.cloud.hooks.cloud_build.CloudBuildHook.get_conn") def test_create_build(self, mock_get_conn, mock_project_id): with pytest.raises(AirflowException) as ctx: - self.hook.create_build(body={}) # pylint: disable=no-value-for-parameter + self.hook.create_build(body={}) assert ( "The project id must be passed either as keyword project_id parameter or as project_id extra in " diff --git a/tests/providers/google/cloud/hooks/test_cloud_memorystore.py b/tests/providers/google/cloud/hooks/test_cloud_memorystore.py index 
9e6f442236b4a..c2f9c33b50067 100644 --- a/tests/providers/google/cloud/hooks/test_cloud_memorystore.py +++ b/tests/providers/google/cloud/hooks/test_cloud_memorystore.py @@ -76,7 +76,7 @@ def setUp( @mock.patch("airflow.providers.google.cloud.hooks.cloud_memorystore.CloudMemorystoreHook.get_conn") def test_create_instance_when_exists(self, mock_get_conn, mock_project_id): mock_get_conn.return_value.get_instance.return_value = Instance(name=TEST_NAME) - result = self.hook.create_instance( # pylint: disable=no-value-for-parameter + result = self.hook.create_instance( location=TEST_LOCATION, instance_id=TEST_INSTANCE_ID, instance=Instance(name=TEST_NAME), @@ -104,7 +104,7 @@ def test_create_instance_when_not_exists(self, mock_get_conn, mock_project_id): Instance(name=TEST_NAME), ] mock_get_conn.return_value.create_instance.return_value.result.return_value = Instance(name=TEST_NAME) - result = self.hook.create_instance( # pylint: disable=no-value-for-parameter + result = self.hook.create_instance( location=TEST_LOCATION, instance_id=TEST_INSTANCE_ID, instance=Instance(name=TEST_NAME), @@ -140,7 +140,7 @@ def test_create_instance_when_not_exists(self, mock_get_conn, mock_project_id): ) @mock.patch("airflow.providers.google.cloud.hooks.cloud_memorystore.CloudMemorystoreHook.get_conn") def test_delete_instance(self, mock_get_conn, mock_project_id): - self.hook.delete_instance( # pylint: disable=no-value-for-parameter + self.hook.delete_instance( location=TEST_LOCATION, instance=TEST_INSTANCE_ID, retry=TEST_RETRY, @@ -161,7 +161,7 @@ def test_delete_instance(self, mock_get_conn, mock_project_id): ) @mock.patch("airflow.providers.google.cloud.hooks.cloud_memorystore.CloudMemorystoreHook.get_conn") def test_get_instance(self, mock_get_conn, mock_project_id): - self.hook.get_instance( # pylint: disable=no-value-for-parameter + self.hook.get_instance( location=TEST_LOCATION, instance=TEST_INSTANCE_ID, retry=TEST_RETRY, @@ -182,7 +182,7 @@ def test_get_instance(self, mock_get_conn, mock_project_id): ) @mock.patch("airflow.providers.google.cloud.hooks.cloud_memorystore.CloudMemorystoreHook.get_conn") def test_list_instances(self, mock_get_conn, mock_project_id): - self.hook.list_instances( # pylint: disable=no-value-for-parameter + self.hook.list_instances( location=TEST_LOCATION, page_size=TEST_PAGE_SIZE, retry=TEST_RETRY, @@ -203,7 +203,7 @@ def test_list_instances(self, mock_get_conn, mock_project_id): ) @mock.patch("airflow.providers.google.cloud.hooks.cloud_memorystore.CloudMemorystoreHook.get_conn") def test_update_instance(self, mock_get_conn, mock_project_id): - self.hook.update_instance( # pylint: disable=no-value-for-parameter + self.hook.update_instance( update_mask=TEST_UPDATE_MASK, instance=Instance(name=TEST_NAME), location=TEST_LOCATION, @@ -437,7 +437,7 @@ def test_update_instance(self, mock_get_conn): @mock.patch("airflow.providers.google.cloud.hooks.cloud_memorystore.CloudMemorystoreHook.get_conn") def test_update_instance_without_project_id(self, mock_get_conn, mock_project_id): with pytest.raises(AirflowException): - self.hook.update_instance( # pylint: disable=no-value-for-parameter + self.hook.update_instance( update_mask=TEST_UPDATE_MASK, instance=Instance(name=TEST_NAME), retry=TEST_RETRY, @@ -466,7 +466,7 @@ def setUp( ) def test_create_instance_when_exists(self, mock_get_conn, mock_project_id): mock_get_conn.return_value.get_instance.return_value = cloud_memcache.Instance(name=TEST_NAME) - result = self.hook.create_instance( # pylint: disable=no-value-for-parameter + result = 
self.hook.create_instance( location=TEST_LOCATION, instance_id=TEST_INSTANCE_ID, instance=cloud_memcache.Instance(name=TEST_NAME), @@ -495,7 +495,7 @@ def test_create_instance_when_not_exists(self, mock_get_conn, mock_project_id): mock_get_conn.return_value.create_instance.return_value.result.return_value = cloud_memcache.Instance( name=TEST_NAME ) - result = self.hook.create_instance( # pylint: disable=no-value-for-parameter + result = self.hook.create_instance( location=TEST_LOCATION, instance_id=TEST_INSTANCE_ID, instance=cloud_memcache.Instance(name=TEST_NAME), @@ -531,7 +531,7 @@ def test_create_instance_when_not_exists(self, mock_get_conn, mock_project_id): "airflow.providers.google.cloud.hooks.cloud_memorystore.CloudMemorystoreMemcachedHook.get_conn" ) def test_delete_instance(self, mock_get_conn, mock_project_id): - self.hook.delete_instance( # pylint: disable=no-value-for-parameter + self.hook.delete_instance( location=TEST_LOCATION, instance=TEST_INSTANCE_ID, retry=TEST_RETRY, @@ -551,7 +551,7 @@ def test_delete_instance(self, mock_get_conn, mock_project_id): "airflow.providers.google.cloud.hooks.cloud_memorystore.CloudMemorystoreMemcachedHook.get_conn" ) def test_get_instance(self, mock_get_conn, mock_project_id): - self.hook.get_instance( # pylint: disable=no-value-for-parameter + self.hook.get_instance( location=TEST_LOCATION, instance=TEST_INSTANCE_ID, retry=TEST_RETRY, @@ -571,7 +571,7 @@ def test_get_instance(self, mock_get_conn, mock_project_id): "airflow.providers.google.cloud.hooks.cloud_memorystore.CloudMemorystoreMemcachedHook.get_conn" ) def test_list_instances(self, mock_get_conn, mock_project_id): - self.hook.list_instances( # pylint: disable=no-value-for-parameter + self.hook.list_instances( location=TEST_LOCATION, retry=TEST_RETRY, timeout=TEST_TIMEOUT, @@ -593,7 +593,7 @@ def test_list_instances(self, mock_get_conn, mock_project_id): "airflow.providers.google.cloud.hooks.cloud_memorystore.CloudMemorystoreMemcachedHook.get_conn" ) def test_update_instance(self, mock_get_conn, mock_project_id): - self.hook.update_instance( # pylint: disable=no-value-for-parameter + self.hook.update_instance( update_mask=TEST_UPDATE_MASK_MEMCACHED, instance=cloud_memcache.Instance(name=TEST_NAME), location=TEST_LOCATION, diff --git a/tests/providers/google/cloud/hooks/test_cloud_sql.py b/tests/providers/google/cloud/hooks/test_cloud_sql.py index 9e9ec70b4db31..2ee19394cdc4f 100644 --- a/tests/providers/google/cloud/hooks/test_cloud_sql.py +++ b/tests/providers/google/cloud/hooks/test_cloud_sql.py @@ -16,7 +16,6 @@ # specific language governing permissions and limitations # under the License. 
-# pylint: disable=too-many-lines import json import unittest @@ -54,9 +53,7 @@ def test_instance_import_exception(self, mock_get_credentials): side_effect=HttpError(resp=httplib2.Response({'status': 400}), content=b'Error content') ) with pytest.raises(AirflowException) as ctx: - self.cloudsql_hook.import_instance( # pylint: disable=no-value-for-parameter - instance='instance', body={} - ) + self.cloudsql_hook.import_instance(instance='instance', body={}) err = ctx.value assert "Importing instance " in str(err) assert 1 == mock_get_credentials.call_count @@ -70,9 +67,7 @@ def test_instance_export_exception(self, mock_get_credentials): side_effect=HttpError(resp=httplib2.Response({'status': 400}), content=b'Error content') ) with pytest.raises(HttpError) as ctx: - self.cloudsql_hook.export_instance( # pylint: disable=no-value-for-parameter - instance='instance', body={} - ) + self.cloudsql_hook.export_instance(instance='instance', body={}) err = ctx.value assert 400 == err.resp.status assert 1 == mock_get_credentials.call_count @@ -88,9 +83,7 @@ def test_instance_import(self, wait_for_operation_to_complete, get_conn, mock_ge execute_method = import_method.return_value.execute execute_method.return_value = {"name": "operation_id"} wait_for_operation_to_complete.return_value = None - self.cloudsql_hook.import_instance( # pylint: disable=no-value-for-parameter - instance='instance', body={} - ) + self.cloudsql_hook.import_instance(instance='instance', body={}) import_method.assert_called_once_with(body={}, instance='instance', project='example-project') execute_method.assert_called_once_with(num_retries=5) @@ -110,9 +103,7 @@ def test_instance_export(self, wait_for_operation_to_complete, get_conn, mock_ge execute_method = export_method.return_value.execute execute_method.return_value = {"name": "operation_id"} wait_for_operation_to_complete.return_value = None - self.cloudsql_hook.export_instance( # pylint: disable=no-value-for-parameter - instance='instance', body={} - ) + self.cloudsql_hook.export_instance(instance='instance', body={}) export_method.assert_called_once_with(body={}, instance='instance', project='example-project') execute_method.assert_called_once_with(num_retries=5) @@ -159,7 +150,7 @@ def test_get_instance(self, wait_for_operation_to_complete, get_conn, mock_get_c execute_method = get_method.return_value.execute execute_method.return_value = {"name": "instance"} wait_for_operation_to_complete.return_value = None - res = self.cloudsql_hook.get_instance(instance='instance') # pylint: disable=no-value-for-parameter + res = self.cloudsql_hook.get_instance(instance='instance') assert res is not None assert 'instance' == res['name'] get_method.assert_called_once_with(instance='instance', project='example-project') @@ -178,7 +169,7 @@ def test_create_instance(self, wait_for_operation_to_complete, get_conn, mock_ge execute_method = insert_method.return_value.execute execute_method.return_value = {"name": "operation_id"} wait_for_operation_to_complete.return_value = None - self.cloudsql_hook.create_instance(body={}) # pylint: disable=no-value-for-parameter + self.cloudsql_hook.create_instance(body={}) insert_method.assert_called_once_with(body={}, project='example-project') execute_method.assert_called_once_with(num_retries=5) @@ -212,7 +203,7 @@ def test_create_instance_with_in_progress_retry( {"name": "operation_id"}, ] wait_for_operation_to_complete.return_value = None - self.cloudsql_hook.create_instance(body={}) # pylint: disable=no-value-for-parameter + 
self.cloudsql_hook.create_instance(body={}) assert 1 == mock_get_credentials.call_count assert 2 == insert_method.call_count @@ -246,9 +237,7 @@ def test_patch_instance_with_in_progress_retry( {"name": "operation_id"}, ] wait_for_operation_to_complete.return_value = None - self.cloudsql_hook.patch_instance( # pylint: disable=no-value-for-parameter - instance='instance', body={} - ) + self.cloudsql_hook.patch_instance(instance='instance', body={}) assert 1 == mock_get_credentials.call_count assert 2 == patch_method.call_count @@ -268,9 +257,7 @@ def test_patch_instance(self, wait_for_operation_to_complete, get_conn, mock_get execute_method = patch_method.return_value.execute execute_method.return_value = {"name": "operation_id"} wait_for_operation_to_complete.return_value = None - self.cloudsql_hook.patch_instance( # pylint: disable=no-value-for-parameter - instance='instance', body={} - ) + self.cloudsql_hook.patch_instance(instance='instance', body={}) patch_method.assert_called_once_with(body={}, instance='instance', project='example-project') execute_method.assert_called_once_with(num_retries=5) @@ -290,7 +277,7 @@ def test_delete_instance(self, wait_for_operation_to_complete, get_conn, mock_ge execute_method = delete_method.return_value.execute execute_method.return_value = {"name": "operation_id"} wait_for_operation_to_complete.return_value = None - self.cloudsql_hook.delete_instance(instance='instance') # pylint: disable=no-value-for-parameter + self.cloudsql_hook.delete_instance(instance='instance') delete_method.assert_called_once_with(instance='instance', project='example-project') execute_method.assert_called_once_with(num_retries=5) @@ -324,7 +311,7 @@ def test_delete_instance_with_in_progress_retry( {"name": "operation_id"}, ] wait_for_operation_to_complete.return_value = None - self.cloudsql_hook.delete_instance(instance='instance') # pylint: disable=no-value-for-parameter + self.cloudsql_hook.delete_instance(instance='instance') assert 1 == mock_get_credentials.call_count assert 2 == delete_method.call_count @@ -344,9 +331,7 @@ def test_get_database(self, wait_for_operation_to_complete, get_conn, mock_get_c execute_method = get_method.return_value.execute execute_method.return_value = {"name": "database"} wait_for_operation_to_complete.return_value = None - res = self.cloudsql_hook.get_database( # pylint: disable=no-value-for-parameter - database='database', instance='instance' - ) + res = self.cloudsql_hook.get_database(database='database', instance='instance') assert res is not None assert 'database' == res['name'] get_method.assert_called_once_with( @@ -367,9 +352,7 @@ def test_create_database(self, wait_for_operation_to_complete, get_conn, mock_ge execute_method = insert_method.return_value.execute execute_method.return_value = {"name": "operation_id"} wait_for_operation_to_complete.return_value = None - self.cloudsql_hook.create_database( # pylint: disable=no-value-for-parameter - instance='instance', body={} - ) + self.cloudsql_hook.create_database(instance='instance', body={}) insert_method.assert_called_once_with(body={}, instance='instance', project='example-project') execute_method.assert_called_once_with(num_retries=5) @@ -403,9 +386,7 @@ def test_create_database_with_in_progress_retry( {"name": "operation_id"}, ] wait_for_operation_to_complete.return_value = None - self.cloudsql_hook.create_database( # pylint: disable=no-value-for-parameter - instance='instance', body={} - ) + self.cloudsql_hook.create_database(instance='instance', body={}) assert 1 == 
mock_get_credentials.call_count assert 2 == insert_method.call_count @@ -425,9 +406,7 @@ def test_patch_database(self, wait_for_operation_to_complete, get_conn, mock_get execute_method = patch_method.return_value.execute execute_method.return_value = {"name": "operation_id"} wait_for_operation_to_complete.return_value = None - self.cloudsql_hook.patch_database( # pylint: disable=no-value-for-parameter - instance='instance', database='database', body={} - ) + self.cloudsql_hook.patch_database(instance='instance', database='database', body={}) patch_method.assert_called_once_with( body={}, database='database', instance='instance', project='example-project' @@ -463,9 +442,7 @@ def test_patch_database_with_in_progress_retry( {"name": "operation_id"}, ] wait_for_operation_to_complete.return_value = None - self.cloudsql_hook.patch_database( # pylint: disable=no-value-for-parameter - instance='instance', database='database', body={} - ) + self.cloudsql_hook.patch_database(instance='instance', database='database', body={}) assert 1 == mock_get_credentials.call_count assert 2 == patch_method.call_count @@ -485,9 +462,7 @@ def test_delete_database(self, wait_for_operation_to_complete, get_conn, mock_ge execute_method = delete_method.return_value.execute execute_method.return_value = {"name": "operation_id"} wait_for_operation_to_complete.return_value = None - self.cloudsql_hook.delete_database( # pylint: disable=no-value-for-parameter - instance='instance', database='database' - ) + self.cloudsql_hook.delete_database(instance='instance', database='database') delete_method.assert_called_once_with( database='database', instance='instance', project='example-project' @@ -523,9 +498,7 @@ def test_delete_database_with_in_progress_retry( {"name": "operation_id"}, ] wait_for_operation_to_complete.return_value = None - self.cloudsql_hook.delete_database( # pylint: disable=no-value-for-parameter - instance='instance', database='database' - ) + self.cloudsql_hook.delete_database(instance='instance', database='database') assert 1 == mock_get_credentials.call_count assert 2 == delete_method.call_count diff --git a/tests/providers/google/cloud/hooks/test_cloud_storage_transfer_service.py b/tests/providers/google/cloud/hooks/test_cloud_storage_transfer_service.py index 3b423177ee26e..f2fe78863358d 100644 --- a/tests/providers/google/cloud/hooks/test_cloud_storage_transfer_service.py +++ b/tests/providers/google/cloud/hooks/test_cloud_storage_transfer_service.py @@ -132,7 +132,6 @@ def setUp(self): 'airflow.providers.google.cloud.hooks.cloud_storage_transfer_service' '.CloudDataTransferServiceHook.get_conn' ) - # pylint: disable=unused-argument def test_pass_name_on_create_job( self, get_conn: MagicMock, @@ -450,9 +449,7 @@ def test_wait_for_transfer_job_failed(self, mock_get_conn, mock_sleep, mock_proj 'airflow.providers.google.cloud.hooks.cloud_storage_transfer_service' '.CloudDataTransferServiceHook.get_conn' ) - def test_wait_for_transfer_job_expect_failed( - self, get_conn, mock_sleep, mock_project_id - ): # pylint: disable=unused-argument + def test_wait_for_transfer_job_expect_failed(self, get_conn, mock_sleep, mock_project_id): list_method = get_conn.return_value.transferOperations.return_value.list list_execute_method = list_method.return_value.execute list_execute_method.return_value = { @@ -598,9 +595,7 @@ def test_get_transfer_job(self, get_conn, mock_project_id): get_method = get_conn.return_value.transferJobs.return_value.get execute_method = get_method.return_value.execute execute_method.return_value 
= TEST_TRANSFER_JOB - res = self.gct_hook.get_transfer_job( # pylint: disable=no-value-for-parameter - job_name=TEST_TRANSFER_JOB_NAME - ) + res = self.gct_hook.get_transfer_job(job_name=TEST_TRANSFER_JOB_NAME) assert res is not None assert TEST_TRANSFER_JOB_NAME == res[NAME] get_method.assert_called_once_with(jobName=TEST_TRANSFER_JOB_NAME, projectId='example-project') @@ -806,9 +801,7 @@ def test_get_transfer_job(self, get_conn, mock_project_id): execute_method = get_method.return_value.execute execute_method.return_value = TEST_TRANSFER_JOB with pytest.raises(AirflowException) as ctx: - self.gct_hook.get_transfer_job( # pylint: disable=no-value-for-parameter - job_name=TEST_TRANSFER_JOB_NAME - ) + self.gct_hook.get_transfer_job(job_name=TEST_TRANSFER_JOB_NAME) assert ( 'The project id must be passed either as keyword project_id ' 'parameter or as project_id extra in Google Cloud connection definition. ' @@ -896,11 +889,9 @@ def test_update_transfer_job(self, get_conn, mock_project_id): 'airflow.providers.google.cloud.hooks.cloud_storage_transfer_service' '.CloudDataTransferServiceHook.get_conn' ) - def test_delete_transfer_job(self, get_conn, mock_project_id): # pylint: disable=unused-argument + def test_delete_transfer_job(self, get_conn, mock_project_id): with pytest.raises(AirflowException) as ctx: - self.gct_hook.delete_transfer_job( # pylint: disable=no-value-for-parameter - job_name=TEST_TRANSFER_JOB_NAME - ) + self.gct_hook.delete_transfer_job(job_name=TEST_TRANSFER_JOB_NAME) assert ( 'The project id must be passed either as keyword project_id parameter or as project_id extra in ' diff --git a/tests/providers/google/cloud/hooks/test_compute.py b/tests/providers/google/cloud/hooks/test_compute.py index 94692b1ed2ab7..2f53b4b6c89a5 100644 --- a/tests/providers/google/cloud/hooks/test_compute.py +++ b/tests/providers/google/cloud/hooks/test_compute.py @@ -16,7 +16,6 @@ # specific language governing permissions and limitations # under the License. 
-# pylint: disable=too-many-lines import unittest from unittest import mock diff --git a/tests/providers/google/cloud/hooks/test_datacatalog.py b/tests/providers/google/cloud/hooks/test_datacatalog.py index 99d785fa61637..465d4c98faf57 100644 --- a/tests/providers/google/cloud/hooks/test_datacatalog.py +++ b/tests/providers/google/cloud/hooks/test_datacatalog.py @@ -177,7 +177,7 @@ def setUp( ) @mock.patch("airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.get_conn") def test_create_entry(self, mock_get_conn, mock_get_creds_and_project_id) -> None: - self.hook.create_entry( # pylint: disable=no-value-for-parameter + self.hook.create_entry( location=TEST_LOCATION, entry_group=TEST_ENTRY_GROUP_ID, entry_id=TEST_ENTRY_ID, @@ -203,7 +203,7 @@ def test_create_entry(self, mock_get_conn, mock_get_creds_and_project_id) -> Non ) @mock.patch("airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.get_conn") def test_create_entry_group(self, mock_get_conn, mock_get_creds_and_project_id) -> None: - self.hook.create_entry_group( # pylint: disable=no-value-for-parameter + self.hook.create_entry_group( location=TEST_LOCATION, entry_group_id=TEST_ENTRY_GROUP_ID, entry_group=TEST_ENTRY_GROUP, @@ -228,7 +228,7 @@ def test_create_entry_group(self, mock_get_conn, mock_get_creds_and_project_id) ) @mock.patch("airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.get_conn") def test_create_tag(self, mock_get_conn, mock_get_creds_and_project_id) -> None: - self.hook.create_tag( # pylint: disable=no-value-for-parameter + self.hook.create_tag( location=TEST_LOCATION, entry_group=TEST_ENTRY_GROUP_ID, entry=TEST_ENTRY_ID, @@ -254,7 +254,7 @@ def test_create_tag(self, mock_get_conn, mock_get_creds_and_project_id) -> None: ) @mock.patch("airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.get_conn") def test_create_tag_protobuff(self, mock_get_conn, mock_get_creds_and_project_id) -> None: - self.hook.create_tag( # pylint: disable=no-value-for-parameter + self.hook.create_tag( location=TEST_LOCATION, entry_group=TEST_ENTRY_GROUP_ID, entry=TEST_ENTRY_ID, @@ -280,7 +280,7 @@ def test_create_tag_protobuff(self, mock_get_conn, mock_get_creds_and_project_id ) @mock.patch("airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.get_conn") def test_create_tag_template(self, mock_get_conn, mock_get_creds_and_project_id) -> None: - self.hook.create_tag_template( # pylint: disable=no-value-for-parameter + self.hook.create_tag_template( location=TEST_LOCATION, tag_template_id=TEST_TAG_TEMPLATE_ID, tag_template=TEST_TAG_TEMPLATE, @@ -305,7 +305,7 @@ def test_create_tag_template(self, mock_get_conn, mock_get_creds_and_project_id) ) @mock.patch("airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.get_conn") def test_create_tag_template_field(self, mock_get_conn, mock_get_creds_and_project_id) -> None: - self.hook.create_tag_template_field( # pylint: disable=no-value-for-parameter + self.hook.create_tag_template_field( location=TEST_LOCATION, tag_template=TEST_TAG_TEMPLATE_ID, tag_template_field_id=TEST_TAG_TEMPLATE_FIELD_ID, @@ -331,7 +331,7 @@ def test_create_tag_template_field(self, mock_get_conn, mock_get_creds_and_proje ) @mock.patch("airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.get_conn") def test_delete_entry(self, mock_get_conn, mock_get_creds_and_project_id) -> None: - self.hook.delete_entry( # pylint: disable=no-value-for-parameter + self.hook.delete_entry( location=TEST_LOCATION, 
entry_group=TEST_ENTRY_GROUP_ID, entry=TEST_ENTRY_ID, @@ -354,7 +354,7 @@ def test_delete_entry(self, mock_get_conn, mock_get_creds_and_project_id) -> Non ) @mock.patch("airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.get_conn") def test_delete_entry_group(self, mock_get_conn, mock_get_creds_and_project_id) -> None: - self.hook.delete_entry_group( # pylint: disable=no-value-for-parameter + self.hook.delete_entry_group( location=TEST_LOCATION, entry_group=TEST_ENTRY_GROUP_ID, retry=TEST_RETRY, @@ -376,7 +376,7 @@ def test_delete_entry_group(self, mock_get_conn, mock_get_creds_and_project_id) ) @mock.patch("airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.get_conn") def test_delete_tag(self, mock_get_conn, mock_get_creds_and_project_id) -> None: - self.hook.delete_tag( # pylint: disable=no-value-for-parameter + self.hook.delete_tag( location=TEST_LOCATION, entry_group=TEST_ENTRY_GROUP_ID, entry=TEST_ENTRY_ID, @@ -400,7 +400,7 @@ def test_delete_tag(self, mock_get_conn, mock_get_creds_and_project_id) -> None: ) @mock.patch("airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.get_conn") def test_delete_tag_template(self, mock_get_conn, mock_get_creds_and_project_id) -> None: - self.hook.delete_tag_template( # pylint: disable=no-value-for-parameter + self.hook.delete_tag_template( location=TEST_LOCATION, tag_template=TEST_TAG_TEMPLATE_ID, force=TEST_FORCE, @@ -421,7 +421,7 @@ def test_delete_tag_template(self, mock_get_conn, mock_get_creds_and_project_id) ) @mock.patch("airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.get_conn") def test_delete_tag_template_field(self, mock_get_conn, mock_get_creds_and_project_id) -> None: - self.hook.delete_tag_template_field( # pylint: disable=no-value-for-parameter + self.hook.delete_tag_template_field( location=TEST_LOCATION, tag_template=TEST_TAG_TEMPLATE_ID, field=TEST_TAG_TEMPLATE_FIELD_ID, @@ -446,7 +446,7 @@ def test_delete_tag_template_field(self, mock_get_conn, mock_get_creds_and_proje ) @mock.patch("airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.get_conn") def test_get_entry(self, mock_get_conn, mock_get_creds_and_project_id) -> None: - self.hook.get_entry( # pylint: disable=no-value-for-parameter + self.hook.get_entry( location=TEST_LOCATION, entry_group=TEST_ENTRY_GROUP_ID, entry=TEST_ENTRY_ID, @@ -469,7 +469,7 @@ def test_get_entry(self, mock_get_conn, mock_get_creds_and_project_id) -> None: ) @mock.patch("airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.get_conn") def test_get_entry_group(self, mock_get_conn, mock_get_creds_and_project_id) -> None: - self.hook.get_entry_group( # pylint: disable=no-value-for-parameter + self.hook.get_entry_group( location=TEST_LOCATION, entry_group=TEST_ENTRY_GROUP_ID, read_mask=TEST_READ_MASK, @@ -493,7 +493,7 @@ def test_get_entry_group(self, mock_get_conn, mock_get_creds_and_project_id) -> ) @mock.patch("airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.get_conn") def test_get_tag_template(self, mock_get_conn, mock_get_creds_and_project_id) -> None: - self.hook.get_tag_template( # pylint: disable=no-value-for-parameter + self.hook.get_tag_template( location=TEST_LOCATION, tag_template=TEST_TAG_TEMPLATE_ID, retry=TEST_RETRY, @@ -515,7 +515,7 @@ def test_get_tag_template(self, mock_get_conn, mock_get_creds_and_project_id) -> ) @mock.patch("airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.get_conn") def test_list_tags(self, mock_get_conn, 
mock_get_creds_and_project_id) -> None: - self.hook.list_tags( # pylint: disable=no-value-for-parameter + self.hook.list_tags( location=TEST_LOCATION, entry_group=TEST_ENTRY_GROUP_ID, entry=TEST_ENTRY_ID, @@ -544,7 +544,7 @@ def test_get_tag_for_template_name(self, mock_get_conn, mock_get_creds_and_proje tag_2 = mock.MagicMock(template=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_1)) mock_get_conn.return_value.list_tags.return_value = [tag_1, tag_2] - result = self.hook.get_tag_for_template_name( # pylint: disable=no-value-for-parameter + result = self.hook.get_tag_for_template_name( location=TEST_LOCATION, entry_group=TEST_ENTRY_GROUP_ID, entry=TEST_ENTRY_ID, @@ -570,7 +570,7 @@ def test_get_tag_for_template_name(self, mock_get_conn, mock_get_creds_and_proje ) @mock.patch("airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.get_conn") def test_rename_tag_template_field(self, mock_get_conn, mock_get_creds_and_project_id) -> None: - self.hook.rename_tag_template_field( # pylint: disable=no-value-for-parameter + self.hook.rename_tag_template_field( location=TEST_LOCATION, tag_template=TEST_TAG_TEMPLATE_ID, field=TEST_TAG_TEMPLATE_FIELD_ID, @@ -595,7 +595,7 @@ def test_rename_tag_template_field(self, mock_get_conn, mock_get_creds_and_proje ) @mock.patch("airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.get_conn") def test_update_entry(self, mock_get_conn, mock_get_creds_and_project_id) -> None: - self.hook.update_entry( # pylint: disable=no-value-for-parameter + self.hook.update_entry( entry=TEST_ENTRY, update_mask=TEST_UPDATE_MASK, location=TEST_LOCATION, @@ -621,7 +621,7 @@ def test_update_entry(self, mock_get_conn, mock_get_creds_and_project_id) -> Non ) @mock.patch("airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.get_conn") def test_update_tag(self, mock_get_conn, mock_get_creds_and_project_id) -> None: - self.hook.update_tag( # pylint: disable=no-value-for-parameter + self.hook.update_tag( tag=deepcopy(TEST_TAG), update_mask=TEST_UPDATE_MASK, location=TEST_LOCATION, @@ -645,7 +645,7 @@ def test_update_tag(self, mock_get_conn, mock_get_creds_and_project_id) -> None: ) @mock.patch("airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.get_conn") def test_update_tag_template(self, mock_get_conn, mock_get_creds_and_project_id) -> None: - self.hook.update_tag_template( # pylint: disable=no-value-for-parameter + self.hook.update_tag_template( tag_template=TEST_TAG_TEMPLATE, update_mask=TEST_UPDATE_MASK, location=TEST_LOCATION, @@ -670,7 +670,7 @@ def test_update_tag_template(self, mock_get_conn, mock_get_creds_and_project_id) ) @mock.patch("airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.get_conn") def test_update_tag_template_field(self, mock_get_conn, mock_get_creds_and_project_id) -> None: - self.hook.update_tag_template_field( # pylint: disable=no-value-for-parameter + self.hook.update_tag_template_field( tag_template_field=TEST_TAG_TEMPLATE_FIELD, update_mask=TEST_UPDATE_MASK, tag_template=TEST_TAG_TEMPLATE_ID, @@ -1247,7 +1247,7 @@ def setUp( @mock.patch("airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.get_conn") def test_create_entry(self, mock_get_conn, mock_get_creds_and_project_id) -> None: with pytest.raises(AirflowException, match=TEST_MESSAGE): - self.hook.create_entry( # pylint: disable=no-value-for-parameter + self.hook.create_entry( location=TEST_LOCATION, entry_group=TEST_ENTRY_GROUP_ID, entry_id=TEST_ENTRY_ID, @@ -1264,7 +1264,7 @@ def 
test_create_entry(self, mock_get_conn, mock_get_creds_and_project_id) -> Non @mock.patch("airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.get_conn") def test_create_entry_group(self, mock_get_conn, mock_get_creds_and_project_id) -> None: with pytest.raises(AirflowException, match=TEST_MESSAGE): - self.hook.create_entry_group( # pylint: disable=no-value-for-parameter + self.hook.create_entry_group( location=TEST_LOCATION, entry_group_id=TEST_ENTRY_GROUP_ID, entry_group=TEST_ENTRY_GROUP, @@ -1281,7 +1281,7 @@ def test_create_entry_group(self, mock_get_conn, mock_get_creds_and_project_id) def test_create_tag(self, mock_get_conn, mock_get_creds_and_project_id) -> None: with pytest.raises(AirflowException, match=TEST_MESSAGE): - self.hook.create_tag( # pylint: disable=no-value-for-parameter + self.hook.create_tag( location=TEST_LOCATION, entry_group=TEST_ENTRY_GROUP_ID, entry=TEST_ENTRY_ID, @@ -1300,7 +1300,7 @@ def test_create_tag(self, mock_get_conn, mock_get_creds_and_project_id) -> None: def test_create_tag_protobuff(self, mock_get_conn, mock_get_creds_and_project_id) -> None: with pytest.raises(AirflowException, match=TEST_MESSAGE): - self.hook.create_tag( # pylint: disable=no-value-for-parameter + self.hook.create_tag( location=TEST_LOCATION, entry_group=TEST_ENTRY_GROUP_ID, entry=TEST_ENTRY_ID, @@ -1319,7 +1319,7 @@ def test_create_tag_protobuff(self, mock_get_conn, mock_get_creds_and_project_id def test_create_tag_template(self, mock_get_conn, mock_get_creds_and_project_id) -> None: with pytest.raises(AirflowException, match=TEST_MESSAGE): - self.hook.create_tag_template( # pylint: disable=no-value-for-parameter + self.hook.create_tag_template( location=TEST_LOCATION, tag_template_id=TEST_TAG_TEMPLATE_ID, tag_template=TEST_TAG_TEMPLATE, @@ -1336,7 +1336,7 @@ def test_create_tag_template(self, mock_get_conn, mock_get_creds_and_project_id) def test_create_tag_template_field(self, mock_get_conn, mock_get_creds_and_project_id) -> None: with pytest.raises(AirflowException, match=TEST_MESSAGE): - self.hook.create_tag_template_field( # pylint: disable=no-value-for-parameter + self.hook.create_tag_template_field( location=TEST_LOCATION, tag_template=TEST_TAG_TEMPLATE_ID, tag_template_field_id=TEST_TAG_TEMPLATE_FIELD_ID, @@ -1354,7 +1354,7 @@ def test_create_tag_template_field(self, mock_get_conn, mock_get_creds_and_proje def test_delete_entry(self, mock_get_conn, mock_get_creds_and_project_id) -> None: with pytest.raises(AirflowException, match=TEST_MESSAGE): - self.hook.delete_entry( # pylint: disable=no-value-for-parameter + self.hook.delete_entry( location=TEST_LOCATION, entry_group=TEST_ENTRY_GROUP_ID, entry=TEST_ENTRY_ID, @@ -1370,7 +1370,7 @@ def test_delete_entry(self, mock_get_conn, mock_get_creds_and_project_id) -> Non @mock.patch("airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.get_conn") def test_delete_entry_group(self, mock_get_conn, mock_get_creds_and_project_id) -> None: with pytest.raises(AirflowException, match=TEST_MESSAGE): - self.hook.delete_entry_group( # pylint: disable=no-value-for-parameter + self.hook.delete_entry_group( location=TEST_LOCATION, entry_group=TEST_ENTRY_GROUP_ID, retry=TEST_RETRY, @@ -1385,7 +1385,7 @@ def test_delete_entry_group(self, mock_get_conn, mock_get_creds_and_project_id) @mock.patch("airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.get_conn") def test_delete_tag(self, mock_get_conn, mock_get_creds_and_project_id) -> None: with pytest.raises(AirflowException, match=TEST_MESSAGE): - 
self.hook.delete_tag( # pylint: disable=no-value-for-parameter + self.hook.delete_tag( location=TEST_LOCATION, entry_group=TEST_ENTRY_GROUP_ID, entry=TEST_ENTRY_ID, @@ -1402,7 +1402,7 @@ def test_delete_tag(self, mock_get_conn, mock_get_creds_and_project_id) -> None: @mock.patch("airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.get_conn") def test_delete_tag_template(self, mock_get_conn, mock_get_creds_and_project_id) -> None: with pytest.raises(AirflowException, match=TEST_MESSAGE): - self.hook.delete_tag_template( # pylint: disable=no-value-for-parameter + self.hook.delete_tag_template( location=TEST_LOCATION, tag_template=TEST_TAG_TEMPLATE_ID, force=TEST_FORCE, @@ -1418,7 +1418,7 @@ def test_delete_tag_template(self, mock_get_conn, mock_get_creds_and_project_id) @mock.patch("airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.get_conn") def test_delete_tag_template_field(self, mock_get_conn, mock_get_creds_and_project_id) -> None: with pytest.raises(AirflowException, match=TEST_MESSAGE): - self.hook.delete_tag_template_field( # pylint: disable=no-value-for-parameter + self.hook.delete_tag_template_field( location=TEST_LOCATION, tag_template=TEST_TAG_TEMPLATE_ID, field=TEST_TAG_TEMPLATE_FIELD_ID, @@ -1435,7 +1435,7 @@ def test_delete_tag_template_field(self, mock_get_conn, mock_get_creds_and_proje @mock.patch("airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.get_conn") def test_get_entry(self, mock_get_conn, mock_get_creds_and_project_id) -> None: with pytest.raises(AirflowException, match=TEST_MESSAGE): - self.hook.get_entry( # pylint: disable=no-value-for-parameter + self.hook.get_entry( location=TEST_LOCATION, entry_group=TEST_ENTRY_GROUP_ID, entry=TEST_ENTRY_ID, @@ -1451,7 +1451,7 @@ def test_get_entry(self, mock_get_conn, mock_get_creds_and_project_id) -> None: @mock.patch("airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.get_conn") def test_get_entry_group(self, mock_get_conn, mock_get_creds_and_project_id) -> None: with pytest.raises(AirflowException, match=TEST_MESSAGE): - self.hook.get_entry_group( # pylint: disable=no-value-for-parameter + self.hook.get_entry_group( location=TEST_LOCATION, entry_group=TEST_ENTRY_GROUP_ID, read_mask=TEST_READ_MASK, @@ -1467,7 +1467,7 @@ def test_get_entry_group(self, mock_get_conn, mock_get_creds_and_project_id) -> @mock.patch("airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.get_conn") def test_get_tag_template(self, mock_get_conn, mock_get_creds_and_project_id) -> None: with pytest.raises(AirflowException, match=TEST_MESSAGE): - self.hook.get_tag_template( # pylint: disable=no-value-for-parameter + self.hook.get_tag_template( location=TEST_LOCATION, tag_template=TEST_TAG_TEMPLATE_ID, retry=TEST_RETRY, @@ -1482,7 +1482,7 @@ def test_get_tag_template(self, mock_get_conn, mock_get_creds_and_project_id) -> @mock.patch("airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.get_conn") def test_list_tags(self, mock_get_conn, mock_get_creds_and_project_id) -> None: with pytest.raises(AirflowException, match=TEST_MESSAGE): - self.hook.list_tags( # pylint: disable=no-value-for-parameter + self.hook.list_tags( location=TEST_LOCATION, entry_group=TEST_ENTRY_GROUP_ID, entry=TEST_ENTRY_ID, @@ -1503,7 +1503,7 @@ def test_get_tag_for_template_name(self, mock_get_conn, mock_get_creds_and_proje mock_get_conn.return_value.list_tags.return_value = [tag_1, tag_2] with pytest.raises(AirflowException, match=TEST_MESSAGE): - 
self.hook.get_tag_for_template_name( # pylint: disable=no-value-for-parameter + self.hook.get_tag_for_template_name( location=TEST_LOCATION, entry_group=TEST_ENTRY_GROUP_ID, entry=TEST_ENTRY_ID, @@ -1520,7 +1520,7 @@ def test_get_tag_for_template_name(self, mock_get_conn, mock_get_creds_and_proje @mock.patch("airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.get_conn") def test_rename_tag_template_field(self, mock_get_conn, mock_get_creds_and_project_id) -> None: with pytest.raises(AirflowException, match=TEST_MESSAGE): - self.hook.rename_tag_template_field( # pylint: disable=no-value-for-parameter + self.hook.rename_tag_template_field( location=TEST_LOCATION, tag_template=TEST_TAG_TEMPLATE_ID, field=TEST_TAG_TEMPLATE_FIELD_ID, @@ -1537,7 +1537,7 @@ def test_rename_tag_template_field(self, mock_get_conn, mock_get_creds_and_proje @mock.patch("airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.get_conn") def test_update_entry(self, mock_get_conn, mock_get_creds_and_project_id) -> None: with pytest.raises(AirflowException, match=TEST_MESSAGE): - self.hook.update_entry( # pylint: disable=no-value-for-parameter + self.hook.update_entry( entry=TEST_ENTRY, update_mask=TEST_UPDATE_MASK, location=TEST_LOCATION, @@ -1555,7 +1555,7 @@ def test_update_entry(self, mock_get_conn, mock_get_creds_and_project_id) -> Non @mock.patch("airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.get_conn") def test_update_tag(self, mock_get_conn, mock_get_creds_and_project_id) -> None: with pytest.raises(AirflowException, match=TEST_MESSAGE): - self.hook.update_tag( # pylint: disable=no-value-for-parameter + self.hook.update_tag( tag=deepcopy(TEST_TAG), update_mask=TEST_UPDATE_MASK, location=TEST_LOCATION, @@ -1574,7 +1574,7 @@ def test_update_tag(self, mock_get_conn, mock_get_creds_and_project_id) -> None: @mock.patch("airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.get_conn") def test_update_tag_template(self, mock_get_conn, mock_get_creds_and_project_id) -> None: with pytest.raises(AirflowException, match=TEST_MESSAGE): - self.hook.update_tag_template( # pylint: disable=no-value-for-parameter + self.hook.update_tag_template( tag_template=TEST_TAG_TEMPLATE, update_mask=TEST_UPDATE_MASK, location=TEST_LOCATION, @@ -1591,7 +1591,7 @@ def test_update_tag_template(self, mock_get_conn, mock_get_creds_and_project_id) @mock.patch("airflow.providers.google.cloud.hooks.datacatalog.CloudDataCatalogHook.get_conn") def test_update_tag_template_field(self, mock_get_conn, mock_get_creds_and_project_id) -> None: with pytest.raises(AirflowException, match=TEST_MESSAGE): - self.hook.update_tag_template_field( # pylint: disable=no-value-for-parameter + self.hook.update_tag_template_field( tag_template_field=TEST_TAG_TEMPLATE_FIELD, update_mask=TEST_UPDATE_MASK, tag_template=TEST_TAG_TEMPLATE_ID, diff --git a/tests/providers/google/cloud/hooks/test_dataflow.py b/tests/providers/google/cloud/hooks/test_dataflow.py index 0314e5cbe3504..1ef3900664be5 100644 --- a/tests/providers/google/cloud/hooks/test_dataflow.py +++ b/tests/providers/google/cloud/hooks/test_dataflow.py @@ -179,7 +179,7 @@ def mock_init( gcp_conn_id, delegate_to=None, impersonation_chain=None, -): # pylint: disable=unused-argument +): pass @@ -209,7 +209,7 @@ def test_start_python_dataflow(self, mock_callback_on_job_id, mock_dataflow_wait job_name = f"{JOB_NAME}-{MOCK_UUID_PREFIX}" with self.assertWarnsRegex(DeprecationWarning, "This method is deprecated"): - 
self.dataflow_hook.start_python_dataflow( # pylint: disable=no-value-for-parameter + self.dataflow_hook.start_python_dataflow( job_name=JOB_NAME, variables=DATAFLOW_VARIABLES_PY, dataflow=PY_FILE, @@ -254,7 +254,7 @@ def test_start_python_dataflow_with_custom_region_as_variable( passed_variables["region"] = TEST_LOCATION with self.assertWarnsRegex(DeprecationWarning, "This method is deprecated"): - self.dataflow_hook.start_python_dataflow( # pylint: disable=no-value-for-parameter + self.dataflow_hook.start_python_dataflow( job_name=JOB_NAME, variables=passed_variables, dataflow=PY_FILE, @@ -298,7 +298,7 @@ def test_start_python_dataflow_with_custom_region_as_parameter( passed_variables = copy.deepcopy(DATAFLOW_VARIABLES_PY) with self.assertWarnsRegex(DeprecationWarning, "This method is deprecated"): - self.dataflow_hook.start_python_dataflow( # pylint: disable=no-value-for-parameter + self.dataflow_hook.start_python_dataflow( job_name=JOB_NAME, variables=passed_variables, dataflow=PY_FILE, @@ -344,7 +344,7 @@ def test_start_python_dataflow_with_multiple_extra_packages( passed_variables['extra-package'] = ['a.whl', 'b.whl'] with self.assertWarnsRegex(DeprecationWarning, "This method is deprecated"): - self.dataflow_hook.start_python_dataflow( # pylint: disable=no-value-for-parameter + self.dataflow_hook.start_python_dataflow( job_name=JOB_NAME, variables=passed_variables, dataflow=PY_FILE, @@ -394,7 +394,7 @@ def test_start_python_dataflow_with_custom_interpreter( job_name = f"{JOB_NAME}-{MOCK_UUID_PREFIX}" with self.assertWarnsRegex(DeprecationWarning, "This method is deprecated"): - self.dataflow_hook.start_python_dataflow( # pylint: disable=no-value-for-parameter + self.dataflow_hook.start_python_dataflow( job_name=JOB_NAME, variables=DATAFLOW_VARIABLES_PY, dataflow=PY_FILE, @@ -447,7 +447,7 @@ def test_start_python_dataflow_with_non_empty_py_requirements_and_without_system job_name = f"{JOB_NAME}-{MOCK_UUID_PREFIX}" with self.assertWarnsRegex(DeprecationWarning, "This method is deprecated"): - self.dataflow_hook.start_python_dataflow( # pylint: disable=no-value-for-parameter + self.dataflow_hook.start_python_dataflow( job_name=JOB_NAME, variables=DATAFLOW_VARIABLES_PY, dataflow=PY_FILE, @@ -489,7 +489,7 @@ def test_start_python_dataflow_with_empty_py_requirements_and_without_system_pac with self.assertWarnsRegex(DeprecationWarning, "This method is deprecated"), self.assertRaisesRegex( AirflowException, "Invalid method invocation." 
): - self.dataflow_hook.start_python_dataflow( # pylint: disable=no-value-for-parameter + self.dataflow_hook.start_python_dataflow( job_name=JOB_NAME, variables=DATAFLOW_VARIABLES_PY, dataflow=PY_FILE, @@ -511,7 +511,7 @@ def test_start_java_dataflow(self, mock_callback_on_job_id, mock_dataflow_wait_f job_name = f"{JOB_NAME}-{MOCK_UUID_PREFIX}" with self.assertWarnsRegex(DeprecationWarning, "This method is deprecated"): - self.dataflow_hook.start_java_dataflow( # pylint: disable=no-value-for-parameter + self.dataflow_hook.start_java_dataflow( job_name=JOB_NAME, variables=DATAFLOW_VARIABLES_JAVA, jar=JAR_FILE, @@ -551,7 +551,7 @@ def test_start_java_dataflow_with_multiple_values_in_variables( passed_variables['mock-option'] = ['a.whl', 'b.whl'] with self.assertWarnsRegex(DeprecationWarning, "This method is deprecated"): - self.dataflow_hook.start_java_dataflow( # pylint: disable=no-value-for-parameter + self.dataflow_hook.start_java_dataflow( job_name=JOB_NAME, variables=passed_variables, jar=JAR_FILE, @@ -591,7 +591,7 @@ def test_start_java_dataflow_with_custom_region_as_variable( passed_variables['region'] = TEST_LOCATION with self.assertWarnsRegex(DeprecationWarning, "This method is deprecated"): - self.dataflow_hook.start_java_dataflow( # pylint: disable=no-value-for-parameter + self.dataflow_hook.start_java_dataflow( job_name=JOB_NAME, variables=passed_variables, jar=JAR_FILE, @@ -628,7 +628,7 @@ def test_start_java_dataflow_with_custom_region_as_parameter( job_name = f"{JOB_NAME}-{MOCK_UUID_PREFIX}" with self.assertWarnsRegex(DeprecationWarning, "This method is deprecated"): - self.dataflow_hook.start_java_dataflow( # pylint: disable=no-value-for-parameter + self.dataflow_hook.start_java_dataflow( job_name=JOB_NAME, variables=DATAFLOW_VARIABLES_JAVA, jar=JAR_FILE, @@ -806,7 +806,7 @@ def test_start_template_dataflow(self, mock_conn, mock_controller, mock_uuid): ) launch_method.return_value.execute.return_value = {"job": {"id": TEST_JOB_ID}} variables = {'zone': 'us-central1-f', 'tempLocation': 'gs://test/temp'} - self.dataflow_hook.start_template_dataflow( # pylint: disable=no-value-for-parameter + self.dataflow_hook.start_template_dataflow( job_name=JOB_NAME, variables=copy.deepcopy(variables), parameters=PARAMETERS, @@ -848,7 +848,7 @@ def test_start_template_dataflow_with_custom_region_as_variable( mock_conn.return_value.projects.return_value.locations.return_value.templates.return_value.launch ) launch_method.return_value.execute.return_value = {"job": {"id": TEST_JOB_ID}} - self.dataflow_hook.start_template_dataflow( # pylint: disable=no-value-for-parameter + self.dataflow_hook.start_template_dataflow( job_name=JOB_NAME, variables={'region': TEST_LOCATION}, parameters=PARAMETERS, @@ -887,7 +887,7 @@ def test_start_template_dataflow_with_custom_region_as_parameter( ) launch_method.return_value.execute.return_value = {"job": {"id": TEST_JOB_ID}} - self.dataflow_hook.start_template_dataflow( # pylint: disable=no-value-for-parameter + self.dataflow_hook.start_template_dataflow( job_name=JOB_NAME, variables={}, parameters=PARAMETERS, @@ -932,7 +932,7 @@ def test_start_template_dataflow_with_runtime_env(self, mock_conn, mock_dataflow .launch) # fmt: on method.return_value.execute.return_value = {'job': {'id': TEST_JOB_ID}} - self.dataflow_hook.start_template_dataflow( # pylint: disable=no-value-for-parameter + self.dataflow_hook.start_template_dataflow( job_name=JOB_NAME, variables=options_with_runtime_env, parameters=PARAMETERS, @@ -980,7 +980,7 @@ def 
test_start_template_dataflow_update_runtime_env(self, mock_conn, mock_datafl .launch) # fmt: on method.return_value.execute.return_value = {'job': {'id': TEST_JOB_ID}} - self.dataflow_hook.start_template_dataflow( # pylint: disable=no-value-for-parameter + self.dataflow_hook.start_template_dataflow( job_name=JOB_NAME, variables=options_with_runtime_env, parameters=PARAMETERS, diff --git a/tests/providers/google/cloud/hooks/test_datafusion.py b/tests/providers/google/cloud/hooks/test_datafusion.py index 294f8c5b69ef5..29199a29f7cdb 100644 --- a/tests/providers/google/cloud/hooks/test_datafusion.py +++ b/tests/providers/google/cloud/hooks/test_datafusion.py @@ -35,8 +35,6 @@ INSTANCE_URL = "http://datafusion.instance.com" RUNTIME_ARGS = {"arg1": "a", "arg2": "b"} -# pylint: disable=redefined-outer-name - @pytest.fixture def hook(): diff --git a/tests/providers/google/cloud/hooks/test_dataprep.py b/tests/providers/google/cloud/hooks/test_dataprep.py index d00e540bc10d2..bbe3fa3d93f41 100644 --- a/tests/providers/google/cloud/hooks/test_dataprep.py +++ b/tests/providers/google/cloud/hooks/test_dataprep.py @@ -63,7 +63,7 @@ def test_get_jobs_for_job_group_should_pass_after_retry(self, mock_get_request): side_effect=[mock.MagicMock(), HTTPError()], ) def test_get_jobs_for_job_group_should_not_retry_after_success(self, mock_get_request): - # pylint: disable=no-member + self.hook.get_jobs_for_job_group.retry.sleep = mock.Mock() self.hook.get_jobs_for_job_group(JOB_ID) assert mock_get_request.call_count == 1 @@ -73,7 +73,7 @@ def test_get_jobs_for_job_group_should_not_retry_after_success(self, mock_get_re side_effect=[HTTPError(), HTTPError(), HTTPError(), HTTPError(), mock.MagicMock()], ) def test_get_jobs_for_job_group_should_retry_after_four_errors(self, mock_get_request): - # pylint: disable=no-member + self.hook.get_jobs_for_job_group.retry.sleep = mock.Mock() self.hook.get_jobs_for_job_group(JOB_ID) assert mock_get_request.call_count == 5 @@ -84,7 +84,7 @@ def test_get_jobs_for_job_group_should_retry_after_four_errors(self, mock_get_re ) def test_get_jobs_for_job_group_raise_error_after_five_calls(self, mock_get_request): with pytest.raises(RetryError) as ctx: - # pylint: disable=no-member + self.hook.get_jobs_for_job_group.retry.sleep = mock.Mock() self.hook.get_jobs_for_job_group(JOB_ID) assert "HTTPError" in str(ctx.value) @@ -115,7 +115,7 @@ def test_get_job_group_should_pass_after_retry(self, mock_get_request): side_effect=[mock.MagicMock(), HTTPError()], ) def test_get_job_group_should_not_retry_after_success(self, mock_get_request): - self.hook.get_job_group.retry.sleep = mock.Mock() # pylint: disable=no-member + self.hook.get_job_group.retry.sleep = mock.Mock() self.hook.get_job_group(JOB_ID, EMBED, INCLUDE_DELETED) assert mock_get_request.call_count == 1 @@ -130,7 +130,7 @@ def test_get_job_group_should_not_retry_after_success(self, mock_get_request): ], ) def test_get_job_group_should_retry_after_four_errors(self, mock_get_request): - self.hook.get_job_group.retry.sleep = mock.Mock() # pylint: disable=no-member + self.hook.get_job_group.retry.sleep = mock.Mock() self.hook.get_job_group(JOB_ID, EMBED, INCLUDE_DELETED) assert mock_get_request.call_count == 5 @@ -140,7 +140,7 @@ def test_get_job_group_should_retry_after_four_errors(self, mock_get_request): ) def test_get_job_group_raise_error_after_five_calls(self, mock_get_request): with pytest.raises(RetryError) as ctx: - # pylint: disable=no-member + self.hook.get_job_group.retry.sleep = mock.Mock() self.hook.get_job_group(JOB_ID, 
EMBED, INCLUDE_DELETED) assert "HTTPError" in str(ctx.value) @@ -172,7 +172,7 @@ def test_run_job_group_should_pass_after_retry(self, mock_get_request): side_effect=[mock.MagicMock(), HTTPError()], ) def test_run_job_group_should_not_retry_after_success(self, mock_get_request): - self.hook.run_job_group.retry.sleep = mock.Mock() # pylint: disable=no-member + self.hook.run_job_group.retry.sleep = mock.Mock() self.hook.run_job_group(body_request=DATA) assert mock_get_request.call_count == 1 @@ -187,7 +187,7 @@ def test_run_job_group_should_not_retry_after_success(self, mock_get_request): ], ) def test_run_job_group_should_retry_after_four_errors(self, mock_get_request): - self.hook.run_job_group.retry.sleep = mock.Mock() # pylint: disable=no-member + self.hook.run_job_group.retry.sleep = mock.Mock() self.hook.run_job_group(body_request=DATA) assert mock_get_request.call_count == 5 @@ -197,7 +197,7 @@ def test_run_job_group_should_retry_after_four_errors(self, mock_get_request): ) def test_run_job_group_raise_error_after_five_calls(self, mock_get_request): with pytest.raises(RetryError) as ctx: - # pylint: disable=no-member + self.hook.run_job_group.retry.sleep = mock.Mock() self.hook.run_job_group(body_request=DATA) assert "HTTPError" in str(ctx.value) diff --git a/tests/providers/google/cloud/hooks/test_dataproc.py b/tests/providers/google/cloud/hooks/test_dataproc.py index a37eb6c344d4a..af58497ac8a4f 100644 --- a/tests/providers/google/cloud/hooks/test_dataproc.py +++ b/tests/providers/google/cloud/hooks/test_dataproc.py @@ -20,7 +20,7 @@ from unittest import mock import pytest -from google.cloud.dataproc_v1beta2 import JobStatus # pylint: disable=no-name-in-module +from google.cloud.dataproc_v1beta2 import JobStatus from airflow.exceptions import AirflowException from airflow.providers.google.cloud.hooks.dataproc import DataprocHook, DataProcJobBuilder diff --git a/tests/providers/google/cloud/hooks/test_datastore.py b/tests/providers/google/cloud/hooks/test_datastore.py index 6c16773ff65da..210d44c503a07 100644 --- a/tests/providers/google/cloud/hooks/test_datastore.py +++ b/tests/providers/google/cloud/hooks/test_datastore.py @@ -34,7 +34,7 @@ def mock_init( gcp_conn_id, delegate_to=None, impersonation_chain=None, -): # pylint: disable=unused-argument +): pass @@ -82,9 +82,7 @@ def test_allocate_ids_no_project_id(self, mock_get_conn, mock_project_id): partial_keys = [] with pytest.raises(AirflowException) as ctx: - self.datastore_hook.allocate_ids( # pylint: disable=no-value-for-parameter - partial_keys=partial_keys - ) + self.datastore_hook.allocate_ids(partial_keys=partial_keys) assert "project_id" in str(ctx.value) @patch('airflow.providers.google.cloud.hooks.datastore.DatastoreHook.get_conn') @@ -113,7 +111,7 @@ def test_begin_transaction(self, mock_get_conn): def test_begin_transaction_no_project_id(self, mock_get_conn, mock_project_id): self.datastore_hook.connection = mock_get_conn.return_value with pytest.raises(AirflowException) as ctx: - self.datastore_hook.begin_transaction() # pylint: disable=no-value-for-parameter + self.datastore_hook.begin_transaction() assert "project_id" in str(ctx.value) @patch('airflow.providers.google.cloud.hooks.datastore.DatastoreHook.get_conn') @@ -142,7 +140,7 @@ def test_commit_no_project_id(self, mock_get_conn, mock_project_id): body = {'item': 'a'} with pytest.raises(AirflowException) as ctx: - self.datastore_hook.commit(body=body) # pylint: disable=no-value-for-parameter + self.datastore_hook.commit(body=body) assert "project_id" in 
str(ctx.value) @patch('airflow.providers.google.cloud.hooks.datastore.DatastoreHook.get_conn') @@ -180,7 +178,7 @@ def test_lookup_no_project_id(self, mock_get_conn, mock_project_id): transaction = 'transaction' with pytest.raises(AirflowException) as ctx: - self.datastore_hook.lookup( # pylint: disable=no-value-for-parameter + self.datastore_hook.lookup( keys=keys, read_consistency=read_consistency, transaction=transaction, @@ -212,7 +210,7 @@ def test_rollback_no_project_id(self, mock_get_conn, mock_project_id): transaction = 'transaction' with pytest.raises(AirflowException) as ctx: - self.datastore_hook.rollback(transaction=transaction) # pylint: disable=no-value-for-parameter + self.datastore_hook.rollback(transaction=transaction) assert "project_id" in str(ctx.value) @patch('airflow.providers.google.cloud.hooks.datastore.DatastoreHook.get_conn') @@ -241,7 +239,7 @@ def test_run_query_no_project_id(self, mock_get_conn, mock_project_id): body = {'item': 'a'} with pytest.raises(AirflowException) as ctx: - self.datastore_hook.run_query(body=body) # pylint: disable=no-value-for-parameter + self.datastore_hook.run_query(body=body) assert "project_id" in str(ctx.value) @patch('airflow.providers.google.cloud.hooks.datastore.DatastoreHook.get_conn') @@ -341,7 +339,7 @@ def test_export_to_storage_bucket_no_project_id(self, mock_get_conn, mock_projec labels = {} with pytest.raises(AirflowException) as ctx: - self.datastore_hook.export_to_storage_bucket( # pylint: disable=no-value-for-parameter + self.datastore_hook.export_to_storage_bucket( bucket=bucket, namespace=namespace, entity_filter=entity_filter, @@ -397,7 +395,7 @@ def test_import_from_storage_bucket_no_project_id(self, mock_get_conn, mock_proj labels = {} with pytest.raises(AirflowException) as ctx: - self.datastore_hook.import_from_storage_bucket( # pylint: disable=no-value-for-parameter + self.datastore_hook.import_from_storage_bucket( bucket=bucket, file=file, namespace=namespace, diff --git a/tests/providers/google/cloud/hooks/test_dlp.py b/tests/providers/google/cloud/hooks/test_dlp.py index 5aac84a75a09c..dedf6dabaff51 100644 --- a/tests/providers/google/cloud/hooks/test_dlp.py +++ b/tests/providers/google/cloud/hooks/test_dlp.py @@ -16,7 +16,7 @@ # specific language governing permissions and limitations # under the License. 
-# pylint: disable=R0904, C0111, C0302 + """ This module contains various unit tests for functions in CloudDLPHook @@ -101,7 +101,7 @@ def test_cancel_dlp_job_without_dlp_job_id(self, _): @mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn") def test_cancel_dlp_job_without_parent(self, _, mock_project_id): with pytest.raises(AirflowException): - self.hook.cancel_dlp_job(dlp_job_id=DLP_JOB_ID) # pylint: disable=no-value-for-parameter + self.hook.cancel_dlp_job(dlp_job_id=DLP_JOB_ID) @mock.patch( 'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id', @@ -172,7 +172,7 @@ def test_create_dlp_job(self, get_conn): @mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn") def test_create_dlp_job_without_project_id(self, mock_get_conn, mock_project_id): with pytest.raises(AirflowException): - self.hook.create_dlp_job() # pylint: disable=no-value-for-parameter + self.hook.create_dlp_job() @mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn") def test_create_dlp_job_with_wait_until_finished(self, get_conn): @@ -255,7 +255,7 @@ def test_create_job_trigger(self, get_conn): @mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn") def test_create_job_trigger_without_parent(self, mock_get_conn, mock_project_id): with pytest.raises(AirflowException): - self.hook.create_job_trigger() # pylint: disable=no-value-for-parameter + self.hook.create_job_trigger() @mock.patch( 'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id', @@ -328,7 +328,7 @@ def test_deidentify_content(self, get_conn): @mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn") def test_deidentify_content_without_parent(self, mock_get_conn, mock_project_id): with pytest.raises(AirflowException): - self.hook.deidentify_content() # pylint: disable=no-value-for-parameter + self.hook.deidentify_content() @mock.patch( 'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id', @@ -393,7 +393,7 @@ def test_delete_dlp_job_without_dlp_job_id(self, _): @mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn") def test_delete_dlp_job_without_parent(self, mock_get_conn, mock_project_id): with pytest.raises(AirflowException): - self.hook.delete_dlp_job(dlp_job_id=DLP_JOB_ID) # pylint: disable=no-value-for-parameter + self.hook.delete_dlp_job(dlp_job_id=DLP_JOB_ID) @mock.patch( 'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id', @@ -458,7 +458,7 @@ def test_delete_job_trigger_without_trigger_id(self, _): @mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn") def test_delete_job_trigger_without_parent(self, mock_get_conn, mock_project_id): with pytest.raises(AirflowException): - self.hook.delete_job_trigger(job_trigger_id=TRIGGER_ID) # pylint: disable=no-value-for-parameter + self.hook.delete_job_trigger(job_trigger_id=TRIGGER_ID) @mock.patch( 'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id', @@ -573,7 +573,7 @@ def test_get_dlp_job_without_dlp_job_id(self, _): @mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn") def test_get_dlp_job_without_parent(self, mock_get_conn, mock_project_id): with pytest.raises(AirflowException): - self.hook.get_dlp_job(dlp_job_id=DLP_JOB_ID) # pylint: disable=no-value-for-parameter + self.hook.get_dlp_job(dlp_job_id=DLP_JOB_ID) @mock.patch( 'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id', @@ -644,7 
+644,7 @@ def test_get_job_trigger_without_trigger_id(self, _): @mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn") def test_get_job_trigger_without_parent(self, mock_get_conn, mock_project_id): with pytest.raises(AirflowException): - self.hook.get_job_trigger(job_trigger_id=TRIGGER_ID) # pylint: disable=no-value-for-parameter + self.hook.get_job_trigger(job_trigger_id=TRIGGER_ID) @mock.patch( 'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id', @@ -720,7 +720,7 @@ def test_inspect_content(self, get_conn): @mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn") def test_inspect_content_without_parent(self, mock_get_conn, mock_project_id): with pytest.raises(AirflowException): - self.hook.inspect_content() # pylint: disable=no-value-for-parameter + self.hook.inspect_content() @mock.patch( 'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id', @@ -789,7 +789,7 @@ def test_list_dlp_jobs(self, get_conn): @mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn") def test_list_dlp_jobs_without_parent(self, mock_get_conn, mock_project_id): with pytest.raises(AirflowException): - self.hook.list_dlp_jobs() # pylint: disable=no-value-for-parameter + self.hook.list_dlp_jobs() @mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn") def test_list_info_types(self, get_conn): @@ -867,7 +867,7 @@ def test_list_job_triggers(self, get_conn): @mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn") def test_list_job_triggers_without_parent(self, mock_get_conn, mock_project_id): with pytest.raises(AirflowException): - self.hook.list_job_triggers() # pylint: disable=no-value-for-parameter + self.hook.list_job_triggers() @mock.patch( 'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id', @@ -937,7 +937,7 @@ def test_redact_image(self, get_conn): @mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn") def test_redact_image_without_parent(self, mock_get_conn, mock_project_id): with pytest.raises(AirflowException): - self.hook.redact_image() # pylint: disable=no-value-for-parameter + self.hook.redact_image() @mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn") def test_reidentify_content(self, get_conn): @@ -965,7 +965,7 @@ def test_reidentify_content(self, get_conn): @mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn") def test_reidentify_content_without_parent(self, mock_get_conn, mock_project_id): with pytest.raises(AirflowException): - self.hook.reidentify_content() # pylint: disable=no-value-for-parameter + self.hook.reidentify_content() @mock.patch( 'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id', @@ -1097,7 +1097,7 @@ def test_update_job_trigger_without_job_trigger_id(self, _): @mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn") def test_update_job_trigger_without_parent(self, mock_get_conn, mock_project_id): with pytest.raises(AirflowException): - self.hook.update_job_trigger(job_trigger_id=TRIGGER_ID) # pylint: disable=no-value-for-parameter + self.hook.update_job_trigger(job_trigger_id=TRIGGER_ID) @mock.patch( 'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id', diff --git a/tests/providers/google/cloud/hooks/test_functions.py b/tests/providers/google/cloud/hooks/test_functions.py index e1b59356ceb2c..36f36a0a2d5fe 100644 --- 
a/tests/providers/google/cloud/hooks/test_functions.py +++ b/tests/providers/google/cloud/hooks/test_functions.py @@ -186,9 +186,7 @@ def test_delete_function(self, wait_for_operation_to_complete, get_conn): execute_method = delete_method.return_value.execute wait_for_operation_to_complete.return_value = None execute_method.return_value = {"name": "operation_id"} - res = self.gcf_function_hook.delete_function( # pylint: disable=assignment-from-no-return - name=GCF_FUNCTION - ) + res = self.gcf_function_hook.delete_function(name=GCF_FUNCTION) assert res is None delete_method.assert_called_once_with(name='function') execute_method.assert_called_once_with(num_retries=5) @@ -204,9 +202,7 @@ def test_update_function(self, wait_for_operation_to_complete, get_conn): execute_method = patch_method.return_value.execute execute_method.return_value = {"name": "operation_id"} wait_for_operation_to_complete.return_value = None - res = self.gcf_function_hook.update_function( # pylint: disable=assignment-from-no-return - update_mask=['a', 'b', 'c'], name=GCF_FUNCTION, body={} - ) + res = self.gcf_function_hook.update_function(update_mask=['a', 'b', 'c'], name=GCF_FUNCTION, body={}) assert res is None patch_method.assert_called_once_with(body={}, name='function', updateMask='a,b,c') execute_method.assert_called_once_with(num_retries=5) diff --git a/tests/providers/google/cloud/hooks/test_gcs.py b/tests/providers/google/cloud/hooks/test_gcs.py index 8af62ef4762c5..7c8d94c676cac 100644 --- a/tests/providers/google/cloud/hooks/test_gcs.py +++ b/tests/providers/google/cloud/hooks/test_gcs.py @@ -15,7 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -# pylint: disable=too-many-lines + import copy import io import os @@ -284,7 +284,7 @@ def test_copy(self, mock_service, mock_bucket): copy_method.return_value = destination_blob # When - response = self.gcs_hook.copy( # pylint: disable=assignment-from-no-return + response = self.gcs_hook.copy( source_bucket=source_bucket, source_object=source_object, destination_bucket=destination_bucket, @@ -369,7 +369,7 @@ def test_rewrite(self, mock_service, mock_bucket): rewrite_method.side_effect = [(None, mock.ANY, mock.ANY), (mock.ANY, mock.ANY, mock.ANY)] # When - response = self.gcs_hook.rewrite( # pylint: disable=assignment-from-no-return + response = self.gcs_hook.rewrite( source_bucket=source_bucket, source_object=source_object, destination_bucket=destination_bucket, @@ -424,9 +424,7 @@ def test_delete(self, mock_service, mock_bucket): delete_method = get_blob_method.return_value.delete delete_method.return_value = blob_to_be_deleted - response = self.gcs_hook.delete( # pylint: disable=assignment-from-no-return - bucket_name=test_bucket, object_name=test_object - ) + response = self.gcs_hook.delete(bucket_name=test_bucket, object_name=test_object) assert response is None @mock.patch(GCS_STRING.format('GCSHook.get_conn')) @@ -598,7 +596,7 @@ def test_compose(self, mock_service, mock_blob): ) @mock.patch(GCS_STRING.format('GCSHook.get_conn')) - def test_compose_with_empty_source_objects(self, mock_service): # pylint: disable=unused-argument + def test_compose_with_empty_source_objects(self, mock_service): test_bucket = 'test_bucket' test_source_objects = [] test_destination_object = 'test_object_composed' @@ -613,7 +611,7 @@ def test_compose_with_empty_source_objects(self, mock_service): # pylint: disab assert str(ctx.value) == 'source_objects cannot be empty.' 
@mock.patch(GCS_STRING.format('GCSHook.get_conn')) - def test_compose_without_bucket(self, mock_service): # pylint: disable=unused-argument + def test_compose_without_bucket(self, mock_service): test_bucket = None test_source_objects = ['test_object_1', 'test_object_2', 'test_object_3'] test_destination_object = 'test_object_composed' @@ -628,7 +626,7 @@ def test_compose_without_bucket(self, mock_service): # pylint: disable=unused-a assert str(ctx.value) == 'bucket_name and destination_object cannot be empty.' @mock.patch(GCS_STRING.format('GCSHook.get_conn')) - def test_compose_without_destination_object(self, mock_service): # pylint: disable=unused-argument + def test_compose_without_destination_object(self, mock_service): test_bucket = 'test_bucket' test_source_objects = ['test_object_1', 'test_object_2', 'test_object_3'] test_destination_object = None @@ -781,7 +779,7 @@ def setUp(self): self.gcs_hook = gcs.GCSHook(gcp_conn_id='test') # generate a 384KiB test file (larger than the minimum 256KiB multipart chunk size) - # pylint: disable=consider-using-with + self.testfile = tempfile.NamedTemporaryFile(delete=False) self.testfile.write(b"x" * 393216) self.testfile.flush() diff --git a/tests/providers/google/cloud/hooks/test_gdm.py b/tests/providers/google/cloud/hooks/test_gdm.py index 23b7b280d912d..48795c79214c5 100644 --- a/tests/providers/google/cloud/hooks/test_gdm.py +++ b/tests/providers/google/cloud/hooks/test_gdm.py @@ -30,7 +30,7 @@ def mock_init( gcp_conn_id, delegate_to=None, impersonation_chain=None, -): # pylint: disable=unused-argument +): pass diff --git a/tests/providers/google/cloud/hooks/test_kms.py b/tests/providers/google/cloud/hooks/test_kms.py index 4de1dfbc6e91b..b9f7c1c50811a 100644 --- a/tests/providers/google/cloud/hooks/test_kms.py +++ b/tests/providers/google/cloud/hooks/test_kms.py @@ -49,7 +49,7 @@ def mock_init( gcp_conn_id, delegate_to=None, impersonation_chain=None, -): # pylint: disable=unused-argument +): pass diff --git a/tests/providers/google/cloud/hooks/test_kubernetes_engine.py b/tests/providers/google/cloud/hooks/test_kubernetes_engine.py index efbe346279fad..f42d7932fed68 100644 --- a/tests/providers/google/cloud/hooks/test_kubernetes_engine.py +++ b/tests/providers/google/cloud/hooks/test_kubernetes_engine.py @@ -115,7 +115,7 @@ def test_delete_cluster_error(self, wait_mock, convert_mock, mock_project_id): self.gke_hook._client.delete_cluster.side_effect = AirflowException('400') with pytest.raises(AirflowException): - self.gke_hook.delete_cluster(name='a-cluster') # pylint: disable=no-value-for-parameter + self.gke_hook.delete_cluster(name='a-cluster') wait_mock.assert_not_called() convert_mock.assert_not_called() @@ -189,7 +189,7 @@ def test_create_cluster_error(self, wait_mock, convert_mock): mock_cluster_proto = None with pytest.raises(AirflowException): - self.gke_hook.create_cluster(mock_cluster_proto) # pylint: disable=no-value-for-parameter + self.gke_hook.create_cluster(mock_cluster_proto) wait_mock.assert_not_called() convert_mock.assert_not_called() diff --git a/tests/providers/google/cloud/hooks/test_life_sciences.py b/tests/providers/google/cloud/hooks/test_life_sciences.py index a071802333403..fb1165e9fc6df 100644 --- a/tests/providers/google/cloud/hooks/test_life_sciences.py +++ b/tests/providers/google/cloud/hooks/test_life_sciences.py @@ -90,10 +90,10 @@ def test_run_pipeline_immediately_complete(self, get_conn_mock, mock_project_id) .get.return_value \ .execute.return_value = TEST_DONE_OPERATION - result = 
self.hook.run_pipeline(body={}, # pylint: disable=no-value-for-parameter + result = self.hook.run_pipeline(body={}, location=TEST_LOCATION) parent = self.hook. \ - _location_path(location=TEST_LOCATION) # pylint: disable=no-value-for-parameter + _location_path(location=TEST_LOCATION) service_mock.projects.return_value.locations.return_value \ .pipelines.return_value.run \ .assert_called_once_with(body={}, @@ -231,7 +231,6 @@ def test_waiting_operation(self, _, get_conn_mock, mock_project_id): .execute = execute_mock # fmt: on - # pylint: disable=no-value-for-parameter result = self.hook.run_pipeline(body={}, location=TEST_LOCATION) assert result == TEST_OPERATION @@ -261,7 +260,7 @@ def test_error_operation(self, _, get_conn_mock, mock_project_id): # fmt: on with pytest.raises(AirflowException, match="error"): - self.hook.run_pipeline(body={}, location=TEST_LOCATION) # pylint: disable=no-value-for-parameter + self.hook.run_pipeline(body={}, location=TEST_LOCATION) class TestLifeSciencesHookWithoutProjectId(unittest.TestCase): @@ -288,9 +287,9 @@ def test_life_science_client_creation(self, mock_build, mock_authorize): return_value=None, ) @mock.patch("airflow.providers.google.cloud.hooks.life_sciences.LifeSciencesHook.get_conn") - def test_run_pipeline(self, get_conn_mock, mock_project_id): # pylint: disable=unused-argument + def test_run_pipeline(self, get_conn_mock, mock_project_id): with pytest.raises(AirflowException) as ctx: - self.hook.run_pipeline(body={}, location=TEST_LOCATION) # pylint: disable=no-value-for-parameter + self.hook.run_pipeline(body={}, location=TEST_LOCATION) assert ( "The project id must be passed either as keyword project_id parameter or as project_id extra in " diff --git a/tests/providers/google/cloud/hooks/test_pubsub.py b/tests/providers/google/cloud/hooks/test_pubsub.py index 628d6192ec7e7..637a54a1d7c47 100644 --- a/tests/providers/google/cloud/hooks/test_pubsub.py +++ b/tests/providers/google/cloud/hooks/test_pubsub.py @@ -55,7 +55,7 @@ def mock_init( gcp_conn_id, delegate_to=None, impersonation_chain=None, -): # pylint: disable=unused-argument +): pass @@ -257,9 +257,7 @@ def test_delete_subscription_api_call_error(self, mock_service): @mock.patch(PUBSUB_STRING.format('PubSubHook.subscriber_client')) @mock.patch(PUBSUB_STRING.format('uuid4'), new_callable=mock.Mock(return_value=lambda: TEST_UUID)) - def test_create_subscription_without_subscription_name( - self, mock_uuid, mock_service - ): # noqa # pylint: disable=unused-argument,line-too-long + def test_create_subscription_without_subscription_name(self, mock_uuid, mock_service): create_method = mock_service.create_subscription expected_name = EXPANDED_SUBSCRIPTION.replace(TEST_SUBSCRIPTION, f'sub-{TEST_UUID}') diff --git a/tests/providers/google/cloud/hooks/test_secret_manager.py b/tests/providers/google/cloud/hooks/test_secret_manager.py index 1f6e9d358533f..98853fbb9cf31 100644 --- a/tests/providers/google/cloud/hooks/test_secret_manager.py +++ b/tests/providers/google/cloud/hooks/test_secret_manager.py @@ -1,4 +1,3 @@ -# pylint: disable=no-member # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. 
See the NOTICE file diff --git a/tests/providers/google/cloud/hooks/test_stackdriver.py b/tests/providers/google/cloud/hooks/test_stackdriver.py index eb674e41176e8..c44f8e0bca41d 100644 --- a/tests/providers/google/cloud/hooks/test_stackdriver.py +++ b/tests/providers/google/cloud/hooks/test_stackdriver.py @@ -125,7 +125,7 @@ def test_stackdriver_enable_alert_policy(self, mock_policy_client, mock_get_cred metadata=(), ) mask = FieldMask(paths=["enabled"]) - alert_policy_disabled.enabled = True # pylint: disable=no-member + alert_policy_disabled.enabled = True mock_policy_client.return_value.update_alert_policy.assert_called_once_with( request=dict(alert_policy=alert_policy_disabled, update_mask=mask), retry=DEFAULT, @@ -158,7 +158,7 @@ def test_stackdriver_disable_alert_policy(self, mock_policy_client, mock_get_cre metadata=(), ) mask = FieldMask(paths=["enabled"]) - alert_policy_enabled.enabled = False # pylint: disable=no-member + alert_policy_enabled.enabled = False mock_policy_client.return_value.update_alert_policy.assert_called_once_with( request=dict(alert_policy=alert_policy_enabled, update_mask=mask), retry=DEFAULT, @@ -206,7 +206,7 @@ def test_stackdriver_upsert_alert_policy( alert_policy_to_create.name = None alert_policy_to_create.creation_record = None alert_policy_to_create.mutation_record = None - # pylint: disable=unsubscriptable-object + alert_policy_to_create.conditions[0].name = None mock_policy_client.return_value.create_alert_policy.assert_called_once_with( request=dict( @@ -318,7 +318,7 @@ def test_stackdriver_enable_notification_channel( project_id=PROJECT_ID, ) - notification_channel_disabled.enabled = True # pylint: disable=no-member + notification_channel_disabled.enabled = True mask = FieldMask(paths=['enabled']) mock_channel_client.return_value.update_notification_channel.assert_called_once_with( request=dict(notification_channel=notification_channel_disabled, update_mask=mask), @@ -348,7 +348,7 @@ def test_stackdriver_disable_notification_channel( project_id=PROJECT_ID, ) - notification_channel_enabled.enabled = False # pylint: disable=no-member + notification_channel_enabled.enabled = False mask = FieldMask(paths=['enabled']) mock_channel_client.return_value.update_notification_channel.assert_called_once_with( request=dict(notification_channel=notification_channel_enabled, update_mask=mask), diff --git a/tests/providers/google/cloud/hooks/test_workflows.py b/tests/providers/google/cloud/hooks/test_workflows.py index 4f3d4d0b1202d..f85cc2230d93c 100644 --- a/tests/providers/google/cloud/hooks/test_workflows.py +++ b/tests/providers/google/cloud/hooks/test_workflows.py @@ -48,7 +48,7 @@ def mock_init(*args, **kwargs): class TestWorkflowsHook: def setup_method(self, _): with mock.patch(BASE_PATH.format("GoogleBaseHook.__init__"), new=mock_init): - self.hook = WorkflowsHook(gcp_conn_id="test") # pylint: disable=attribute-defined-outside-init + self.hook = WorkflowsHook(gcp_conn_id="test") @mock.patch(BASE_PATH.format("WorkflowsHook._get_credentials")) @mock.patch(BASE_PATH.format("WorkflowsHook.client_info"), new_callable=mock.PropertyMock) diff --git a/tests/providers/google/cloud/operators/test_cloud_sql.py b/tests/providers/google/cloud/operators/test_cloud_sql.py index 46a30611734bb..8eda92d88ca51 100644 --- a/tests/providers/google/cloud/operators/test_cloud_sql.py +++ b/tests/providers/google/cloud/operators/test_cloud_sql.py @@ -16,7 +16,6 @@ # specific language governing permissions and limitations # under the License. 
-# pylint: disable=too-many-lines import os import unittest diff --git a/tests/providers/google/cloud/operators/test_cloud_storage_transfer_service.py b/tests/providers/google/cloud/operators/test_cloud_storage_transfer_service.py index 962815fab0218..3ea08bb6df384 100644 --- a/tests/providers/google/cloud/operators/test_cloud_storage_transfer_service.py +++ b/tests/providers/google/cloud/operators/test_cloud_storage_transfer_service.py @@ -327,7 +327,7 @@ def test_job_create_multiple(self, aws_hook, gcp_hook): ) def test_templates(self, _): dag_id = 'test_dag_id' - # pylint: disable=attribute-defined-outside-init + self.dag = DAG(dag_id, default_args={'start_date': DEFAULT_DATE}) op = CloudDataTransferServiceCreateJobOperator( body={"description": "{{ dag.dag_id }}"}, @@ -376,7 +376,7 @@ def test_job_update(self, mock_hook): def test_templates(self, _): dag_id = 'test_dag_id' args = {'start_date': DEFAULT_DATE} - self.dag = DAG(dag_id, default_args=args) # pylint: disable=attribute-defined-outside-init + self.dag = DAG(dag_id, default_args=args) op = CloudDataTransferServiceUpdateJobOperator( job_name='{{ dag.dag_id }}', body={'transferJob': {"name": "{{ dag.dag_id }}"}}, @@ -419,7 +419,7 @@ def test_job_delete(self, mock_hook): def test_job_delete_with_templates(self, _): dag_id = 'test_dag_id' args = {'start_date': DEFAULT_DATE} - self.dag = DAG(dag_id, default_args=args) # pylint: disable=attribute-defined-outside-init + self.dag = DAG(dag_id, default_args=args) op = CloudDataTransferServiceDeleteJobOperator( job_name='{{ dag.dag_id }}', gcp_conn_id='{{ dag.dag_id }}', @@ -474,7 +474,7 @@ def test_operation_get(self, mock_hook): def test_operation_get_with_templates(self, _): dag_id = 'test_dag_id' args = {'start_date': DEFAULT_DATE} - self.dag = DAG(dag_id, default_args=args) # pylint: disable=attribute-defined-outside-init + self.dag = DAG(dag_id, default_args=args) op = CloudDataTransferServiceGetOperationOperator( operation_name='{{ dag.dag_id }}', task_id='task-id', dag=self.dag ) @@ -523,7 +523,7 @@ def test_operation_list(self, mock_hook): def test_templates(self, _): dag_id = 'test_dag_id' args = {'start_date': DEFAULT_DATE} - self.dag = DAG(dag_id, default_args=args) # pylint: disable=attribute-defined-outside-init + self.dag = DAG(dag_id, default_args=args) op = CloudDataTransferServiceListOperationsOperator( request_filter={"job_names": ['{{ dag.dag_id }}']}, gcp_conn_id='{{ dag.dag_id }}', @@ -533,9 +533,7 @@ def test_templates(self, _): ti = TaskInstance(op, DEFAULT_DATE) ti.render_templates() - # pylint: disable=unsubscriptable-object assert dag_id == getattr(op, 'filter')['job_names'][0] - # pylint: enable=unsubscriptable-object assert dag_id == getattr(op, 'gcp_conn_id') @@ -567,7 +565,7 @@ def test_operation_pause(self, mock_hook): def test_operation_pause_with_templates(self, _): dag_id = 'test_dag_id' args = {'start_date': DEFAULT_DATE} - self.dag = DAG(dag_id, default_args=args) # pylint: disable=attribute-defined-outside-init + self.dag = DAG(dag_id, default_args=args) op = CloudDataTransferServicePauseOperationOperator( operation_name='{{ dag.dag_id }}', gcp_conn_id='{{ dag.dag_id }}', @@ -603,7 +601,7 @@ def test_operation_resume(self, mock_hook): task_id=TASK_ID, google_impersonation_chain=IMPERSONATION_CHAIN, ) - result = op.execute(None) # pylint: disable=assignment-from-no-return + result = op.execute(None) mock_hook.assert_called_once_with( api_version='v1', gcp_conn_id='google_cloud_default', @@ -623,7 +621,7 @@ def test_operation_resume(self, mock_hook): 
def test_operation_resume_with_templates(self, _): dag_id = 'test_dag_id' args = {'start_date': DEFAULT_DATE} - self.dag = DAG(dag_id, default_args=args) # pylint: disable=attribute-defined-outside-init + self.dag = DAG(dag_id, default_args=args) op = CloudDataTransferServiceResumeOperationOperator( operation_name='{{ dag.dag_id }}', gcp_conn_id='{{ dag.dag_id }}', @@ -659,7 +657,7 @@ def test_operation_cancel(self, mock_hook): task_id=TASK_ID, google_impersonation_chain=IMPERSONATION_CHAIN, ) - result = op.execute(None) # pylint: disable=assignment-from-no-return + result = op.execute(None) mock_hook.assert_called_once_with( api_version='v1', gcp_conn_id='google_cloud_default', @@ -679,7 +677,7 @@ def test_operation_cancel(self, mock_hook): def test_operation_cancel_with_templates(self, _): dag_id = 'test_dag_id' args = {'start_date': DEFAULT_DATE} - self.dag = DAG(dag_id, default_args=args) # pylint: disable=attribute-defined-outside-init + self.dag = DAG(dag_id, default_args=args) op = CloudDataTransferServiceCancelOperationOperator( operation_name='{{ dag.dag_id }}', gcp_conn_id='{{ dag.dag_id }}', @@ -732,7 +730,7 @@ def test_constructor(self): def test_templates(self, _): dag_id = 'test_dag_id' args = {'start_date': DEFAULT_DATE} - self.dag = DAG(dag_id, default_args=args) # pylint: disable=attribute-defined-outside-init + self.dag = DAG(dag_id, default_args=args) op = CloudDataTransferServiceS3ToGCSOperator( s3_bucket='{{ dag.dag_id }}', gcs_bucket='{{ dag.dag_id }}', @@ -748,9 +746,7 @@ def test_templates(self, _): assert dag_id == getattr(op, 'gcs_bucket') assert dag_id == getattr(op, 'description') - # pylint: disable=unsubscriptable-object assert dag_id == getattr(op, 'object_conditions')['exclude_prefixes'][0] - # pylint: enable=unsubscriptable-object assert dag_id == getattr(op, 'gcp_conn_id') @@ -891,7 +887,7 @@ def test_constructor(self): def test_templates(self, _): dag_id = 'test_dag_id' args = {'start_date': DEFAULT_DATE} - self.dag = DAG(dag_id, default_args=args) # pylint: disable=attribute-defined-outside-init + self.dag = DAG(dag_id, default_args=args) op = CloudDataTransferServiceGCSToGCSOperator( source_bucket='{{ dag.dag_id }}', destination_bucket='{{ dag.dag_id }}', @@ -907,9 +903,7 @@ def test_templates(self, _): assert dag_id == getattr(op, 'destination_bucket') assert dag_id == getattr(op, 'description') - # pylint: disable=unsubscriptable-object assert dag_id == getattr(op, 'object_conditions')['exclude_prefixes'][0] - # pylint: enable=unsubscriptable-object assert dag_id == getattr(op, 'gcp_conn_id') diff --git a/tests/providers/google/cloud/operators/test_compute.py b/tests/providers/google/cloud/operators/test_compute.py index 9729f64ed9590..d8a853f35349b 100644 --- a/tests/providers/google/cloud/operators/test_compute.py +++ b/tests/providers/google/cloud/operators/test_compute.py @@ -16,7 +16,6 @@ # specific language governing permissions and limitations # under the License. 
-# pylint: disable=too-many-lines import ast import unittest @@ -73,7 +72,7 @@ def test_instance_start(self, mock_hook): def test_instance_start_with_templates(self, _): dag_id = 'test_dag_id' args = {'start_date': DEFAULT_DATE} - self.dag = DAG(dag_id, default_args=args) # pylint: disable=attribute-defined-outside-init + self.dag = DAG(dag_id, default_args=args) op = ComputeEngineStartInstanceOperator( project_id='{{ dag.dag_id }}', zone='{{ dag.dag_id }}', @@ -152,7 +151,7 @@ def test_instance_stop(self, mock_hook): def test_instance_stop_with_templates(self, _): dag_id = 'test_dag_id' args = {'start_date': DEFAULT_DATE} - self.dag = DAG(dag_id, default_args=args) # pylint: disable=attribute-defined-outside-init + self.dag = DAG(dag_id, default_args=args) op = ComputeEngineStopInstanceOperator( project_id='{{ dag.dag_id }}', zone='{{ dag.dag_id }}', @@ -244,7 +243,7 @@ def test_set_machine_type(self, mock_hook): def test_set_machine_type_with_templates(self, _): dag_id = 'test_dag_id' args = {'start_date': DEFAULT_DATE} - self.dag = DAG(dag_id, default_args=args) # pylint: disable=attribute-defined-outside-init + self.dag = DAG(dag_id, default_args=args) op = ComputeEngineSetMachineTypeOperator( project_id='{{ dag.dag_id }}', zone='{{ dag.dag_id }}', diff --git a/tests/providers/google/cloud/operators/test_dataprep_system.py b/tests/providers/google/cloud/operators/test_dataprep_system.py index f59e3dc11f7c5..03e19d2f9b9b3 100644 --- a/tests/providers/google/cloud/operators/test_dataprep_system.py +++ b/tests/providers/google/cloud/operators/test_dataprep_system.py @@ -44,7 +44,7 @@ def setUp(self): conn_type="dataprep", extra=json.dumps(EXTRA), ) - session.add(dataprep_conn_id) # pylint: disable=expression-not-assigned + session.add(dataprep_conn_id) def tearDown(self): clear_db_connections() diff --git a/tests/providers/google/cloud/operators/test_dataproc.py b/tests/providers/google/cloud/operators/test_dataproc.py index 764c225f1769a..5b977f13b7ee7 100644 --- a/tests/providers/google/cloud/operators/test_dataproc.py +++ b/tests/providers/google/cloud/operators/test_dataproc.py @@ -540,11 +540,11 @@ def test_execute_if_cluster_exists_in_deleting_state( ): cluster = mock.MagicMock() cluster.status.state = 0 - cluster.status.State.DELETING = 0 # pylint: disable=no-member + cluster.status.State.DELETING = 0 cluster2 = mock.MagicMock() cluster2.status.state = 0 - cluster2.status.State.ERROR = 0 # pylint: disable=no-member + cluster2.status.State.ERROR = 0 mock_create_cluster.side_effect = [AlreadyExists("test"), cluster2] mock_generator.return_value = [0] diff --git a/tests/providers/google/cloud/operators/test_dlp.py b/tests/providers/google/cloud/operators/test_dlp.py index 7c68102784f7e..a0a5dba94948d 100644 --- a/tests/providers/google/cloud/operators/test_dlp.py +++ b/tests/providers/google/cloud/operators/test_dlp.py @@ -16,7 +16,7 @@ # specific language governing permissions and limitations # under the License. -# pylint: disable=R0904, C0111 + """ This module contains various unit tests for Google Cloud DLP Operators """ diff --git a/tests/providers/google/cloud/operators/test_dlp_system.py b/tests/providers/google/cloud/operators/test_dlp_system.py index 38f6bcb62e4ab..b43e8e39432f4 100644 --- a/tests/providers/google/cloud/operators/test_dlp_system.py +++ b/tests/providers/google/cloud/operators/test_dlp_system.py @@ -16,7 +16,7 @@ # specific language governing permissions and limitations # under the License. 
-# pylint: disable=C0111 + """ This module contains various unit tests for example_gcp_dlp DAG diff --git a/tests/providers/google/cloud/operators/test_kubernetes_engine.py b/tests/providers/google/cloud/operators/test_kubernetes_engine.py index 22920d1493c56..a0288df8058d9 100644 --- a/tests/providers/google/cloud/operators/test_kubernetes_engine.py +++ b/tests/providers/google/cloud/operators/test_kubernetes_engine.py @@ -81,13 +81,11 @@ def test_create_execute_error_body(self, body, mock_hook): project_id=TEST_GCP_PROJECT_ID, location=PROJECT_LOCATION, body=body, task_id=PROJECT_TASK_ID ) - # pylint: disable=missing-kwoa @mock.patch('airflow.providers.google.cloud.operators.kubernetes_engine.GKEHook') def test_create_execute_error_project_id(self, mock_hook): with pytest.raises(AirflowException): GKECreateClusterOperator(location=PROJECT_LOCATION, body=PROJECT_BODY, task_id=PROJECT_TASK_ID) - # pylint: disable=no-value-for-parameter @mock.patch('airflow.providers.google.cloud.operators.kubernetes_engine.GKEHook') def test_create_execute_error_location(self, mock_hook): with pytest.raises(AirflowException): @@ -109,13 +107,11 @@ def test_delete_execute(self, mock_hook): name=CLUSTER_NAME, project_id=TEST_GCP_PROJECT_ID ) - # pylint: disable=no-value-for-parameter @mock.patch('airflow.providers.google.cloud.operators.kubernetes_engine.GKEHook') def test_delete_execute_error_project_id(self, mock_hook): with pytest.raises(AirflowException): GKEDeleteClusterOperator(location=PROJECT_LOCATION, name=CLUSTER_NAME, task_id=PROJECT_TASK_ID) - # pylint: disable=missing-kwoa @mock.patch('airflow.providers.google.cloud.operators.kubernetes_engine.GKEHook') def test_delete_execute_error_cluster_name(self, mock_hook): with pytest.raises(AirflowException): @@ -123,7 +119,6 @@ def test_delete_execute_error_cluster_name(self, mock_hook): project_id=TEST_GCP_PROJECT_ID, location=PROJECT_LOCATION, task_id=PROJECT_TASK_ID ) - # pylint: disable=missing-kwoa @mock.patch('airflow.providers.google.cloud.operators.kubernetes_engine.GKEHook') def test_delete_execute_error_location(self, mock_hook): with pytest.raises(AirflowException): @@ -147,7 +142,6 @@ def setUp(self): def test_template_fields(self): assert set(KubernetesPodOperator.template_fields).issubset(GKEStartPodOperator.template_fields) - # pylint: disable=unused-argument @mock.patch.dict(os.environ, {}) @mock.patch( "airflow.hooks.base.BaseHook.get_connections", @@ -188,7 +182,6 @@ def test_execute(self, file_mock, mock_execute_in_subprocess, mock_gcp_hook, exe assert self.gke_op.config_file == FILE_NAME - # pylint: disable=unused-argument @mock.patch.dict(os.environ, {}) @mock.patch( "airflow.hooks.base.BaseHook.get_connections", diff --git a/tests/providers/google/cloud/operators/test_spanner.py b/tests/providers/google/cloud/operators/test_spanner.py index 6347fd24be4b9..57829ae172326 100644 --- a/tests/providers/google/cloud/operators/test_spanner.py +++ b/tests/providers/google/cloud/operators/test_spanner.py @@ -56,7 +56,7 @@ def test_instance_create(self, mock_hook): display_name=DISPLAY_NAME, task_id="id", ) - result = op.execute(None) # pylint: disable=assignment-from-no-return + result = op.execute(None) mock_hook.assert_called_once_with( gcp_conn_id="google_cloud_default", impersonation_chain=None, @@ -81,7 +81,7 @@ def test_instance_create_missing_project_id(self, mock_hook): display_name=DISPLAY_NAME, task_id="id", ) - result = op.execute(None) # pylint: disable=assignment-from-no-return + result = op.execute(None) 
mock_hook.assert_called_once_with( gcp_conn_id="google_cloud_default", impersonation_chain=None, @@ -107,7 +107,7 @@ def test_instance_update(self, mock_hook): display_name=DISPLAY_NAME, task_id="id", ) - result = op.execute(None) # pylint: disable=assignment-from-no-return + result = op.execute(None) mock_hook.assert_called_once_with( gcp_conn_id="google_cloud_default", impersonation_chain=None, @@ -132,7 +132,7 @@ def test_instance_update_missing_project_id(self, mock_hook): display_name=DISPLAY_NAME, task_id="id", ) - result = op.execute(None) # pylint: disable=assignment-from-no-return + result = op.execute(None) mock_hook.assert_called_once_with( gcp_conn_id="google_cloud_default", impersonation_chain=None, @@ -158,7 +158,7 @@ def test_instance_create_aborts_and_succeeds_if_instance_exists(self, mock_hook) display_name=DISPLAY_NAME, task_id="id", ) - result = op.execute(None) # pylint: disable=assignment-from-no-return + result = op.execute(None) mock_hook.assert_called_once_with( gcp_conn_id="google_cloud_default", impersonation_chain=None, @@ -251,7 +251,7 @@ def test_instance_query(self, mock_hook): query=INSERT_QUERY, task_id="id", ) - result = op.execute(None) # pylint: disable=assignment-from-no-return + result = op.execute(None) mock_hook.assert_called_once_with( gcp_conn_id="google_cloud_default", impersonation_chain=None, @@ -267,7 +267,7 @@ def test_instance_query_missing_project_id(self, mock_hook): op = SpannerQueryDatabaseInstanceOperator( instance_id=INSTANCE_ID, database_id=DB_ID, query=INSERT_QUERY, task_id="id" ) - result = op.execute(None) # pylint: disable=assignment-from-no-return + result = op.execute(None) mock_hook.assert_called_once_with( gcp_conn_id="google_cloud_default", impersonation_chain=None, diff --git a/tests/providers/google/cloud/operators/test_speech_to_text.py b/tests/providers/google/cloud/operators/test_speech_to_text.py index c13550e41b0f9..6bd09a9b44121 100644 --- a/tests/providers/google/cloud/operators/test_speech_to_text.py +++ b/tests/providers/google/cloud/operators/test_speech_to_text.py @@ -36,7 +36,7 @@ class TestCloudSql(unittest.TestCase): def test_recognize_speech_green_path(self, mock_hook): mock_hook.return_value.recognize_speech.return_value = MagicMock() - CloudSpeechToTextRecognizeSpeechOperator( # pylint: disable=no-value-for-parameter + CloudSpeechToTextRecognizeSpeechOperator( project_id=PROJECT_ID, gcp_conn_id=GCP_CONN_ID, config=CONFIG, @@ -58,7 +58,7 @@ def test_missing_config(self, mock_hook): mock_hook.return_value.recognize_speech.return_value = True with pytest.raises(AirflowException) as ctx: - CloudSpeechToTextRecognizeSpeechOperator( # pylint: disable=missing-kwoa + CloudSpeechToTextRecognizeSpeechOperator( project_id=PROJECT_ID, gcp_conn_id=GCP_CONN_ID, audio=AUDIO, task_id="id" ).execute(context={"task_instance": Mock()}) @@ -71,7 +71,7 @@ def test_missing_audio(self, mock_hook): mock_hook.return_value.recognize_speech.return_value = True with pytest.raises(AirflowException) as ctx: - CloudSpeechToTextRecognizeSpeechOperator( # pylint: disable=missing-kwoa + CloudSpeechToTextRecognizeSpeechOperator( project_id=PROJECT_ID, gcp_conn_id=GCP_CONN_ID, config=CONFIG, task_id="id" ).execute(context={"task_instance": Mock()}) diff --git a/tests/providers/google/cloud/transfers/test_azure_fileshare_to_gcs_system.py b/tests/providers/google/cloud/transfers/test_azure_fileshare_to_gcs_system.py index 5867cc74fa1fd..7b0293861316d 100644 --- a/tests/providers/google/cloud/transfers/test_azure_fileshare_to_gcs_system.py +++ 
b/tests/providers/google/cloud/transfers/test_azure_fileshare_to_gcs_system.py @@ -51,7 +51,7 @@ def provide_azure_fileshare_with_directory(): login=AZURE_LOGIN, password=AZURE_KEY, ) - session.add(azure_fileshare_conn_id) # pylint: disable=expression-not-assigned + session.add(azure_fileshare_conn_id) with provide_azure_fileshare( share_name=AZURE_SHARE_NAME, diff --git a/tests/providers/google/cloud/transfers/test_gcs_to_sftp.py b/tests/providers/google/cloud/transfers/test_gcs_to_sftp.py index 95fc0e3868247..7a812e76d2bee 100644 --- a/tests/providers/google/cloud/transfers/test_gcs_to_sftp.py +++ b/tests/providers/google/cloud/transfers/test_gcs_to_sftp.py @@ -36,7 +36,6 @@ DESTINATION_SFTP = "destination_path" -# pylint: disable=unused-argument class TestGoogleCloudStorageToSFTPOperator(unittest.TestCase): @parameterized.expand( [ diff --git a/tests/providers/google/cloud/transfers/test_mssql_to_gcs.py b/tests/providers/google/cloud/transfers/test_mssql_to_gcs.py index d9978d813aaeb..8b27c271c5f0b 100644 --- a/tests/providers/google/cloud/transfers/test_mssql_to_gcs.py +++ b/tests/providers/google/cloud/transfers/test_mssql_to_gcs.py @@ -129,7 +129,7 @@ def test_schema_file(self, gcs_hook_mock_class, mssql_hook_mock_class): gcs_hook_mock = gcs_hook_mock_class.return_value - def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): # pylint: disable=unused-argument + def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): if obj == SCHEMA_FILENAME: with open(tmp_filename, 'rb') as file: assert b''.join(SCHEMA_JSON) == file.read() diff --git a/tests/providers/google/cloud/transfers/test_mysql_to_gcs.py b/tests/providers/google/cloud/transfers/test_mysql_to_gcs.py index a76bfb5d904bd..86e0d6d043c5c 100644 --- a/tests/providers/google/cloud/transfers/test_mysql_to_gcs.py +++ b/tests/providers/google/cloud/transfers/test_mysql_to_gcs.py @@ -22,7 +22,7 @@ from unittest import mock import pytest -from MySQLdb import ProgrammingError # pylint: disable=no-name-in-module +from MySQLdb import ProgrammingError from parameterized import parameterized from airflow.providers.google.cloud.transfers.mysql_to_gcs import MySQLToGCSOperator @@ -278,7 +278,7 @@ def test_schema_file(self, gcs_hook_mock_class, mysql_hook_mock_class): gcs_hook_mock = gcs_hook_mock_class.return_value - def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): # pylint: disable=unused-argument + def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): if obj == SCHEMA_FILENAME: assert not gzip with open(tmp_filename, 'rb') as file: @@ -304,7 +304,7 @@ def test_schema_file_with_custom_schema(self, gcs_hook_mock_class, mysql_hook_mo gcs_hook_mock = gcs_hook_mock_class.return_value - def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): # pylint: disable=unused-argument + def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): if obj == SCHEMA_FILENAME: assert not gzip with open(tmp_filename, 'rb') as file: diff --git a/tests/providers/google/cloud/transfers/test_oracle_to_gcs.py b/tests/providers/google/cloud/transfers/test_oracle_to_gcs.py index 13b743151ed48..a49c224c7aab3 100644 --- a/tests/providers/google/cloud/transfers/test_oracle_to_gcs.py +++ b/tests/providers/google/cloud/transfers/test_oracle_to_gcs.py @@ -15,7 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
-# pylint: disable=c-extension-no-member + import unittest from unittest import mock @@ -127,7 +127,7 @@ def test_schema_file(self, gcs_hook_mock_class, oracle_hook_mock_class): gcs_hook_mock = gcs_hook_mock_class.return_value - def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): # pylint: disable=unused-argument + def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): if obj == SCHEMA_FILENAME: with open(tmp_filename, 'rb') as file: assert b''.join(SCHEMA_JSON) == file.read() diff --git a/tests/providers/google/cloud/transfers/test_postgres_to_gcs.py b/tests/providers/google/cloud/transfers/test_postgres_to_gcs.py index a8dfca2b86480..564c5ead19033 100644 --- a/tests/providers/google/cloud/transfers/test_postgres_to_gcs.py +++ b/tests/providers/google/cloud/transfers/test_postgres_to_gcs.py @@ -150,7 +150,7 @@ def test_schema_file(self, gcs_hook_mock_class): gcs_hook_mock = gcs_hook_mock_class.return_value - def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): # pylint: disable=unused-argument + def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): if obj == SCHEMA_FILENAME: with open(tmp_filename, 'rb') as file: assert SCHEMA_JSON == file.read() diff --git a/tests/providers/google/cloud/transfers/test_presto_to_gcs.py b/tests/providers/google/cloud/transfers/test_presto_to_gcs.py index 3eb9f63025b02..80a5a50386965 100644 --- a/tests/providers/google/cloud/transfers/test_presto_to_gcs.py +++ b/tests/providers/google/cloud/transfers/test_presto_to_gcs.py @@ -160,7 +160,7 @@ def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): def test_save_as_json_with_schema_file(self, mock_gcs_hook, mock_presto_hook): """Test writing schema files.""" - def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): # pylint: disable=unused-argument + def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): if obj == SCHEMA_FILENAME: with open(tmp_filename, "rb") as file: assert SCHEMA_JSON == file.read() @@ -296,7 +296,7 @@ def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): def test_save_as_csv_with_schema_file(self, mock_gcs_hook, mock_presto_hook): """Test writing schema files.""" - def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): # pylint: disable=unused-argument + def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): if obj == SCHEMA_FILENAME: with open(tmp_filename, "rb") as file: assert SCHEMA_JSON == file.read() diff --git a/tests/providers/google/cloud/transfers/test_sftp_to_gcs.py b/tests/providers/google/cloud/transfers/test_sftp_to_gcs.py index 4592f030090e2..2cbbf6b5955ea 100644 --- a/tests/providers/google/cloud/transfers/test_sftp_to_gcs.py +++ b/tests/providers/google/cloud/transfers/test_sftp_to_gcs.py @@ -52,7 +52,6 @@ DESTINATION_PATH_FILE = "destination_dir/copy.txt" -# pylint: disable=unused-argument class TestSFTPToGCSOperator(unittest.TestCase): @mock.patch("airflow.providers.google.cloud.transfers.sftp_to_gcs.GCSHook") @mock.patch("airflow.providers.google.cloud.transfers.sftp_to_gcs.SFTPHook") diff --git a/tests/providers/google/cloud/transfers/test_trino_to_gcs.py b/tests/providers/google/cloud/transfers/test_trino_to_gcs.py index 7cb6539a3d846..1e5443f6795b7 100644 --- a/tests/providers/google/cloud/transfers/test_trino_to_gcs.py +++ b/tests/providers/google/cloud/transfers/test_trino_to_gcs.py @@ -160,7 +160,7 @@ def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): def test_save_as_json_with_schema_file(self, mock_gcs_hook, mock_trino_hook): """Test writing schema 
files.""" - def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): # pylint: disable=unused-argument + def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): if obj == SCHEMA_FILENAME: with open(tmp_filename, "rb") as file: assert SCHEMA_JSON == file.read() @@ -296,7 +296,7 @@ def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): def test_save_as_csv_with_schema_file(self, mock_gcs_hook, mock_trino_hook): """Test writing schema files.""" - def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): # pylint: disable=unused-argument + def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): if obj == SCHEMA_FILENAME: with open(tmp_filename, "rb") as file: assert SCHEMA_JSON == file.read() diff --git a/tests/providers/google/cloud/utils/gcp_authenticator.py b/tests/providers/google/cloud/utils/gcp_authenticator.py index 2fc28c6197ccc..cae98d9bd79de 100644 --- a/tests/providers/google/cloud/utils/gcp_authenticator.py +++ b/tests/providers/google/cloud/utils/gcp_authenticator.py @@ -18,7 +18,7 @@ import json import os import subprocess -from typing import Optional # noqa: W0611 +from typing import Optional from airflow import settings from airflow.exceptions import AirflowException diff --git a/tests/providers/google/common/auth_backend/test_google_openid.py b/tests/providers/google/common/auth_backend/test_google_openid.py index c6dc6d65ba9e0..5ad7f49e07846 100644 --- a/tests/providers/google/common/auth_backend/test_google_openid.py +++ b/tests/providers/google/common/auth_backend/test_google_openid.py @@ -39,7 +39,7 @@ def google_openid_app(): @pytest.fixture(scope="module") def admin_user(google_openid_app): - appbuilder = google_openid_app.appbuilder # pylint: disable=no-member + appbuilder = google_openid_app.appbuilder role_admin = appbuilder.sm.find_role("Admin") tester = appbuilder.sm.find_user(username="test") if not tester: diff --git a/tests/providers/google/common/hooks/test_base_google.py b/tests/providers/google/common/hooks/test_base_google.py index d59bd4d645640..70d435ed12dee 100644 --- a/tests/providers/google/common/hooks/test_base_google.py +++ b/tests/providers/google/common/hooks/test_base_google.py @@ -503,7 +503,7 @@ def test_provide_gcp_credential_file_decorator_key_path(self): self.instance.extras = {'extra__google_cloud_platform__key_path': key_path} @hook.GoogleBaseHook.provide_gcp_credential_file - def assert_gcp_credential_file_in_env(hook_instance): # pylint: disable=unused-argument + def assert_gcp_credential_file_in_env(hook_instance): assert os.environ[CREDENTIALS] == key_path assert_gcp_credential_file_in_env(self.instance) @@ -519,7 +519,7 @@ def test_provide_gcp_credential_file_decorator_key_content(self, mock_file): mock_file_handler.write = string_file.write @hook.GoogleBaseHook.provide_gcp_credential_file - def assert_gcp_credential_file_in_env(hook_instance): # pylint: disable=unused-argument + def assert_gcp_credential_file_in_env(hook_instance): assert os.environ[CREDENTIALS] == file_name assert file_content == string_file.getvalue() diff --git a/tests/providers/grpc/hooks/test_grpc.py b/tests/providers/grpc/hooks/test_grpc.py index ec1e7d6658e06..3d9261eb0f632 100644 --- a/tests/providers/grpc/hooks/test_grpc.py +++ b/tests/providers/grpc/hooks/test_grpc.py @@ -49,13 +49,13 @@ def get_airflow_connection_with_port(): class StubClass: - def __init__(self, _): # pylint: disable=unused-argument + def __init__(self, _): pass - def single_call(self, data): # noqa + def single_call(self, data): return data - def 
stream_call(self, data): # noqa pylint: disable=unused-argument + def stream_call(self, data): return ["streaming", "call"] diff --git a/tests/providers/hashicorp/_internal_client/test_vault_client.py b/tests/providers/hashicorp/_internal_client/test_vault_client.py index 6b7d4aad97740..fb95a90104a0c 100644 --- a/tests/providers/hashicorp/_internal_client/test_vault_client.py +++ b/tests/providers/hashicorp/_internal_client/test_vault_client.py @@ -1,4 +1,3 @@ -# pylint: disable=no-member # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information @@ -23,7 +22,7 @@ import pytest from hvac.exceptions import InvalidPath, VaultError -from airflow.providers.hashicorp._internal_client.vault_client import _VaultClient # noqa +from airflow.providers.hashicorp._internal_client.vault_client import _VaultClient class TestVaultClient(TestCase): diff --git a/tests/providers/hashicorp/hooks/test_vault.py b/tests/providers/hashicorp/hooks/test_vault.py index ab3c0b06d711a..394bd0b43a784 100644 --- a/tests/providers/hashicorp/hooks/test_vault.py +++ b/tests/providers/hashicorp/hooks/test_vault.py @@ -1,4 +1,3 @@ -# pylint: disable=no-member # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information diff --git a/tests/providers/imap/hooks/test_imap.py b/tests/providers/imap/hooks/test_imap.py index 671d525461501..56654ea215fee 100644 --- a/tests/providers/imap/hooks/test_imap.py +++ b/tests/providers/imap/hooks/test_imap.py @@ -73,7 +73,7 @@ def test_connect_and_disconnect(self, mock_imaplib): pass mock_imaplib.IMAP4_SSL.assert_called_once_with('imap_server_address') - mock_conn.login.assert_called_once_with('imap_user', 'imap_password') # pylint: disable=no-member + mock_conn.login.assert_called_once_with('imap_user', 'imap_password') assert mock_conn.logout.call_count == 1 @patch(imaplib_string) diff --git a/tests/providers/jdbc/hooks/test_jdbc.py b/tests/providers/jdbc/hooks/test_jdbc.py index 091fe56060978..7ac0279c8ca5d 100644 --- a/tests/providers/jdbc/hooks/test_jdbc.py +++ b/tests/providers/jdbc/hooks/test_jdbc.py @@ -51,7 +51,7 @@ def test_jdbc_conn_connection(self, jdbc_mock): jdbc_conn = jdbc_hook.get_conn() assert jdbc_mock.called assert isinstance(jdbc_conn, Mock) - assert jdbc_conn.name == jdbc_mock.return_value.name # pylint: disable=no-member + assert jdbc_conn.name == jdbc_mock.return_value.name @patch("airflow.providers.jdbc.hooks.jdbc.jaydebeapi.connect") def test_jdbc_conn_set_autocommit(self, _): diff --git a/tests/providers/jira/hooks/test_jira.py b/tests/providers/jira/hooks/test_jira.py index d511ebb6820e9..061011152cc31 100644 --- a/tests/providers/jira/hooks/test_jira.py +++ b/tests/providers/jira/hooks/test_jira.py @@ -45,4 +45,4 @@ def test_jira_client_connection(self, jira_mock): assert jira_mock.called assert isinstance(jira_hook.client, Mock) - assert jira_hook.client.name == jira_mock.return_value.name # pylint: disable=no-member + assert jira_hook.client.name == jira_mock.return_value.name diff --git a/tests/providers/jira/sensors/test_jira.py b/tests/providers/jira/sensors/test_jira.py index 4d4a54b8ef45d..a23256783adfb 100644 --- a/tests/providers/jira/sensors/test_jira.py +++ b/tests/providers/jira/sensors/test_jira.py @@ -74,5 +74,5 @@ def test_issue_label_set(self, jira_mock): assert jira_mock.return_value.issue.called @staticmethod 
- def field_checker_func(context, issue): # pylint: disable=unused-argument + def field_checker_func(context, issue): return "test-label-1" in issue['fields']['labels'] diff --git a/tests/providers/microsoft/azure/hooks/test_azure_data_factory.py b/tests/providers/microsoft/azure/hooks/test_azure_data_factory.py index e771b48643711..8c4685441bcfa 100644 --- a/tests/providers/microsoft/azure/hooks/test_azure_data_factory.py +++ b/tests/providers/microsoft/azure/hooks/test_azure_data_factory.py @@ -15,7 +15,6 @@ # specific language governing permissions and limitations # under the License. -# pylint: disable=redefined-outer-name,unused-argument import json from unittest.mock import MagicMock, Mock diff --git a/tests/providers/microsoft/azure/operators/test_azure_batch.py b/tests/providers/microsoft/azure/operators/test_azure_batch.py index 926872a7ffdc9..a3da35d538bc5 100644 --- a/tests/providers/microsoft/azure/operators/test_azure_batch.py +++ b/tests/providers/microsoft/azure/operators/test_azure_batch.py @@ -40,7 +40,7 @@ $TargetDedicated = $isWorkingWeekdayHour ? 20:10;""" -class TestAzureBatchOperator(unittest.TestCase): # pylint: disable=too-many-instance-attributes +class TestAzureBatchOperator(unittest.TestCase): # set up the test environment @mock.patch("airflow.providers.microsoft.azure.hooks.azure_batch.AzureBatchHook") @mock.patch("airflow.providers.microsoft.azure.hooks.azure_batch.BatchServiceClient") diff --git a/tests/providers/mysql/operators/test_mysql.py b/tests/providers/mysql/operators/test_mysql.py index c8d31284eb057..f36fab88b14e4 100644 --- a/tests/providers/mysql/operators/test_mysql.py +++ b/tests/providers/mysql/operators/test_mysql.py @@ -102,7 +102,7 @@ def test_overwrite_schema(self, client): database="foobar", ) - from MySQLdb import OperationalError # pylint: disable=no-name-in-module + from MySQLdb import OperationalError try: op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True) diff --git a/tests/providers/odbc/hooks/test_odbc.py b/tests/providers/odbc/hooks/test_odbc.py index 8ce89b2edba27..03e31e855c9b1 100644 --- a/tests/providers/odbc/hooks/test_odbc.py +++ b/tests/providers/odbc/hooks/test_odbc.py @@ -1,4 +1,3 @@ -# pylint: disable=c-extension-no-member # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. 
See the NOTICE file diff --git a/tests/providers/oracle/hooks/test_oracle.py b/tests/providers/oracle/hooks/test_oracle.py index b3a6a61476146..0101a34b83c07 100644 --- a/tests/providers/oracle/hooks/test_oracle.py +++ b/tests/providers/oracle/hooks/test_oracle.py @@ -27,7 +27,6 @@ from airflow.models import Connection from airflow.providers.oracle.hooks.oracle import OracleHook -# pylint: disable=c-extension-no-member try: import cx_Oracle except ImportError: diff --git a/tests/providers/postgres/hooks/test_postgres.py b/tests/providers/postgres/hooks/test_postgres.py index 9a0226fd3798f..d78fa4c08538b 100644 --- a/tests/providers/postgres/hooks/test_postgres.py +++ b/tests/providers/postgres/hooks/test_postgres.py @@ -43,7 +43,7 @@ class UnitTestPostgresHook(PostgresHook): @mock.patch('airflow.providers.postgres.hooks.postgres.psycopg2.connect') def test_get_conn_non_default_id(self, mock_connect): - self.db_hook.test_conn_id = 'non_default' # pylint: disable=attribute-defined-outside-init + self.db_hook.test_conn_id = 'non_default' self.db_hook.get_conn() mock_connect.assert_called_once_with( user='login', password='password', host='host', dbname='schema', port=None diff --git a/tests/providers/qubole/hooks/test_qubole.py b/tests/providers/qubole/hooks/test_qubole.py index 6d9b8213fc0c5..9a6c0c9cb49c5 100644 --- a/tests/providers/qubole/hooks/test_qubole.py +++ b/tests/providers/qubole/hooks/test_qubole.py @@ -30,7 +30,6 @@ add_tags = QuboleHook._add_tags -# pylint: disable = unused-argument def get_result_mock(fp, inline, delim, fetch, arguments): if arguments[0] == 'true': fp.write(bytearray(RESULTS_WITH_HEADER, 'utf-8')) diff --git a/tests/providers/qubole/operators/test_qubole_check.py b/tests/providers/qubole/operators/test_qubole_check.py index 3875295507570..ac8f1edb180f2 100644 --- a/tests/providers/qubole/operators/test_qubole_check.py +++ b/tests/providers/qubole/operators/test_qubole_check.py @@ -36,7 +36,6 @@ ) -# pylint: disable=unused-argument @pytest.mark.parametrize( "operator_class, kwargs, parent_check_operator", [ diff --git a/tests/providers/sftp/operators/test_sftp.py b/tests/providers/sftp/operators/test_sftp.py index aa752b1807f33..478cd3614d92b 100644 --- a/tests/providers/sftp/operators/test_sftp.py +++ b/tests/providers/sftp/operators/test_sftp.py @@ -383,7 +383,7 @@ def test_arg_checking(self): ) try: task_1.execute(None) - except Exception: # pylint: disable=broad-except + except Exception: pass assert task_1.ssh_hook.ssh_conn_id == TEST_CONN_ID @@ -397,7 +397,7 @@ def test_arg_checking(self): ) try: task_2.execute(None) - except Exception: # pylint: disable=broad-except + except Exception: pass assert task_2.ssh_hook.ssh_conn_id == TEST_CONN_ID @@ -413,7 +413,7 @@ def test_arg_checking(self): ) try: task_3.execute(None) - except Exception: # pylint: disable=broad-except + except Exception: pass assert task_3.ssh_hook.ssh_conn_id == self.hook.ssh_conn_id diff --git a/tests/providers/snowflake/hooks/test_snowflake.py b/tests/providers/snowflake/hooks/test_snowflake.py index deec76dd38773..d40fc04cd8445 100644 --- a/tests/providers/snowflake/hooks/test_snowflake.py +++ b/tests/providers/snowflake/hooks/test_snowflake.py @@ -121,7 +121,7 @@ def test_get_conn_params(self): 'authenticator': 'snowflake', 'session_parameters': {"QUERY_TAG": "This is a test hook"}, } - assert self.db_hook.snowflake_conn_id == 'snowflake_default' # pylint: disable=no-member + assert self.db_hook.snowflake_conn_id == 'snowflake_default' assert conn_params_shouldbe == 
self.db_hook._get_conn_params() def test_get_conn(self): diff --git a/tests/providers/sqlite/hooks/test_sqlite.py b/tests/providers/sqlite/hooks/test_sqlite.py index 80fd6a4947eaa..d618fed97af74 100644 --- a/tests/providers/sqlite/hooks/test_sqlite.py +++ b/tests/providers/sqlite/hooks/test_sqlite.py @@ -44,7 +44,7 @@ def test_get_conn(self, mock_connect): @patch('airflow.providers.sqlite.hooks.sqlite.sqlite3.connect') def test_get_conn_non_default_id(self, mock_connect): - self.db_hook.test_conn_id = 'non_default' # pylint: disable=attribute-defined-outside-init + self.db_hook.test_conn_id = 'non_default' self.db_hook.get_conn() mock_connect.assert_called_once_with('host') self.db_hook.get_connection.assert_called_once_with('non_default') diff --git a/tests/providers/ssh/hooks/test_ssh.py b/tests/providers/ssh/hooks/test_ssh.py index 8cb4366fb006b..878cfa6e83b99 100644 --- a/tests/providers/ssh/hooks/test_ssh.py +++ b/tests/providers/ssh/hooks/test_ssh.py @@ -352,16 +352,14 @@ def test_tunnel_with_private_key_ecdsa(self, ssh_mock): def test_ssh_connection(self): hook = SSHHook(ssh_conn_id='ssh_default') with hook.get_conn() as client: - # Note - Pylint will fail with no-member here due to https://github.com/PyCQA/pylint/issues/1437 - (_, stdout, _) = client.exec_command('ls') # pylint: disable=no-member + (_, stdout, _) = client.exec_command('ls') assert stdout.read() is not None def test_ssh_connection_no_connection_id(self): hook = SSHHook(remote_host='localhost') assert hook.ssh_conn_id is None with hook.get_conn() as client: - # Note - Pylint will fail with no-member here due to https://github.com/PyCQA/pylint/issues/1437 - (_, stdout, _) = client.exec_command('ls') # pylint: disable=no-member + (_, stdout, _) = client.exec_command('ls') assert stdout.read() is not None def test_ssh_connection_old_cm(self): diff --git a/tests/providers/ssh/operators/test_ssh.py b/tests/providers/ssh/operators/test_ssh.py index 28b19ea020105..e39b8fbb8c156 100644 --- a/tests/providers/ssh/operators/test_ssh.py +++ b/tests/providers/ssh/operators/test_ssh.py @@ -152,7 +152,7 @@ def test_arg_checking(self): ) try: task_1.execute(None) - except Exception: # pylint: disable=broad-except + except Exception: pass assert task_1.ssh_hook.ssh_conn_id == TEST_CONN_ID @@ -165,7 +165,7 @@ def test_arg_checking(self): ) try: task_2.execute(None) - except Exception: # pylint: disable=broad-except + except Exception: pass assert task_2.ssh_hook.ssh_conn_id == TEST_CONN_ID @@ -180,7 +180,7 @@ def test_arg_checking(self): ) try: task_3.execute(None) - except Exception: # pylint: disable=broad-except + except Exception: pass assert task_3.ssh_hook.ssh_conn_id == self.hook.ssh_conn_id @@ -204,6 +204,6 @@ def test_get_pyt_set_correctly(self, command, get_pty_in, get_pty_out): ) try: task.execute(None) - except Exception: # pylint: disable=broad-except + except Exception: pass assert task.get_pty == get_pty_out diff --git a/tests/providers/tableau/hooks/test_tableau.py b/tests/providers/tableau/hooks/test_tableau.py index 66ecdf747dd57..4c8a6a643c024 100644 --- a/tests/providers/tableau/hooks/test_tableau.py +++ b/tests/providers/tableau/hooks/test_tableau.py @@ -90,7 +90,7 @@ def test_get_conn_auth_via_token_and_site_in_init(self, mock_server, mock_tablea @patch('airflow.providers.tableau.hooks.tableau.TableauAuth') @patch('airflow.providers.tableau.hooks.tableau.Server') @patch('airflow.providers.tableau.hooks.tableau.Pager', return_value=[1, 2, 3]) - def test_get_all(self, mock_pager, mock_server, mock_tableau_auth): # 
pylint: disable=unused-argument + def test_get_all(self, mock_pager, mock_server, mock_tableau_auth): """ Test get all """ diff --git a/tests/security/test_kerberos.py b/tests/security/test_kerberos.py index 674ff16769463..c14a40e4901c2 100644 --- a/tests/security/test_kerberos.py +++ b/tests/security/test_kerberos.py @@ -34,16 +34,14 @@ class TestKerberos(unittest.TestCase): def setUp(self): self.args = Namespace( keytab=KRB5_KTNAME, principal=None, pid=None, daemon=None, stdout=None, stderr=None, log_file=None - ) # pylint: disable=no-member + ) @conf_vars({('kerberos', 'keytab'): KRB5_KTNAME}) def test_renew_from_kt(self): """ We expect no result, but a successful run. No more TypeError """ - assert ( - renew_from_kt(principal=self.args.principal, keytab=self.args.keytab) is None - ) # pylint: disable=no-member + assert renew_from_kt(principal=self.args.principal, keytab=self.args.keytab) is None @conf_vars({('kerberos', 'keytab'): ''}) def test_args_from_cli(self): @@ -53,7 +51,7 @@ def test_args_from_cli(self): self.args.keytab = "test_keytab" with pytest.raises(SystemExit) as ctx: - renew_from_kt(principal=self.args.principal, keytab=self.args.keytab) # pylint: disable=no-member + renew_from_kt(principal=self.args.principal, keytab=self.args.keytab) with self.assertLogs(kerberos.log) as log: assert ( diff --git a/tests/sensors/test_external_task_sensor.py b/tests/sensors/test_external_task_sensor.py index 19161a7657b36..187fdb2c5d558 100644 --- a/tests/sensors/test_external_task_sensor.py +++ b/tests/sensors/test_external_task_sensor.py @@ -161,7 +161,7 @@ def test_external_task_sensor_fn_multiple_execution_dates(self): # The test_with_failure task is excepted to fail # once per minute (the run on the first second of # each minute). - except Exception as e: # pylint: disable=broad-except + except Exception as e: failed_tis = ( session.query(TI) .filter( @@ -496,7 +496,6 @@ def clear_tasks(dag_bag, dag, task, start_date=DEFAULT_DATE, end_date=DEFAULT_DA subdag.clear(start_date=start_date, end_date=end_date, dag_bag=dag_bag) -# pylint: disable=redefined-outer-name def test_external_task_marker_transitive(dag_bag_ext): """ Test clearing tasks across DAGs. @@ -511,7 +510,6 @@ def test_external_task_marker_transitive(dag_bag_ext): assert_ti_state_equal(ti_b_3, State.NONE) -# pylint: disable=redefined-outer-name def test_external_task_marker_clear_activate(dag_bag_parent_child): """ Test clearing tasks across DAGs and make sure the right DagRuns are activated. 
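Aside: most of the hunks above simply delete `# pylint: disable=unused-argument` from test methods whose extra parameters exist only because `@mock.patch` injects one mock per patched target. A minimal sketch of that calling convention follows (hypothetical test and patch target, not taken from this patch); pyflakes/flake8 does not report unused function parameters, so no replacement suppression is needed.

import unittest
from unittest import mock


class ExampleTest(unittest.TestCase):
    # Each @mock.patch appends one positional argument to the test signature
    # (innermost decorator first), whether or not the body ever reads it.
    @mock.patch("os.getcwd")  # stand-in target; the Airflow tests patch provider hooks instead
    def test_runs_without_reading_the_mock(self, mock_getcwd):
        # The parameter is required by the decorator's calling convention;
        # leaving it unread is harmless once pylint's unused-argument check is gone.
        self.assertTrue(True)


if __name__ == "__main__":
    unittest.main()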
diff --git a/tests/serialization/test_dag_serialization.py b/tests/serialization/test_dag_serialization.py index c1ce92e138faa..f24e862cadca2 100644 --- a/tests/serialization/test_dag_serialization.py +++ b/tests/serialization/test_dag_serialization.py @@ -271,7 +271,7 @@ def setUp(self): ) ) ) - self.maxDiff = None # pylint: disable=invalid-name + self.maxDiff = None def test_serialization(self): """Serialization and deserialization should work for every DAG and Operator.""" @@ -973,7 +973,7 @@ def test_edge_info_serialization(self): with DAG("test_edge_info_serialization", start_date=datetime(2020, 1, 1)) as dag: task1 = DummyOperator(task_id="task1") task2 = DummyOperator(task_id="task2") - task1 >> Label("test label") >> task2 # pylint: disable=W0106 + task1 >> Label("test label") >> task2 dag_dict = SerializedDAG.to_dict(dag) SerializedDAG.validate_schema(dag_dict) diff --git a/tests/task/task_runner/test_standard_task_runner.py b/tests/task/task_runner/test_standard_task_runner.py index 35fb41bfe3590..e6f79f194a4f0 100644 --- a/tests/task/task_runner/test_standard_task_runner.py +++ b/tests/task/task_runner/test_standard_task_runner.py @@ -68,7 +68,7 @@ def logging_and_db(self): airflow_logger.handlers = [] try: clear_db_runs() - except Exception: # noqa pylint: disable=broad-except + except Exception: # It might happen that we lost connection to the server here so we need to ignore any errors here pass diff --git a/tests/test_utils/asserts.py b/tests/test_utils/asserts.py index fe8e7dbb0cfd6..7447afe880522 100644 --- a/tests/test_utils/asserts.py +++ b/tests/test_utils/asserts.py @@ -67,7 +67,7 @@ def after_cursor_execute(self, *args, **kwargs): self.result[f"{stack_info}"] += 1 -count_queries = CountQueries # pylint: disable=invalid-name +count_queries = CountQueries @contextmanager diff --git a/tests/test_utils/fake_datetime.py b/tests/test_utils/fake_datetime.py index 192856b3161fd..3e7a374560623 100644 --- a/tests/test_utils/fake_datetime.py +++ b/tests/test_utils/fake_datetime.py @@ -24,5 +24,5 @@ class FakeDatetime(datetime): A fake replacement for datetime that can be mocked for testing. 
""" - def __new__(cls, *args, **kwargs): # pylint: disable=signature-differs + def __new__(cls, *args, **kwargs): return datetime.__new__(datetime, *args, **kwargs) diff --git a/tests/test_utils/hdfs_utils.py b/tests/test_utils/hdfs_utils.py index 348396f84e2e4..6c9a88bba8dc0 100644 --- a/tests/test_utils/hdfs_utils.py +++ b/tests/test_utils/hdfs_utils.py @@ -36,7 +36,7 @@ class FakeSnakeBiteClient: def __init__(self): self.started = True - def ls(self, path, include_toplevel=False): # pylint: disable=invalid-name + def ls(self, path, include_toplevel=False): """ the fake snakebite client diff --git a/tests/test_utils/perf/dags/elastic_dag.py b/tests/test_utils/perf/dags/elastic_dag.py index 5e9a49496b704..e958d68d521ec 100644 --- a/tests/test_utils/perf/dags/elastic_dag.py +++ b/tests/test_utils/perf/dags/elastic_dag.py @@ -171,7 +171,6 @@ class DagShape(Enum): "__".join( [ DAG_PREFIX, - # pylint: disable=no-member f"SHAPE={SHAPE.name.lower()}", f"DAGS_COUNT={dag_no}_of_{DAG_COUNT}", f"TASKS_COUNT=${TASKS_COUNT}", diff --git a/tests/test_utils/perf/perf_kit/memory.py b/tests/test_utils/perf/perf_kit/memory.py index 5236e236d05db..3fb605767b87f 100644 --- a/tests/test_utils/perf/perf_kit/memory.py +++ b/tests/test_utils/perf/perf_kit/memory.py @@ -81,4 +81,4 @@ def trace_memory(human_readable=True, gc_collect=False): # Example: with trace_memory(): - import airflow # noqa # pylint: disable=unused-import + import airflow # noqa diff --git a/tests/test_utils/perf/perf_kit/sqlalchemy.py b/tests/test_utils/perf/perf_kit/sqlalchemy.py index b0c5a3ed70f5e..e60ad518e0cd0 100644 --- a/tests/test_utils/perf/perf_kit/sqlalchemy.py +++ b/tests/test_utils/perf/perf_kit/sqlalchemy.py @@ -64,12 +64,12 @@ def __init__( def before_cursor_execute( self, conn, - cursor, # pylint: disable=unused-argument - statement, # pylint: disable=unused-argument - parameters, # pylint: disable=unused-argument - context, # pylint: disable=unused-argument + cursor, + statement, + parameters, + context, executemany, - ): # pylint: disable=unused-argument + ): """ Executed before cursor. @@ -88,12 +88,12 @@ def before_cursor_execute( def after_cursor_execute( self, conn, - cursor, # pylint: disable=unused-argument + cursor, statement, parameters, - context, # pylint: disable=unused-argument + context, executemany, - ): # pylint: disable=unused-argument + ): """ Executed after cursor. 
@@ -141,14 +141,14 @@ def __enter__(self): event.listen(airflow.settings.engine, "before_cursor_execute", self.before_cursor_execute) event.listen(airflow.settings.engine, "after_cursor_execute", self.after_cursor_execute) - def __exit__(self, type_, value, traceback): # noqa pylint: disable=redefined-outer-name + def __exit__(self, type_, value, traceback): import airflow.settings event.remove(airflow.settings.engine, "before_cursor_execute", self.before_cursor_execute) event.remove(airflow.settings.engine, "after_cursor_execute", self.after_cursor_execute) -trace_queries = TraceQueries # pylint: disable=invalid-name +trace_queries = TraceQueries class CountQueriesResult: @@ -180,7 +180,7 @@ def __enter__(self): event.listen(airflow.settings.engine, "after_cursor_execute", self.after_cursor_execute) return self.result - def __exit__(self, type_, value, traceback): # noqa pylint: disable=redefined-outer-name + def __exit__(self, type_, value, traceback): import airflow.settings event.remove(airflow.settings.engine, "after_cursor_execute", self.after_cursor_execute) @@ -188,13 +188,13 @@ def __exit__(self, type_, value, traceback): # noqa pylint: disable=redefined-o def after_cursor_execute( self, - conn, # pylint: disable=unused-argument - cursor, # pylint: disable=unused-argument - statement, # pylint: disable=unused-argument - parameters, # pylint: disable=unused-argument - context, # pylint: disable=unused-argument + conn, + cursor, + statement, + parameters, + context, executemany, - ): # pylint: disable=unused-argument + ): """ Executed after cursor. @@ -208,7 +208,7 @@ def after_cursor_execute( self.result.count += 1 -count_queries = CountQueries # pylint: disable=invalid-name +count_queries = CountQueries if __name__ == "__main__": diff --git a/tests/test_utils/perf/scheduler_dag_execution_timing.py b/tests/test_utils/perf/scheduler_dag_execution_timing.py index a53ceceb6753a..3898b69f9e700 100755 --- a/tests/test_utils/perf/scheduler_dag_execution_timing.py +++ b/tests/test_utils/perf/scheduler_dag_execution_timing.py @@ -88,7 +88,7 @@ def change_state(self, key, state, info=None): if not self.dags_to_watch: self.log.warning("STOPPING SCHEDULER -- all runs complete") - self.scheduler_job.processor_agent._done = True # pylint: disable=protected-access + self.scheduler_job.processor_agent._done = True return self.log.warning( "WAITING ON %d RUNS", sum(map(attrgetter('waiting_for'), self.dags_to_watch.values())) @@ -161,7 +161,7 @@ def create_dag_runs(dag, num_runs, session): except ImportError: from airflow.models.dagrun import DagRun - id_prefix = DagRun.ID_PREFIX # pylint: disable=no-member + id_prefix = DagRun.ID_PREFIX next_run_date = dag.normalize_schedule(dag.start_date or min(t.start_date for t in dag.tasks)) @@ -197,7 +197,7 @@ def create_dag_runs(dag, num_runs, session): Dotted path Executor class to test, for example 'airflow.executors.local_executor.LocalExecutor'. Defaults to MockExecutor which doesn't run tasks. 
''' - ), # pylint: disable=too-many-locals + ), ) @click.argument('dag_ids', required=True, nargs=-1) def main(num_runs, repeat, pre_create_dag_runs, executor_class, dag_ids): @@ -287,7 +287,7 @@ def main(num_runs, repeat, pre_create_dag_runs, executor_class, dag_ids): # Need a lambda to refer to the _latest_ value for scheduler_job, not just # the initial one - code_to_test = lambda: scheduler_job.run() # pylint: disable=unnecessary-lambda + code_to_test = lambda: scheduler_job.run() for count in range(repeat): gc.disable() @@ -324,4 +324,4 @@ def main(num_runs, repeat, pre_create_dag_runs, executor_class, dag_ids): if __name__ == "__main__": - main() # pylint: disable=no-value-for-parameter + main() diff --git a/tests/test_utils/remote_user_api_auth_backend.py b/tests/test_utils/remote_user_api_auth_backend.py index e5f189a0af94b..1e6a0c70adf6d 100644 --- a/tests/test_utils/remote_user_api_auth_backend.py +++ b/tests/test_utils/remote_user_api_auth_backend.py @@ -33,7 +33,7 @@ def init_app(_): """Initializes authentication backend""" -T = TypeVar("T", bound=Callable) # pylint: disable=invalid-name +T = TypeVar("T", bound=Callable) def _lookup_user(user_email_or_username: str): diff --git a/tests/test_utils/reset_warning_registry.py b/tests/test_utils/reset_warning_registry.py index 0b3b2f2a18934..20f9712de6336 100644 --- a/tests/test_utils/reset_warning_registry.py +++ b/tests/test_utils/reset_warning_registry.py @@ -20,7 +20,6 @@ import sys from typing import Dict, Match, Optional - # We need to explicitly clear the warning registry context # https://docs.python.org/2/library/warnings.html # One thing to be aware of is that if a warning has already been raised because @@ -28,10 +27,11 @@ # not be seen again unless the warnings registry related to the warning has # been cleared. # + + # Proposed fix from Stack overflow, which refers to the Python bug-page -# noqa # https://stackoverflow.com/questions/19428761/python-showing-once-warnings-again-resetting-all-warning-registries -class reset_warning_registry: # pylint: disable=invalid-name +class reset_warning_registry: """ context manager which archives & clears warning registry for duration of context. diff --git a/tests/ti_deps/deps/fake_models.py b/tests/ti_deps/deps/fake_models.py index 63346101c8aeb..adc0e20cfe3d9 100644 --- a/tests/ti_deps/deps/fake_models.py +++ b/tests/ti_deps/deps/fake_models.py @@ -24,10 +24,10 @@ def __init__(self, **kwds): self.__dict__.update(kwds) def get_dagrun(self, _): - return self.dagrun # pylint: disable=no-member + return self.dagrun - def are_dependents_done(self, session): # pylint: disable=unused-argument - return self.dependents_done # pylint: disable=no-member + def are_dependents_done(self, session): + return self.dependents_done class FakeTask: @@ -40,7 +40,7 @@ def __init__(self, **kwds): self.__dict__.update(kwds) def get_running_dagruns(self, _): - return self.running_dagruns # pylint: disable=no-member + return self.running_dagruns class FakeContext: diff --git a/tests/ti_deps/deps/test_dag_ti_slots_available_dep.py b/tests/ti_deps/deps/test_dag_ti_slots_available_dep.py index c685d6ad2d67f..8033ec7e383af 100644 --- a/tests/ti_deps/deps/test_dag_ti_slots_available_dep.py +++ b/tests/ti_deps/deps/test_dag_ti_slots_available_dep.py @@ -15,7 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
-# pylint: disable=no-value-for-parameter
+
 import unittest
 from unittest.mock import Mock
diff --git a/tests/ti_deps/deps/test_dag_unpaused_dep.py b/tests/ti_deps/deps/test_dag_unpaused_dep.py
index bc962038d82fb..c55ff9d223a27 100644
--- a/tests/ti_deps/deps/test_dag_unpaused_dep.py
+++ b/tests/ti_deps/deps/test_dag_unpaused_dep.py
@@ -15,7 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-# pylint: disable=no-value-for-parameter
+
 import unittest
 from unittest.mock import Mock
diff --git a/tests/ti_deps/deps/test_dagrun_exists_dep.py b/tests/ti_deps/deps/test_dagrun_exists_dep.py
index b9fcb6a8831c7..4ec17c87be29c 100644
--- a/tests/ti_deps/deps/test_dagrun_exists_dep.py
+++ b/tests/ti_deps/deps/test_dagrun_exists_dep.py
@@ -15,7 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-# pylint: disable=no-value-for-parameter
+
 import unittest
 from unittest.mock import Mock, patch
diff --git a/tests/ti_deps/deps/test_dagrun_id_dep.py b/tests/ti_deps/deps/test_dagrun_id_dep.py
index 11815ee3e688e..1192719bc8b8f 100644
--- a/tests/ti_deps/deps/test_dagrun_id_dep.py
+++ b/tests/ti_deps/deps/test_dagrun_id_dep.py
@@ -15,7 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-# pylint: disable=no-value-for-parameter
+
 import unittest
 from unittest.mock import Mock
diff --git a/tests/ti_deps/deps/test_not_in_retry_period_dep.py b/tests/ti_deps/deps/test_not_in_retry_period_dep.py
index 42e2a3de50eb2..197277fba2c2c 100644
--- a/tests/ti_deps/deps/test_not_in_retry_period_dep.py
+++ b/tests/ti_deps/deps/test_not_in_retry_period_dep.py
@@ -15,7 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-# pylint: disable=no-value-for-parameter
+
 import unittest
 from datetime import timedelta
diff --git a/tests/ti_deps/deps/test_pool_slots_available_dep.py b/tests/ti_deps/deps/test_pool_slots_available_dep.py
index 2045a7bf2b6ba..19cd3711e12ba 100644
--- a/tests/ti_deps/deps/test_pool_slots_available_dep.py
+++ b/tests/ti_deps/deps/test_pool_slots_available_dep.py
@@ -15,7 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-# pylint: disable=no-value-for-parameter + import unittest from unittest.mock import Mock, patch @@ -39,19 +39,16 @@ def tearDown(self): db.clear_db_pools() @patch('airflow.models.Pool.open_slots', return_value=0) - # pylint: disable=unused-argument def test_pooled_task_reached_concurrency(self, mock_open_slots): ti = Mock(pool='test_pool', pool_slots=1) assert not PoolSlotsAvailableDep().is_met(ti=ti) @patch('airflow.models.Pool.open_slots', return_value=1) - # pylint: disable=unused-argument def test_pooled_task_pass(self, mock_open_slots): ti = Mock(pool='test_pool', pool_slots=1) assert PoolSlotsAvailableDep().is_met(ti=ti) @patch('airflow.models.Pool.open_slots', return_value=0) - # pylint: disable=unused-argument def test_running_pooled_task_pass(self, mock_open_slots): for state in EXECUTION_STATES: ti = Mock(pool='test_pool', state=state, pool_slots=1) diff --git a/tests/ti_deps/deps/test_prev_dagrun_dep.py b/tests/ti_deps/deps/test_prev_dagrun_dep.py index 65c7c7b2c90fa..d80d47533d715 100644 --- a/tests/ti_deps/deps/test_prev_dagrun_dep.py +++ b/tests/ti_deps/deps/test_prev_dagrun_dep.py @@ -15,7 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -# pylint: disable=no-value-for-parameter + import unittest from datetime import datetime diff --git a/tests/ti_deps/deps/test_ready_to_reschedule_dep.py b/tests/ti_deps/deps/test_ready_to_reschedule_dep.py index 02b0523eb64b4..053c9522f19e1 100644 --- a/tests/ti_deps/deps/test_ready_to_reschedule_dep.py +++ b/tests/ti_deps/deps/test_ready_to_reschedule_dep.py @@ -15,7 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -# pylint: disable=no-value-for-parameter + import unittest from datetime import timedelta diff --git a/tests/ti_deps/deps/test_runnable_exec_date_dep.py b/tests/ti_deps/deps/test_runnable_exec_date_dep.py index 1c5b8792c7dff..ac81bf6c69f89 100644 --- a/tests/ti_deps/deps/test_runnable_exec_date_dep.py +++ b/tests/ti_deps/deps/test_runnable_exec_date_dep.py @@ -15,7 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -# pylint: disable=no-value-for-parameter + import unittest from unittest.mock import Mock, patch diff --git a/tests/ti_deps/deps/test_task_concurrency.py b/tests/ti_deps/deps/test_task_concurrency.py index f02000cbc8767..e723c5a089fa8 100644 --- a/tests/ti_deps/deps/test_task_concurrency.py +++ b/tests/ti_deps/deps/test_task_concurrency.py @@ -15,7 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -# pylint: disable=no-value-for-parameter + import unittest from datetime import datetime diff --git a/tests/ti_deps/deps/test_task_not_running_dep.py b/tests/ti_deps/deps/test_task_not_running_dep.py index ec73fae7e7da0..64cc79a1d9520 100644 --- a/tests/ti_deps/deps/test_task_not_running_dep.py +++ b/tests/ti_deps/deps/test_task_not_running_dep.py @@ -15,7 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
-# pylint: disable=no-value-for-parameter + import unittest from datetime import datetime diff --git a/tests/ti_deps/deps/test_trigger_rule_dep.py b/tests/ti_deps/deps/test_trigger_rule_dep.py index 10dcc1b653d57..1443e92ca9794 100644 --- a/tests/ti_deps/deps/test_trigger_rule_dep.py +++ b/tests/ti_deps/deps/test_trigger_rule_dep.py @@ -15,7 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -# pylint: disable=no-value-for-parameter + import unittest from datetime import datetime diff --git a/tests/ti_deps/deps/test_valid_state_dep.py b/tests/ti_deps/deps/test_valid_state_dep.py index 64daff43579ba..41010a5628bf3 100644 --- a/tests/ti_deps/deps/test_valid_state_dep.py +++ b/tests/ti_deps/deps/test_valid_state_dep.py @@ -15,7 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -# pylint: disable=no-value-for-parameter + import unittest from datetime import datetime diff --git a/tests/utils/log/test_log_reader.py b/tests/utils/log/test_log_reader.py index b25ff674e3754..cef374ca5cbed 100644 --- a/tests/utils/log/test_log_reader.py +++ b/tests/utils/log/test_log_reader.py @@ -41,7 +41,7 @@ class TestLogView(unittest.TestCase): DEFAULT_DATE = timezone.datetime(2017, 9, 1) def setUp(self): - self.maxDiff = None # pylint: disable=invalid-name + self.maxDiff = None # Make sure that the configure_logging is not cached self.old_modules = dict(sys.modules) diff --git a/tests/utils/test_dag_processing.py b/tests/utils/test_dag_processing.py index 3242cf3df12ed..58ad010a0aa6c 100644 --- a/tests/utils/test_dag_processing.py +++ b/tests/utils/test_dag_processing.py @@ -417,7 +417,7 @@ def test_find_zombies(self): manager._last_zombie_query_time = timezone.utcnow() - timedelta( seconds=manager._zombie_threshold_secs + 1 ) - manager._find_zombies() # pylint: disable=no-value-for-parameter + manager._find_zombies() requests = manager._callback_to_execute[dag.full_filepath] assert 1 == len(requests) assert requests[0].full_filepath == dag.full_filepath diff --git a/tests/utils/test_edgemodifier.py b/tests/utils/test_edgemodifier.py index fbbb7b8dba39d..21573b3cf67eb 100644 --- a/tests/utils/test_edgemodifier.py +++ b/tests/utils/test_edgemodifier.py @@ -122,8 +122,8 @@ def test_operator_shift(self, test_dag): # Unpack the fixture dag, (op1, op2, op3, op4) = test_dag # Arrange the operators with a Label in the middle - op1 >> Label("Label 1") >> op2 # pylint: disable=W0106 - op3 << Label("Label 2") << op2 >> op4 # pylint: disable=W0106 + op1 >> Label("Label 1") >> op2 + op3 << Label("Label 2") << op2 >> op4 # Check that the DAG has the right edge info assert dag.get_edge_info(op1.task_id, op2.task_id) == {"label": "Label 1"} assert dag.get_edge_info(op2.task_id, op3.task_id) == {"label": "Label 2"} @@ -134,7 +134,7 @@ def test_tasklist_shift(self, test_dag): # Unpack the fixture dag, (op1, op2, op3, op4) = test_dag # Arrange the operators with a Label in the middle - op1 >> Label("Label 1") >> [op2, op3] << Label("Label 2") << op4 # pylint: disable=W0106 + op1 >> Label("Label 1") >> [op2, op3] << Label("Label 2") << op4 # Check that the DAG has the right edge info assert dag.get_edge_info(op1.task_id, op2.task_id) == {"label": "Label 1"} assert dag.get_edge_info(op1.task_id, op3.task_id) == {"label": "Label 1"} @@ -146,7 +146,7 @@ def test_xcomarg_shift(self, test_dag): dag, (op1, op2, op3, op4) = test_dag # Arrange the 
operators with a Label in the middle op1_arg = XComArg(op1, "test_key") - op1_arg >> Label("Label 1") >> [op2, op3] # pylint: disable=W0106 + op1_arg >> Label("Label 1") >> [op2, op3] op1_arg >> op4 # Check that the DAG has the right edge info assert dag.get_edge_info(op1.task_id, op2.task_id) == {"label": "Label 1"} @@ -157,7 +157,7 @@ def test_taskgroup_shift(self, test_taskgroup_dag): # Unpack the fixture dag, group, (op1, op2, op3, op4) = test_taskgroup_dag # Arrange them with a Label in the middle - op1 >> Label("Group label") >> group >> op4 # pylint: disable=W0106 + op1 >> Label("Group label") >> group >> op4 # Check that the DAG has the right edge info assert dag.get_edge_info(op1.task_id, op2.task_id) == {"label": "Group label"} assert dag.get_edge_info(op1.task_id, op3.task_id) == {"label": "Group label"} diff --git a/tests/utils/test_module_loading.py b/tests/utils/test_module_loading.py index 51daee67b22f4..3e72b674a36d4 100644 --- a/tests/utils/test_module_loading.py +++ b/tests/utils/test_module_loading.py @@ -26,7 +26,7 @@ class TestModuleImport(unittest.TestCase): def test_import_string(self): cls = import_string('airflow.utils.module_loading.import_string') - assert cls == import_string # pylint: disable=comparison-with-callable + assert cls == import_string # Test exceptions raised with pytest.raises(ImportError): diff --git a/tests/utils/test_retries.py b/tests/utils/test_retries.py index 14e9550d5ff5a..1f1c1e71e6bf5 100644 --- a/tests/utils/test_retries.py +++ b/tests/utils/test_retries.py @@ -85,7 +85,7 @@ def test_retry_db_transaction_fails_when_used_in_function_without_retry(self): with self.assertRaisesRegex(ValueError, "has no `session` argument"): @retry_db_transaction - def test_function(): # pylint: disable=unused-variable + def test_function(): print("hi") raise OperationalError(statement=mock.ANY, params=mock.ANY, orig=mock.ANY) @@ -100,4 +100,4 @@ def test_function(session): with self.assertRaisesRegex( TypeError, f"session is a required argument for {test_function.__qualname__}" ): - test_function() # pylint: disable=no-value-for-parameter + test_function() diff --git a/tests/utils/test_task_group.py b/tests/utils/test_task_group.py index 2cb71e93279a6..238897b2460af 100644 --- a/tests/utils/test_task_group.py +++ b/tests/utils/test_task_group.py @@ -309,11 +309,9 @@ def task_5(): tsk_1 >> group234 >> tsk_5 - # pylint: disable=no-member assert tsk_1.operator in tsk_2.operator.upstream_list assert tsk_1.operator in tsk_3.operator.upstream_list assert tsk_5.operator in tsk_4.operator.downstream_list - # pylint: enable=no-member assert extract_node_id(task_group_to_dict(dag.task_group)) == { 'id': None, diff --git a/tests/www/api/experimental/test_endpoints.py b/tests/www/api/experimental/test_endpoints.py index a63b0bb327649..9710b38c68954 100644 --- a/tests/www/api/experimental/test_endpoints.py +++ b/tests/www/api/experimental/test_endpoints.py @@ -48,7 +48,7 @@ class TestBase: @pytest.fixture(autouse=True) def _setup_attrs_base(self, experiemental_api_app, configured_session): self.app = experiemental_api_app - self.appbuilder = self.app.appbuilder # pylint: disable=no-member + self.appbuilder = self.app.appbuilder self.client = self.app.test_client() self.session = configured_session diff --git a/tests/www/test_security.py b/tests/www/test_security.py index 73fe4b1f2135e..358387b73ab49 100644 --- a/tests/www/test_security.py +++ b/tests/www/test_security.py @@ -84,7 +84,7 @@ def setUpClass(cls): settings.configure_orm() cls.session = settings.Session 
cls.app = application.create_app(testing=True) - cls.appbuilder = cls.app.appbuilder # pylint: disable=no-member + cls.appbuilder = cls.app.appbuilder cls.app.config['WTF_CSRF_ENABLED'] = False cls.security_manager = cls.appbuilder.sm cls.delete_roles() @@ -365,7 +365,7 @@ def test_get_accessible_dag_ids(self): self.session.add(dag_model) self.session.commit() - self.security_manager.sync_perm_for_dag( # type: ignore # pylint: disable=no-member + self.security_manager.sync_perm_for_dag( # type: ignore dag_id, access_control={role_name: permission_action} ) @@ -392,7 +392,7 @@ def test_dont_get_inaccessible_dag_ids_for_dag_resource_permission(self): self.session.add(dag_model) self.session.commit() - self.security_manager.sync_perm_for_dag( # type: ignore # pylint: disable=no-member + self.security_manager.sync_perm_for_dag( # type: ignore dag_id, access_control={role_name: permission_action} ) diff --git a/tests/www/views/conftest.py b/tests/www/views/conftest.py index 6e99cbbccb301..54e46b69bab64 100644 --- a/tests/www/views/conftest.py +++ b/tests/www/views/conftest.py @@ -65,7 +65,7 @@ def factory(): app.dag_bag = examples_dag_bag app.jinja_env.undefined = jinja2.StrictUndefined - security_manager = app.appbuilder.sm # pylint: disable=no-member + security_manager = app.appbuilder.sm if not security_manager.find_user(username='test'): security_manager.add_user( username='test', @@ -181,7 +181,7 @@ def capture_templates(app): def manager() -> Generator[List[_TemplateWithContext], None, None]: recorded = [] - def record(sender, template, context, **extra): # pylint: disable=unused-argument + def record(sender, template, context, **extra): recorded.append(_TemplateWithContext(template, context)) flask.template_rendered.connect(record, app) # type: ignore diff --git a/tests/www/views/test_views_acl.py b/tests/www/views/test_views_acl.py index 931151aaa321a..b5e5e998561d1 100644 --- a/tests/www/views/test_views_acl.py +++ b/tests/www/views/test_views_acl.py @@ -80,7 +80,7 @@ def acl_app(app): security_manager.add_user( role=role, username=username, - **kwargs, # pylint: disable=not-a-mapping + **kwargs, ) # FIXME: Clean up this block of code..... 
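Aside: in the surrounding www/views hunks the dropped `unused-argument` suppressions mostly sat on pytest fixtures such as `init_dagruns(acl_app, reset_dagruns)`, which request another fixture purely so its setup runs first. A minimal sketch of that dependency-only pattern (hypothetical fixture names, not taken from this patch):

import pytest


@pytest.fixture()
def reset_rows():
    # Imagine this clears test tables before and after each test.
    yield


@pytest.fixture(autouse=True)
def seeded_rows(reset_rows):
    # `reset_rows` is listed only to force its setup to run first; the
    # parameter itself is never read, which is exactly what pylint used to
    # flag as unused-argument. pyflakes/flake8 has no equivalent check.
    return None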
@@ -145,7 +145,7 @@ def reset_dagruns(): @pytest.fixture(autouse=True) -def init_dagruns(acl_app, reset_dagruns): # pylint: disable=unused-argument +def init_dagruns(acl_app, reset_dagruns): acl_app.dag_bag.get_dag("example_bash_operator").create_dagrun( run_type=DagRunType.SCHEDULED, execution_date=DEFAULT_DATE, @@ -228,7 +228,7 @@ def user_all_dags(acl_app): @pytest.fixture() -def client_all_dags(acl_app, user_all_dags): # pylint: disable=unused-argument +def client_all_dags(acl_app, user_all_dags): return client_with_login( acl_app, username="user_all_dags", @@ -273,7 +273,7 @@ def user_all_dags_dagruns(acl_app): @pytest.fixture() -def client_all_dags_dagruns(acl_app, user_all_dags_dagruns): # pylint: disable=unused-argument +def client_all_dags_dagruns(acl_app, user_all_dags_dagruns): return client_with_login( acl_app, username="user_all_dags_dagruns", @@ -314,7 +314,7 @@ def user_all_dags_dagruns_tis(acl_app): @pytest.fixture() -def client_all_dags_dagruns_tis(acl_app, user_all_dags_dagruns_tis): # pylint: disable=unused-argument +def client_all_dags_dagruns_tis(acl_app, user_all_dags_dagruns_tis): return client_with_login( acl_app, username="user_all_dags_dagruns_tis", @@ -373,7 +373,7 @@ def user_all_dags_codes(acl_app): @pytest.fixture() -def client_all_dags_codes(acl_app, user_all_dags_codes): # pylint: disable=unused-argument +def client_all_dags_codes(acl_app, user_all_dags_codes): return client_with_login( acl_app, username="user_all_dags_codes", @@ -441,7 +441,7 @@ def user_all_dags_tis(acl_app): @pytest.fixture() -def client_all_dags_tis(acl_app, user_all_dags_tis): # pylint: disable=unused-argument +def client_all_dags_tis(acl_app, user_all_dags_tis): return client_with_login( acl_app, username="user_all_dags_tis", @@ -465,7 +465,7 @@ def user_all_dags_tis_xcom(acl_app): @pytest.fixture() -def client_all_dags_tis_xcom(acl_app, user_all_dags_tis_xcom): # pylint: disable=unused-argument +def client_all_dags_tis_xcom(acl_app, user_all_dags_tis_xcom): return client_with_login( acl_app, username="user_all_dags_tis_xcom", @@ -489,7 +489,7 @@ def user_dags_tis_logs(acl_app): @pytest.fixture() -def client_dags_tis_logs(acl_app, user_dags_tis_logs): # pylint: disable=unused-argument +def client_dags_tis_logs(acl_app, user_dags_tis_logs): return client_with_login( acl_app, username="user_dags_tis_logs", @@ -655,7 +655,7 @@ def user_all_dags_edit_tis(acl_app): @pytest.fixture() -def client_all_dags_edit_tis(acl_app, user_all_dags_edit_tis): # pylint: disable=unused-argument +def client_all_dags_edit_tis(acl_app, user_all_dags_edit_tis): return client_with_login( acl_app, username="user_all_dags_edit_tis", @@ -708,7 +708,7 @@ def user_only_dags_tis(acl_app): @pytest.fixture() -def client_only_dags_tis(acl_app, user_only_dags_tis): # pylint: disable=unused-argument +def client_only_dags_tis(acl_app, user_only_dags_tis): return client_with_login( acl_app, username="user_only_dags_tis", diff --git a/tests/www/views/test_views_extra_links.py b/tests/www/views/test_views_extra_links.py index d27811fb45beb..705e38a763aa1 100644 --- a/tests/www/views/test_views_extra_links.py +++ b/tests/www/views/test_views_extra_links.py @@ -44,7 +44,7 @@ def get_link(self, operator, dttm): class NoResponseLink(BaseOperatorLink): name = 'no_response' - def get_link(self, operator, dttm): # pylint: disable=unused-argument + def get_link(self, operator, dttm): return None @@ -58,7 +58,7 @@ def get_link(self, operator, dttm): class AirflowLink(BaseOperatorLink): name = 'airflow' - def get_link(self, operator, 
dttm): # pylint: disable=unused-argument + def get_link(self, operator, dttm): return 'https://airflow.apache.org' diff --git a/tests/www/views/test_views_log.py b/tests/www/views/test_views_log.py index 985bc236e6fcb..a56b0f0ea4213 100644 --- a/tests/www/views/test_views_log.py +++ b/tests/www/views/test_views_log.py @@ -52,7 +52,7 @@ def backup_modules(): @pytest.fixture(scope="module") -def log_app(backup_modules): # pylint: disable=unused-argument +def log_app(backup_modules): @dont_initialize_flask_app_submodules( skip_all_except=["init_appbuilder", "init_jinja_globals", "init_appbuilder_views"] ) @@ -61,7 +61,7 @@ def factory(): app = create_app(testing=True) app.config["WTF_CSRF_ENABLED"] = False settings.configure_orm() - security_manager = app.appbuilder.sm # pylint: disable=no-member + security_manager = app.appbuilder.sm if not security_manager.find_user(username='test'): security_manager.add_user( username='test', diff --git a/tests/www/views/test_views_rendered.py b/tests/www/views/test_views_rendered.py index df02a9e56d1a1..1f0f8ae999b5e 100644 --- a/tests/www/views/test_views_rendered.py +++ b/tests/www/views/test_views_rendered.py @@ -59,7 +59,7 @@ def task2(dag): @pytest.fixture(autouse=True) -def reset_db(dag, task1, task2): # pylint: disable=unused-argument +def reset_db(dag, task1, task2): """Reset DB for each test. This writes the DAG to the DB, and clears rendered fields so we have a clean diff --git a/tests/www/views/test_views_tasks.py b/tests/www/views/test_views_tasks.py index e6910d70adc85..b8c2d83cf5d6e 100644 --- a/tests/www/views/test_views_tasks.py +++ b/tests/www/views/test_views_tasks.py @@ -49,7 +49,7 @@ def reset_dagruns(): @pytest.fixture(autouse=True) -def init_dagruns(app, reset_dagruns): # pylint: disable=unused-argument +def init_dagruns(app, reset_dagruns): app.dag_bag.get_dag("example_bash_operator").create_dagrun( run_type=DagRunType.SCHEDULED, execution_date=DEFAULT_DATE,