diff --git a/BREEZE.rst b/BREEZE.rst
index abc9e1ef2a6af..e56abd146db0c 100644
--- a/BREEZE.rst
+++ b/BREEZE.rst
@@ -1263,7 +1263,7 @@ This is the current syntax for `./breeze <./breeze>`_:
 
           One of:
 
-                 3.6 3.7 3.8
+                 3.6 3.7 3.8 3.9
 
   -a, --install-airflow-version INSTALL_AIRFLOW_VERSION
           Uses different version of Airflow when building PROD image.
@@ -1493,7 +1493,7 @@ This is the current syntax for `./breeze <./breeze>`_:
 
           One of:
 
-                 3.6 3.7 3.8
+                 3.6 3.7 3.8 3.9
 
   -I, --production-image
           Use production image for entering the environment and builds (not for tests).
@@ -1560,7 +1560,7 @@ This is the current syntax for `./breeze <./breeze>`_:
 
           One of:
 
-                 3.6 3.7 3.8
+                 3.6 3.7 3.8 3.9
 
   -v, --verbose
           Show verbose information about executed docker, kind, kubectl, helm commands. Useful for
@@ -1683,7 +1683,7 @@ This is the current syntax for `./breeze <./breeze>`_:
 
           One of:
 
-                 3.6 3.7 3.8
+                 3.6 3.7 3.8 3.9
 
 
 ####################################################################################################
@@ -1878,7 +1878,7 @@ This is the current syntax for `./breeze <./breeze>`_:
 
           One of:
 
-                 3.6 3.7 3.8
+                 3.6 3.7 3.8 3.9
 
   -b, --backend BACKEND
           Backend to use for tests - it determines which database is used.
@@ -1942,7 +1942,7 @@ This is the current syntax for `./breeze <./breeze>`_:
 
           One of:
 
-                 3.6 3.7 3.8
+                 3.6 3.7 3.8 3.9
 
   -F, --force-build-images
           Forces building of the local docker images. The images are rebuilt
@@ -2350,7 +2350,7 @@ This is the current syntax for `./breeze <./breeze>`_:
 
           One of:
 
-                 3.6 3.7 3.8
+                 3.6 3.7 3.8 3.9
 
 ****************************************************************************************************
           Choose backend to run for Airflow
diff --git a/CI.rst b/CI.rst
index 6cfe7464bf715..af5e8aa0d9dd5 100644
--- a/CI.rst
+++ b/CI.rst
@@ -57,7 +57,7 @@ Container Registry used as cache
 For the CI builds of our we are using Container Registry to store results of the "Build Image" workflow
 and pass it to the "CI Build" workflow.
 
-Currently in main version of Airflow we run tests in 3 different versions of Python (3.6, 3.7, 3.8)
-which means that we have to build 6 images (3 CI ones and 3 PROD ones).
+Currently in the main version of Airflow we run tests in 4 different versions of Python (3.6, 3.7, 3.8, 3.9)
+which means that we have to build 8 images (4 CI ones and 4 PROD ones).
 Yet we run around 12 jobs with each of the CI images. That is a lot of time to just build the environment
 to run. Therefore we are utilising ``pull_request_target`` feature of GitHub Actions.
@@ -779,7 +779,7 @@ The image names follow the patterns:
 +--------------+----------------------------+--------------------------------+--------------------------------------------------------------------------------------------+
 
 * might be either "main" or "v1-10-test" or "v2-*-test"
-* - Python version (Major + Minor). For "main" and "v2-*-test" should be in ["3.6", "3.7", "3.8"].
+* - Python version (Major + Minor). For "main" and "v2-*-test" should be in ["3.6", "3.7", "3.8", "3.9"].
 * - for images that get merged to "main", "v2-*-test" of "v1-10-test", or built as part of a pull request
   the images are tagged with the (full length) commit SHA of that particular branch. For pull requests
   the SHA used is the tip of the pull request branch.
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index d89c9435ed7e9..f269170646b52 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -873,7 +873,7 @@ This can be done by running this (it utilizes parallel preparation of the constr
 
 .. code-block:: bash
 
-    export CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING="3.6 3.7 3.8"
+    export CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING="3.6 3.7 3.8 3.9"
     for python_version in $(echo "${CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING}")
     do
       ./breeze build-image --upgrade-to-newer-dependencies --python ${python_version} --build-cache-local
diff --git a/CONTRIBUTORS_QUICK_START.rst b/CONTRIBUTORS_QUICK_START.rst
index 4c50af672a59e..132112e38d5eb 100644
--- a/CONTRIBUTORS_QUICK_START.rst
+++ b/CONTRIBUTORS_QUICK_START.rst
@@ -135,7 +135,7 @@ Pyenv and setting up virtual-env
     libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev \
     xz-utils tk-dev libffi-dev liblzma-dev python-openssl git
 
-  $ sudo apt install build-essentials python3.6-dev python3.7-dev python3.8-dev python-dev openssl \
+  $ sudo apt install build-essentials python3.6-dev python3.7-dev python3.8-dev python3.9-dev python-dev openssl \
     sqlite sqlite-dev default-libmysqlclient-dev libmysqld-dev postgresql
 
 2. Install pyenv
diff --git a/Dockerfile b/Dockerfile
index 39a13dc6b2635..6a4b75d1e78eb 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -44,7 +44,7 @@ ARG AIRFLOW_GID="50000"
 
 ARG PYTHON_BASE_IMAGE="python:3.6-slim-buster"
 
-ARG AIRFLOW_PIP_VERSION=21.1.1
+ARG AIRFLOW_PIP_VERSION=21.1.2
 
 # By default PIP has progress bar but you can disable it.
 ARG PIP_PROGRESS_BAR="on"
@@ -232,11 +232,10 @@ ARG INSTALL_FROM_DOCKER_CONTEXT_FILES=""
 ARG INSTALL_FROM_PYPI="true"
 
 # Those are additional constraints that are needed for some extras but we do not want to
 # Force them on the main Airflow package.
-# * chardet<4 and certifi<2021.0.0 required to keep snowflake happy
-# * urllib3 - required to keep boto3 happy
+# * certifi<2021.0.0 required to keep snowflake happy
 # * pyjwt<2.0.0: flask-jwt-extended requires it
 # * dill<0.3.3 required by apache-beam
-ARG EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS="chardet<4 urllib3<1.26 pyjwt<2.0.0 dill<0.3.3 certifi<2021.0.0"
+ARG EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS="pyjwt<2.0.0 dill<0.3.3 certifi<2021.0.0"
 
 ARG CONTINUE_ON_PIP_CHECK_FAILURE="false"
diff --git a/Dockerfile.ci b/Dockerfile.ci
index 085b4e5d6f565..2739c15182848 100644
--- a/Dockerfile.ci
+++ b/Dockerfile.ci
@@ -218,7 +218,7 @@ ARG AIRFLOW_PRE_CACHED_PIP_PACKAGES="true"
 # By default in the image, we are installing all providers when installing from sources
 ARG INSTALL_PROVIDERS_FROM_SOURCES="true"
 ARG INSTALL_FROM_PYPI="true"
-ARG AIRFLOW_PIP_VERSION=21.1.1
+ARG AIRFLOW_PIP_VERSION=21.1.2
 # Setup PIP
 # By default PIP install run without cache to make image smaller
 ARG PIP_NO_CACHE_DIR="true"
@@ -262,13 +262,11 @@ ENV AIRFLOW_REPO=${AIRFLOW_REPO}\
 
 # Those are additional constraints that are needed for some extras but we do not want to
 # force them on the main Airflow package. Those limitations are:
-# * chardet<4 and certifi<2021.0.0: required by snowflake provider
+# * certifi<2021.0.0: required by snowflake provider
 # * lazy-object-proxy<1.5.0: required by astroid
-# * pyOpenSSL: required by snowflake provider https://github.com/snowflakedb/snowflake-connector-python/blob/v2.3.6/setup.py#L201
-# * urllib3<1.26: Required to keep boto3 happy
 # * pyjwt<2.0.0: flask-jwt-extended requires it
 # * dill<0.3.3 required by apache-beam
-ARG EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS="chardet<4 lazy-object-proxy<1.5.0 pyOpenSSL<20.0.0 urllib3<1.26 pyjwt<2.0.0 dill<0.3.3 certifi<2021.0.0"
+ARG EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS="lazy-object-proxy<1.5.0 pyjwt<2.0.0 dill<0.3.3 certifi<2021.0.0"
 ARG UPGRADE_TO_NEWER_DEPENDENCIES="false"
 ENV EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS=${EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS} \
     UPGRADE_TO_NEWER_DEPENDENCIES=${UPGRADE_TO_NEWER_DEPENDENCIES}
diff --git a/IMAGES.rst b/IMAGES.rst
index e38257fc31b80..c3a1805b4811b 100644
--- a/IMAGES.rst
+++ b/IMAGES.rst
@@ -81,7 +81,7 @@ where:
   built from branches so they change over time. The ``2.*.*`` labels are built from git tags
   and they are "fixed" once built.
 * ``PYTHON_MAJOR_MINOR_VERSION`` - version of Python used to build the image. Examples: ``3.6``, ``3.7``,
-  ``3.8``
+  ``3.8``, ``3.9``
 * The ``-ci`` suffix is added for CI images
 * The ``-manifest`` is added for manifest images (see below for explanation of manifest images)
diff --git a/LOCAL_VIRTUALENV.rst b/LOCAL_VIRTUALENV.rst
index f97f89a821cf9..b389442170a7c 100644
--- a/LOCAL_VIRTUALENV.rst
+++ b/LOCAL_VIRTUALENV.rst
@@ -51,8 +51,8 @@ Required Software Packages
 Use system-level package managers like yum, apt-get for Linux, or Homebrew for macOS to install
 required software packages:
 
-* Python (One of: 3.6, 3.7, 3.8)
-* MySQL
+* Python (One of: 3.6, 3.7, 3.8, 3.9)
+* MySQL 5.7+
 * libxml
 
 Refer to the `Dockerfile.ci `__ for a comprehensive list
@@ -102,7 +102,7 @@ Creating a Local virtualenv
 
 To use your IDE for Airflow development and testing, you need to configure a virtual
 environment. Ideally you should set up virtualenv for all Python versions that Airflow
-supports (3.6, 3.7, 3.8).
+supports (3.6, 3.7, 3.8, 3.9).
 
 To create and initialize the local virtualenv:
diff --git a/PULL_REQUEST_WORKFLOW.rst b/PULL_REQUEST_WORKFLOW.rst
index 96cc5b3f09514..3e3cce6b3a10e 100644
--- a/PULL_REQUEST_WORKFLOW.rst
+++ b/PULL_REQUEST_WORKFLOW.rst
@@ -58,7 +58,7 @@ We approached the problem by:
 3) Even more optimisation came from limiting the scope of tests to only "default" matrix parameters.
    So far in Airflow we always run all tests for all matrix combinations.
   The primary matrix components are:
 
-   * Python versions (currently 3.6, 3.7, 3.8)
+   * Python versions (currently 3.6, 3.7, 3.8, 3.9)
    * Backend types (currently MySQL/Postgres)
    * Backed version (currently MySQL 5.7, MySQL 8, Postgres 9.6, Postgres 13
diff --git a/README.md b/README.md
index ec7eebd9b7934..5510f335f959d 100644
--- a/README.md
+++ b/README.md
@@ -127,9 +127,9 @@ We **highly** recommend upgrading to the latest Airflow major release at the ear
 
 Apache Airflow is tested with:
 
-|                      | Main version (dev)        | Stable version (2.0.2)   |
+|                      | Main version (dev)        | Stable version (2.1.0)   |
 | -------------------- | ------------------------- | ------------------------ |
-| Python               | 3.6, 3.7, 3.8             | 3.6, 3.7, 3.8            |
+| Python               | 3.6, 3.7, 3.8, 3.9        | 3.6, 3.7, 3.8            |
 | Kubernetes           | 1.20, 1.19, 1.18          | 1.20, 1.19, 1.18         |
 | PostgreSQL           | 9.6, 10, 11, 12, 13       | 9.6, 10, 11, 12, 13      |
 | MySQL                | 5.7, 8                    | 5.7, 8                   |
diff --git a/airflow/__init__.py b/airflow/__init__.py
index 7ecc4877c5935..6d04886252a50 100644
--- a/airflow/__init__.py
+++ b/airflow/__init__.py
@@ -36,7 +36,7 @@
 
 __version__ = version.version
 
-__all__ = ['__version__', 'login', 'DAG']
+__all__ = ['__version__', 'login', 'DAG', 'PY36', 'PY37', 'PY38', 'PY39']
 
 # Make `airflow` an namespace package, supporting installing
 # airflow.providers.* in different locations (i.e. one in site, and one in user
@@ -50,6 +50,7 @@
 PY36 = sys.version_info >= (3, 6)
 PY37 = sys.version_info >= (3, 7)
 PY38 = sys.version_info >= (3, 8)
+PY39 = sys.version_info >= (3, 9)
 
 
 def __getattr__(name):
diff --git a/airflow/provider.yaml.schema.json b/airflow/provider.yaml.schema.json
index b8a2c136fa790..6891206718342 100644
--- a/airflow/provider.yaml.schema.json
+++ b/airflow/provider.yaml.schema.json
@@ -28,6 +28,13 @@
         "type": "string"
       }
     },
+    "excluded-python-versions": {
+      "description": "List of python versions excluded for that provider",
+      "type": "array",
+      "items": {
+        "type": "string"
+      }
+    },
     "integrations": {
       "description": "List of integrations supported by the provider.",
       "type": "array",
diff --git a/airflow/providers/apache/hive/provider.yaml b/airflow/providers/apache/hive/provider.yaml
index 803b6ec150c21..9d02184b83639 100644
--- a/airflow/providers/apache/hive/provider.yaml
+++ b/airflow/providers/apache/hive/provider.yaml
@@ -31,6 +31,9 @@ versions:
 additional-dependencies:
   - apache-airflow>=2.1.0
 
+excluded-python-versions:
+  - "3.9"
+
 integrations:
   - integration-name: Apache Hive
     external-doc-url: https://hive.apache.org/
diff --git a/breeze b/breeze
index d358b682997e7..88ecaa2c3c199 100755
--- a/breeze
+++ b/breeze
@@ -3539,7 +3539,7 @@ function breeze::run_breeze_command() {
         docker_engine_resources::check_all_resources
         if [[ ${PRODUCTION_IMAGE} == "true" ]]; then
             ${run_command} "${dc_run_file}" run --service-ports --rm airflow "${@}"
-            ${run_command} "${SCRIPTS_CI_DIR}/tools/ci_fix_ownership.sh"
+            ${run_command} "${SCRIPTS_CI_DIR}/tools/ci_fix_ownership.sh" || true
         else
             ${run_command} "${dc_run_file}" run --service-ports --rm airflow "${@}"
         fi
@@ -3653,7 +3653,7 @@ function breeze::run_breeze_command() {
 # We have different versions of images depending on the python version used. We keep up with the
 # Latest patch-level changes in Python (this is done automatically during CI builds) so we have
 # To only take into account MAJOR and MINOR version of python. This variable keeps the major/minor
-# version of python in X.Y format (3.6, 3.7, 3.8 etc).
+# version of python in X.Y format (3.6, 3.7, 3.8, 3.9).
 #
 # In Breeze the precedence of setting the version is as follows:
 # 1. --python flag (if set, it will explicitly override it in the next step)
diff --git a/breeze-complete b/breeze-complete
index 57a69ff2ac562..26d220403a6c8 100644
--- a/breeze-complete
+++ b/breeze-complete
@@ -23,7 +23,7 @@
 # by the BATS tests automatically during pre-commit and CI
 # Those cannot be made read-only as the breeze-complete must be re-sourceable
 
-_breeze_allowed_python_major_minor_versions="3.6 3.7 3.8"
+_breeze_allowed_python_major_minor_versions="3.6 3.7 3.8 3.9"
 _breeze_allowed_backends="sqlite mysql postgres mssql"
 _breeze_allowed_integrations="cassandra kerberos mongo openldap pinot rabbitmq redis statsd trino all"
 _breeze_allowed_generate_constraints_modes="source-providers pypi-providers no-providers"
diff --git a/dev/provider_packages/PROVIDER_README_TEMPLATE.rst.jinja2 b/dev/provider_packages/PROVIDER_README_TEMPLATE.rst.jinja2
index c9837b1b26a9e..cb6575832828f 100644
--- a/dev/provider_packages/PROVIDER_README_TEMPLATE.rst.jinja2
+++ b/dev/provider_packages/PROVIDER_README_TEMPLATE.rst.jinja2
@@ -47,6 +47,8 @@ Installation
 You can install this package on top of an existing airflow 2.1+ installation via
 ``pip install {{PACKAGE_PIP_NAME}}``
 
+The package supports the following python versions: {{ ",".join(SUPPORTED_PYTHON_VERSIONS) }}
+
 {%- if PIP_REQUIREMENTS %}
 
 PIP requirements
diff --git a/dev/provider_packages/SETUP_TEMPLATE.py.jinja2 b/dev/provider_packages/SETUP_TEMPLATE.py.jinja2
index cfe82e20292a3..69dcdac74c5b0 100644
--- a/dev/provider_packages/SETUP_TEMPLATE.py.jinja2
+++ b/dev/provider_packages/SETUP_TEMPLATE.py.jinja2
@@ -68,16 +68,16 @@ def do_setup():
             'Intended Audience :: Developers',
             'Intended Audience :: System Administrators',
             'License :: OSI Approved :: Apache Software License',
-            'Programming Language :: Python :: 3.6',
-            'Programming Language :: Python :: 3.7',
-            'Programming Language :: Python :: 3.8',
+{%- for python_version in SUPPORTED_PYTHON_VERSIONS %}
+            'Programming Language :: Python :: {{ python_version }}',
+{%- endfor %}
             'Topic :: System :: Monitoring',
         ],
         author='Apache Software Foundation',
         author_email='dev@airflow.apache.org',
         url='https://airflow.apache.org/',
         download_url='https://archive.apache.org/dist/airflow/{{ PROVIDERS_FOLDER }}',
-        python_requires='~=3.6',
+        python_requires='{{ PYTHON_REQUIRES }}',
         project_urls={
             'Documentation': 'https://airflow.apache.org/docs/{{ PACKAGE_PIP_NAME }}/{{RELEASE}}/',
             'Bug Tracker': 'https://github.com/apache/airflow/issues',
diff --git a/dev/provider_packages/prepare_provider_packages.py b/dev/provider_packages/prepare_provider_packages.py
index c27c88bae2800..49dc393d6ec8f 100755
--- a/dev/provider_packages/prepare_provider_packages.py
+++ b/dev/provider_packages/prepare_provider_packages.py
@@ -51,6 +51,8 @@
 from rich.progress import Progress
 from rich.syntax import Syntax
 
+ALL_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"]
+
 try:
     from yaml import CSafeLoader as SafeLoader
 except ImportError:
@@ -208,6 +210,7 @@ class ProviderPackageDetails(NamedTuple):
     documentation_provider_package_path: str
     provider_description: str
     versions: List[str]
+    excluded_python_versions: List[str]
 
 
 ENTITY_NAMES = {
@@ -1445,6 +1448,7 @@ def get_provider_details(provider_package_id: str) -> ProviderPackageDetails:
         documentation_provider_package_path=get_documentation_package_path(provider_package_id),
         provider_description=provider_info['description'],
        versions=provider_info['versions'],
+        excluded_python_versions=provider_info.get("excluded-python-versions") or [],
     )
 
 
@@ -1483,6 +1487,12 @@ def get_provider_jinja_context(
     )
     with open(changelog_path) as changelog_file:
         changelog = changelog_file.read()
+    supported_python_versions = [
+        p for p in ALL_PYTHON_VERSIONS if p not in provider_details.excluded_python_versions
+    ]
+    python_requires = "~=3.6"
+    for p in provider_details.excluded_python_versions:
+        python_requires += f", !={p}"
     context: Dict[str, Any] = {
         "ENTITY_TYPES": list(EntityType),
         "README_FILE": "README.rst",
@@ -1517,6 +1527,8 @@ def get_provider_jinja_context(
             provider_details.documentation_provider_package_path,
         ),
         "CHANGELOG": changelog,
+        "SUPPORTED_PYTHON_VERSIONS": supported_python_versions,
+        "PYTHON_REQUIRES": python_requires,
     }
     return context
 
diff --git a/dev/retag_docker_images.py b/dev/retag_docker_images.py
index 2e834752de141..91e8dc4c4af1a 100755
--- a/dev/retag_docker_images.py
+++ b/dev/retag_docker_images.py
@@ -47,7 +47,7 @@
 
 import click
 
-PYTHON_VERSIONS = ["3.6", "3.7", "3.8"]
+PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"]
 
 DOCKERHUB_IMAGES = [
     "{prefix}:python{python_version}-{branch}",
diff --git a/scripts/ci/libraries/_build_images.sh b/scripts/ci/libraries/_build_images.sh
index 099cf853d0daa..598d944549550 100644
--- a/scripts/ci/libraries/_build_images.sh
+++ b/scripts/ci/libraries/_build_images.sh
@@ -495,7 +495,6 @@ function build_images::rebuild_ci_image_if_needed() {
         push_pull_remove_images::pull_ci_images_if_needed
         return
     fi
-    local needs_docker_build="false"
     md5sum::check_if_docker_build_is_needed
     build_images::get_local_build_cache_hash
 
@@ -526,7 +525,7 @@ function build_images::rebuild_ci_image_if_needed() {
         local root_files_count
         root_files_count=$(find "airflow" "tests" -user root | wc -l | xargs)
         if [[ ${root_files_count} != "0" ]]; then
-            ./scripts/ci/tools/ci_fix_ownership.sh
+            ./scripts/ci/tools/ci_fix_ownership.sh || true
         fi
     fi
     verbosity::print_info
diff --git a/scripts/ci/libraries/_initialization.sh b/scripts/ci/libraries/_initialization.sh
index 424fd249ab0ba..b4b69415c591c 100644
--- a/scripts/ci/libraries/_initialization.sh
+++ b/scripts/ci/libraries/_initialization.sh
@@ -102,11 +102,11 @@ function initialization::initialize_base_variables() {
     export PRODUCTION_IMAGE="false"
 
     # All supported major/minor versions of python in all versions of Airflow
-    ALL_PYTHON_MAJOR_MINOR_VERSIONS+=("3.6" "3.7" "3.8")
+    ALL_PYTHON_MAJOR_MINOR_VERSIONS+=("3.6" "3.7" "3.8" "3.9")
     export ALL_PYTHON_MAJOR_MINOR_VERSIONS
 
     # Currently supported major/minor versions of python
-    CURRENT_PYTHON_MAJOR_MINOR_VERSIONS+=("3.6" "3.7" "3.8")
+    CURRENT_PYTHON_MAJOR_MINOR_VERSIONS+=("3.6" "3.7" "3.8" "3.9")
     export CURRENT_PYTHON_MAJOR_MINOR_VERSIONS
 
     # Currently supported versions of Postgres
diff --git a/scripts/ci/libraries/_push_pull_remove_images.sh b/scripts/ci/libraries/_push_pull_remove_images.sh
index d7bc17adfddca..a6a1c5c690a3b 100644
--- a/scripts/ci/libraries/_push_pull_remove_images.sh
+++ b/scripts/ci/libraries/_push_pull_remove_images.sh
@@ -144,7 +144,7 @@ function push_pull_remove_images::pull_base_python_image() {
         push_pull_remove_images::pull_image_github_dockerhub "${AIRFLOW_PYTHON_BASE_IMAGE}" \
             "${GITHUB_REGISTRY_PYTHON_BASE_IMAGE}${python_tag_suffix}"
     else
-        docker_v pull "${AIRFLOW_PYTHON_BASE_IMAGE}"
+        docker_v pull "${AIRFLOW_PYTHON_BASE_IMAGE}" || true
     fi
 }
 
@@ -161,7 +161,7 @@ function push_pull_remove_images::pull_ci_images_if_needed() {
            push_pull_remove_images::pull_image_github_dockerhub "${AIRFLOW_CI_IMAGE}" \
"${GITHUB_REGISTRY_AIRFLOW_CI_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}" else - push_pull_remove_images::pull_image_if_not_present_or_forced "${AIRFLOW_CI_IMAGE}" + push_pull_remove_images::pull_image_if_not_present_or_forced "${AIRFLOW_CI_IMAGE}" || true fi fi } diff --git a/scripts/ci/selective_ci_checks.sh b/scripts/ci/selective_ci_checks.sh index 65b3624587485..b0ab297c82790 100755 --- a/scripts/ci/selective_ci_checks.sh +++ b/scripts/ci/selective_ci_checks.sh @@ -126,9 +126,9 @@ function output_all_basic_variables() { if [[ ${FULL_TESTS_NEEDED_LABEL} == "true" ]]; then initialization::ga_output postgres-exclude '[{ "python-version": "3.6" }]' - initialization::ga_output mysql-exclude '[{ "python-version": "3.7" }]' - initialization::ga_output mssql-exclude '[{ "python-version": "3.7" }]' - initialization::ga_output sqlite-exclude '[{ "python-version": "3.8" }]' + initialization::ga_output mysql-exclude '[{ "python-version": "3.7" }, { "python-version": "3.9" }]' + initialization::ga_output mssql-exclude '[{ "python-version": "3.6" }, { "python-version": "3.8" }]' + initialization::ga_output sqlite-exclude '[{ "python-version": "3.7" }, { "python-version": "3.8" }]' else initialization::ga_output postgres-exclude '[]' initialization::ga_output mysql-exclude '[]' diff --git a/scripts/ci/tools/ci_fix_ownership.sh b/scripts/ci/tools/ci_fix_ownership.sh index 56463d211b375..6ed1161be747f 100755 --- a/scripts/ci/tools/ci_fix_ownership.sh +++ b/scripts/ci/tools/ci_fix_ownership.sh @@ -37,4 +37,4 @@ docker_v run --entrypoint /bin/bash "${EXTRA_DOCKER_FLAGS[@]}" \ --rm \ --env-file "${AIRFLOW_SOURCES}/scripts/ci/docker-compose/_docker.env" \ "${AIRFLOW_CI_IMAGE}" \ - -c /opt/airflow/scripts/in_container/run_fix_ownership.sh + -c /opt/airflow/scripts/in_container/run_fix_ownership.sh || true diff --git a/scripts/ci/tools/prepare_prod_docker_images.sh b/scripts/ci/tools/prepare_prod_docker_images.sh index bd3436fd5223c..ad6cc9564523c 100755 --- a/scripts/ci/tools/prepare_prod_docker_images.sh +++ b/scripts/ci/tools/prepare_prod_docker_images.sh @@ -38,7 +38,7 @@ fi export INSTALL_AIRFLOW_VERSION="${1}" -for python_version in "3.6" "3.7" "3.8" +for python_version in "3.6" "3.7" "3.8" "3.9" do export PYTHON_MAJOR_MINOR_VERSION=${python_version} "${AIRFLOW_SOURCES_DIR}/scripts/ci/images/ci_build_dockerhub.sh" diff --git a/setup.cfg b/setup.cfg index 46bf15bc2fc3d..8b032961ebb1a 100644 --- a/setup.cfg +++ b/setup.cfg @@ -56,6 +56,7 @@ classifiers = Programming Language :: Python :: 3.6 Programming Language :: Python :: 3.7 Programming Language :: Python :: 3.8 + Programming Language :: Python :: 3.9 Topic :: System :: Monitoring project_urls = Documentation=https://airflow.apache.org/docs/ @@ -106,7 +107,7 @@ install_requires = graphviz>=0.12 gunicorn>=19.5.0 httpx - importlib_metadata~=1.7;python_version<"3.9" # We could work with 3.1, but argparse needs <2 + importlib_metadata>=1.7;python_version<"3.9" importlib_resources~=1.4 # Required by vendored-in connexion inflection>=0.3.1 diff --git a/tests/bats/breeze/test_breeze_complete.bats b/tests/bats/breeze/test_breeze_complete.bats index 249a493f80095..787fc5bd03ccc 100644 --- a/tests/bats/breeze/test_breeze_complete.bats +++ b/tests/bats/breeze/test_breeze_complete.bats @@ -25,7 +25,7 @@ source "${AIRFLOW_SOURCES}/breeze-complete" breeze_complete::get_known_values_breeze "-p" - assert_equal "${_breeze_known_values}" "3.6 3.7 3.8" + assert_equal "${_breeze_known_values}" "3.6 3.7 3.8 3.9" } @test "Test get_known_values long" { @@ -34,7 +34,7 @@ 
source "${AIRFLOW_SOURCES}/breeze-complete" breeze_complete::get_known_values_breeze "--python" - assert_equal "${_breeze_known_values}" "3.6 3.7 3.8" + assert_equal "${_breeze_known_values}" "3.6 3.7 3.8 3.9" } @test "Test wrong get_known_values" { @@ -125,7 +125,7 @@ COMP_WORDS=("--python" "") breeze_complete::_comp_breeze - assert_equal "${COMPREPLY[*]}" "3.6 3.7 3.8" + assert_equal "${COMPREPLY[*]}" "3.6 3.7 3.8 3.9" } @test "Test autocomplete --python with prefix" { @@ -136,7 +136,7 @@ COMP_WORDS=("--python" "3") breeze_complete::_comp_breeze - assert_equal "${COMPREPLY[*]}" "3.6 3.7 3.8" + assert_equal "${COMPREPLY[*]}" "3.6 3.7 3.8 3.9" } @test "Test autocomplete build-" { diff --git a/tests/plugins/test_plugins_manager.py b/tests/plugins/test_plugins_manager.py index 778cf080e80f4..23640a8367312 100644 --- a/tests/plugins/test_plugins_manager.py +++ b/tests/plugins/test_plugins_manager.py @@ -30,8 +30,24 @@ from tests.test_utils.config import conf_vars from tests.test_utils.mock_plugins import mock_plugin_manager -py39 = sys.version_info >= (3, 9) -importlib_metadata = 'importlib.metadata' if py39 else 'importlib_metadata' +importlib_metadata_string = 'importlib_metadata' + +try: + import importlib_metadata # pylint: disable=unused-import + + # If importlib_metadata is installed, it takes precedence over built-in importlib.metadata in PY39 + # so we should use the default declared above +except ImportError: + try: + import importlib.metadata # pylint: disable=unused-import + + # only when we do not have importlib_metadata, the importlib.metadata is actually used + importlib_metadata = 'importlib.metadata' + except ImportError: + raise Exception( + "Either importlib_metadata must be installed or importlib.metadata must be" + " available in system libraries (Python 3.9+). We seem to have neither." 
+        )
 
 
 ON_LOAD_EXCEPTION_PLUGIN = """
 from airflow.plugins_manager import AirflowPlugin
@@ -283,9 +299,9 @@ def test_entrypoint_plugin_errors_dont_raise_exceptions(self, caplog):
         mock_entrypoint.load.side_effect = ImportError('my_fake_module not found')
         mock_dist.entry_points = [mock_entrypoint]
 
-        with mock.patch(f'{importlib_metadata}.distributions', return_value=[mock_dist]), caplog.at_level(
-            logging.ERROR, logger='airflow.plugins_manager'
-        ):
+        with mock.patch(
+            f'{importlib_metadata_string}.distributions', return_value=[mock_dist]
+        ), caplog.at_level(logging.ERROR, logger='airflow.plugins_manager'):
             load_entrypoint_plugins()
 
         received_logs = caplog.text
@@ -358,7 +374,7 @@ def test_should_return_correct_source_details(self):
         mock_dist.version = '1.0.0'
         mock_dist.entry_points = [mock_entrypoint]
 
-        with mock.patch(f'{importlib_metadata}.distributions', return_value=[mock_dist]):
+        with mock.patch(f'{importlib_metadata_string}.distributions', return_value=[mock_dist]):
             plugins_manager.load_entrypoint_plugins()
 
         source = plugins_manager.EntryPointSource(mock_entrypoint, mock_dist)
diff --git a/tests/providers/apache/hive/hooks/test_hive.py b/tests/providers/apache/hive/hooks/test_hive.py
index 179dbcc6e308d..94ff8b7240683 100644
--- a/tests/providers/apache/hive/hooks/test_hive.py
+++ b/tests/providers/apache/hive/hooks/test_hive.py
@@ -28,6 +28,7 @@
 import pytest
 from hmsclient import HMSClient
 
+from airflow import PY39
 from airflow.exceptions import AirflowException
 from airflow.models.connection import Connection
 from airflow.models.dag import DAG
@@ -44,6 +45,12 @@
 DEFAULT_DATE_DS = DEFAULT_DATE_ISO[:10]
 
 
+@pytest.mark.skipif(
+    PY39,
+    reason="Hive does not run on Python 3.9 because it brings SASL via thrift-sasl."
+    " This could be removed when https://github.com/dropbox/PyHive/issues/380"
+    " is solved",
+)
 class TestHiveEnvironment(unittest.TestCase):
     def setUp(self):
         self.next_day = (DEFAULT_DATE + datetime.timedelta(days=1)).isoformat()[:10]
@@ -58,6 +65,12 @@ def setUp(self):
         self.hook = HiveMetastoreHook()
 
 
+@pytest.mark.skipif(
+    PY39,
+    reason="Hive does not run on Python 3.9 because it brings SASL via thrift-sasl."
+    " This could be removed when https://github.com/dropbox/PyHive/issues/380"
+    " is solved",
+)
 class TestHiveCliHook(unittest.TestCase):
     @mock.patch('tempfile.tempdir', '/tmp/')
     @mock.patch('tempfile._RandomNameSequence.__next__')
@@ -324,6 +337,12 @@ def test_load_df_with_data_types(self, mock_run_cli):
         assert_equal_ignore_multiple_spaces(self, mock_run_cli.call_args_list[0][0][0], query)
 
 
+@pytest.mark.skipif(
+    PY39,
+    reason="Hive does not run on Python 3.9 because it brings SASL via thrift-sasl."
+    " This could be removed when https://github.com/dropbox/PyHive/issues/380"
+    " is solved",
+)
 class TestHiveMetastoreHook(TestHiveEnvironment):
     VALID_FILTER_MAP = {'key2': 'value2'}
 
@@ -549,6 +568,12 @@ def test_drop_partition(self, get_metastore_client_mock, table_exist_mock):
         assert metastore_mock.drop_partition(self.table, db=self.database, part_vals=[DEFAULT_DATE_DS]), ret
 
 
+@pytest.mark.skipif(
+    PY39,
+    reason="Hive does not run on Python 3.9 because it brings SASL via thrift-sasl."
+ " This could be removed when https://github.com/dropbox/PyHive/issues/380" + " is solved", +) class TestHiveServer2Hook(unittest.TestCase): def _upload_dataframe(self): df = pd.DataFrame({'a': [1, 2], 'b': [1, 2]}) @@ -797,6 +822,12 @@ def test_get_results_with_hive_conf(self): assert 'test_dag_run_id' in output +@pytest.mark.skipif( + PY39, + reason="Hive does not run on Python 3.9 because it brings SASL via thrift-sasl." + " This could be removed when https://github.com/dropbox/PyHive/issues/380" + " is solved", +) class TestHiveCli(unittest.TestCase): def setUp(self): self.nondefault_schema = "nondefault" diff --git a/tests/providers/apache/hive/transfers/test_hive_to_mysql.py b/tests/providers/apache/hive/transfers/test_hive_to_mysql.py index c1fddd23202ed..e85595fb768a9 100644 --- a/tests/providers/apache/hive/transfers/test_hive_to_mysql.py +++ b/tests/providers/apache/hive/transfers/test_hive_to_mysql.py @@ -20,6 +20,9 @@ import unittest from unittest.mock import MagicMock, patch +import pytest + +from airflow import PY39 from airflow.providers.apache.hive.transfers.hive_to_mysql import HiveToMySqlOperator from airflow.utils import timezone from airflow.utils.operator_helpers import context_to_airflow_vars @@ -29,6 +32,12 @@ DEFAULT_DATE = timezone.datetime(2015, 1, 1) +@pytest.mark.skipif( + PY39, + reason="Hive does not run on Python 3.9 because it brings SASL via thrift-sasl." + " This could be removed when https://github.com/dropbox/PyHive/issues/380" + " is solved", +) class TestHiveToMySqlTransfer(TestHiveEnvironment): def setUp(self): self.kwargs = dict( diff --git a/tests/providers/apache/hive/transfers/test_hive_to_samba.py b/tests/providers/apache/hive/transfers/test_hive_to_samba.py index 26c33292ca828..c2a7cdee88414 100644 --- a/tests/providers/apache/hive/transfers/test_hive_to_samba.py +++ b/tests/providers/apache/hive/transfers/test_hive_to_samba.py @@ -19,12 +19,21 @@ import unittest from unittest.mock import MagicMock, Mock, PropertyMock, patch +import pytest + +from airflow import PY39 from airflow.providers.apache.hive.transfers.hive_to_samba import HiveToSambaOperator from airflow.utils.operator_helpers import context_to_airflow_vars from tests.providers.apache.hive import DEFAULT_DATE, TestHiveEnvironment from tests.test_utils.mock_hooks import MockHiveServer2Hook, MockSambaHook +@pytest.mark.skipif( + PY39, + reason="Hive does not run on Python 3.9 because it brings SASL via thrift-sasl." + " This could be removed when https://github.com/dropbox/PyHive/issues/380" + " is solved", +) class TestHive2SambaOperator(TestHiveEnvironment): def setUp(self): self.kwargs = dict( diff --git a/tests/providers/apache/hive/transfers/test_mssql_to_hive.py b/tests/providers/apache/hive/transfers/test_mssql_to_hive.py index 99455dab0fb17..881ea137465e3 100644 --- a/tests/providers/apache/hive/transfers/test_mssql_to_hive.py +++ b/tests/providers/apache/hive/transfers/test_mssql_to_hive.py @@ -21,7 +21,9 @@ from collections import OrderedDict from unittest.mock import Mock, PropertyMock, patch -from airflow import PY38 +import pytest + +from airflow import PY38, PY39 if PY38: MsSqlToHiveTransferOperator: None = None @@ -34,8 +36,14 @@ pymssql = None -@unittest.skipIf(PY38, "Mssql package not available when Python >= 3.8.") -@unittest.skipIf(pymssql is None, 'pymssql package not present') +@pytest.mark.skipif( + PY39, + reason="Hive does not run on Python 3.9 because it brings SASL via thrift-sasl." 
+ " This could be removed when https://github.com/dropbox/PyHive/issues/380" + " is solved", +) +@pytest.mark.skipif(PY38, reason="Mssql package not available when Python >= 3.8.") +@pytest.mark.skipif(pymssql is None, reason='pymssql package not present') class TestMsSqlToHiveTransfer(unittest.TestCase): def setUp(self): self.kwargs = dict(sql='sql', hive_table='table', task_id='test_mssql_to_hive', dag=None) diff --git a/tests/providers/apache/hive/transfers/test_mysql_to_hive.py b/tests/providers/apache/hive/transfers/test_mysql_to_hive.py index 1b7e2cdca6aa4..0bc59967ac1e1 100644 --- a/tests/providers/apache/hive/transfers/test_mysql_to_hive.py +++ b/tests/providers/apache/hive/transfers/test_mysql_to_hive.py @@ -24,6 +24,7 @@ import pytest +from airflow import PY39 from airflow.models.dag import DAG from airflow.providers.apache.hive.transfers.mysql_to_hive import MySqlToHiveOperator from airflow.providers.mysql.hooks.mysql import MySqlHook @@ -58,6 +59,12 @@ def __eq__(self, other: str) -> bool: return tail.startswith("airflow_hiveop_") +@pytest.mark.skipif( + PY39, + reason="Hive does not run on Python 3.9 because it brings SASL via thrift-sasl." + " This could be removed when https://github.com/dropbox/PyHive/issues/380" + " is solved", +) @pytest.mark.backend("mysql") class TestTransfer(unittest.TestCase): def setUp(self): diff --git a/tests/providers/elasticsearch/log/elasticmock/fake_elasticsearch.py b/tests/providers/elasticsearch/log/elasticmock/fake_elasticsearch.py index 19d7d525e05b1..cf81a1a79e259 100644 --- a/tests/providers/elasticsearch/log/elasticmock/fake_elasticsearch.py +++ b/tests/providers/elasticsearch/log/elasticmock/fake_elasticsearch.py @@ -47,7 +47,7 @@ from .utilities import get_random_id -# pylint: disable=redefined-builtin, unused-argument +# pylint: disable=redefined-builtin,unused-argument class FakeElasticsearch(Elasticsearch): __documents_dict = None diff --git a/tests/sensors/test_base.py b/tests/sensors/test_base.py index b28fb032669dc..38556d96c7736 100644 --- a/tests/sensors/test_base.py +++ b/tests/sensors/test_base.py @@ -15,6 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. +# pylint: disable=no-member import unittest from datetime import timedelta diff --git a/tests/sensors/test_smart_sensor_operator.py b/tests/sensors/test_smart_sensor_operator.py index 9ea0c00f092e6..0e41896c94542 100644 --- a/tests/sensors/test_smart_sensor_operator.py +++ b/tests/sensors/test_smart_sensor_operator.py @@ -15,6 +15,8 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. +# pylint: disable=no-member + import datetime import logging