From 9c140cee0cf391822b54c6c02118d2cad97cb5b7 Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Sun, 23 Jan 2022 22:39:42 +0100 Subject: [PATCH] Add Python 3.10 support Python 3.10 support has been long missing because a number of our dependencies had problems with it. It seems that last problems remaining should be fixed now, and we should be able to get a proper Python 3.10 support. Closes: #19059 --- LOCAL_VIRTUALENV.rst | 12 ++++++------ PULL_REQUEST_WORKFLOW.rst | 2 +- README.md | 18 +++++++++--------- airflow/__main__.py | 10 ---------- breeze | 2 +- dev/REFRESHING_CI_CACHE.md | 6 +++--- dev/TRACKING_BACKTRACKING_ISSUES.md | 2 +- dev/prepare_prod_docker_images.sh | 2 +- .../prepare_provider_packages.py | 5 +++++ dev/retag_docker_images.py | 2 +- scripts/ci/libraries/_initialization.sh | 2 +- scripts/ci/libraries/_parallel.sh | 2 +- scripts/ci/selective_ci_checks.sh | 2 +- setup.py | 8 ++------ .../google/cloud/hooks/test_dataflow.py | 4 +--- tests/utils/test_db_cleanup.py | 3 ++- 16 files changed, 36 insertions(+), 46 deletions(-) diff --git a/LOCAL_VIRTUALENV.rst b/LOCAL_VIRTUALENV.rst index 925651abd64d1..ad0be26ce61b7 100644 --- a/LOCAL_VIRTUALENV.rst +++ b/LOCAL_VIRTUALENV.rst @@ -51,7 +51,7 @@ Required Software Packages Use system-level package managers like yum, apt-get for Linux, or Homebrew for macOS to install required software packages: -* Python (One of: 3.7, 3.8, 3.9) +* Python (One of: 3.7, 3.8, 3.9, 3.10) * MySQL 5.7+ * libxml @@ -102,7 +102,7 @@ Creating a Local virtualenv To use your IDE for Airflow development and testing, you need to configure a virtual environment. Ideally you should set up virtualenv for all Python versions that Airflow -supports (3.7, 3.8, 3.9). +supports (3.7, 3.8, 3.9, 3.10). To create and initialize the local virtualenv: @@ -122,7 +122,7 @@ To create and initialize the local virtualenv: .. 
code-block:: bash - conda create -n airflow python=3.7 # or 3.8, or 3.9 + conda create -n airflow python=3.7 # or 3.8, 3.9, 3.10 conda activate airflow 2. Install Python PIP requirements: @@ -150,7 +150,7 @@ for different python versions). For development on current main source: .. code-block:: bash - # use the same version of python as you are working with, 3.7, 3.8, or 3.9 + # use the same version of python as you are working with, 3.7, 3.8, 3.9, or 3.10 pip install -e ".[devel,]" \ --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.7.txt" @@ -163,7 +163,7 @@ You can also install Airflow in non-editable mode: .. code-block:: bash - # use the same version of python as you are working with, 3.7, 3.8, or 3.9 + # use the same version of python as you are working with, 3.7, 3.8, 3.9, or 3.10 pip install ".[devel,]" \ --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.7.txt" @@ -173,7 +173,7 @@ sources, unless you set ``INSTALL_PROVIDERS_FROM_SOURCES`` environment variable .. code-block:: bash - # use the same version of python as you are working with, 3.7, 3.8, or 3.9 + # use the same version of python as you are working with, 3.7, 3.8, 3.9, or 3.10 INSTALL_PROVIDERS_FROM_SOURCES="true" pip install ".[devel,]" \ --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.7.txt" diff --git a/PULL_REQUEST_WORKFLOW.rst b/PULL_REQUEST_WORKFLOW.rst index e5802b24c41d1..7676586e11c48 100644 --- a/PULL_REQUEST_WORKFLOW.rst +++ b/PULL_REQUEST_WORKFLOW.rst @@ -57,7 +57,7 @@ We approached the problem by: 3) Even more optimisation came from limiting the scope of tests to only "default" matrix parameters. So far in Airflow we always run all tests for all matrix combinations. 
The primary matrix components are: - * Python versions (currently 3.6, 3.7, 3.8, 3.9) + * Python versions (currently 3.6, 3.7, 3.8, 3.9, 3.10) * Backend types (currently MySQL/Postgres) * Backed version (currently MySQL 5.7, MySQL 8, Postgres 13 diff --git a/README.md b/README.md index 5ec3d54f9218c..cbb326d859918 100644 --- a/README.md +++ b/README.md @@ -85,15 +85,15 @@ Airflow is not a streaming solution, but it is often used to process real-time d Apache Airflow is tested with: -| | Main version (dev) | Stable version (2.2.4) | -|---------------------|------------------------|--------------------------| -| Python | 3.7, 3.8, 3.9 | 3.6, 3.7, 3.8, 3.9 | -| Platform | AMD64/ARM64(\*) | AMD64 | -| Kubernetes | 1.20, 1.21, 1.22, 1.23 | 1.18, 1.19, 1.20 | -| PostgreSQL | 10, 11, 12, 13 | 9.6, 10, 11, 12, 13 | -| MySQL | 5.7, 8 | 5.7, 8 | -| SQLite | 3.15.0+ | 3.15.0+ | -| MSSQL | 2017(\*), 2019 (\*) | | +| | Main version (dev) | Stable version (2.2.4) | +|---------------------|-------------------------|--------------------------| +| Python | 3.7, 3.8, 3.9, 3.10 | 3.6, 3.7, 3.8, 3.9 | +| Platform | AMD64/ARM64(\*) | AMD64 | +| Kubernetes | 1.20, 1.21, 1.22, 1.23 | 1.18, 1.19, 1.20 | +| PostgreSQL | 10, 11, 12, 13 | 9.6, 10, 11, 12, 13 | +| MySQL | 5.7, 8 | 5.7, 8 | +| SQLite | 3.15.0+ | 3.15.0+ | +| MSSQL | 2017(\*), 2019 (\*) | | \* Experimental diff --git a/airflow/__main__.py b/airflow/__main__.py index 50f156a3acf29..334126b2d930b 100644 --- a/airflow/__main__.py +++ b/airflow/__main__.py @@ -20,14 +20,11 @@ """Main executable module""" import os -import warnings import argcomplete -from airflow import PY310 from airflow.cli import cli_parser from airflow.configuration import conf -from airflow.utils.docs import get_docs_url def main(): @@ -35,13 +32,6 @@ def main(): if conf.get("core", "security") == 'kerberos': os.environ['KRB5CCNAME'] = conf.get('kerberos', 'ccache') os.environ['KRB5_KTNAME'] = conf.get('kerberos', 'keytab') - if PY310: - docs_url = 
get_docs_url('installation/prerequisites.html') - warnings.warn( - "Python v3.10 is not official supported on this version of Airflow. Please be careful. " - f"For details, see: {docs_url}" - ) - parser = cli_parser.get_parser() argcomplete.autocomplete(parser) args = parser.parse_args() diff --git a/breeze b/breeze index 4b1dae501d80f..3f3d66707c3e9 100755 --- a/breeze +++ b/breeze @@ -3620,7 +3620,7 @@ function breeze::run_breeze_command() { # We have different versions of images depending on the python version used. We keep up with the # Latest patch-level changes in Python (this is done automatically during CI test runs) so we have # To only take into account MAJOR and MINOR version of python. This variable keeps the major/minor -# version of python in X.Y format (3.6, 3.7, 3.8, 3.9). +# version of python in X.Y format (3.7, 3.8, 3.9, 3.10). # # In Breeze the precedence of setting the version is as follows: # 1. --python flag (if set, it will explicitly override it in the next step) diff --git a/dev/REFRESHING_CI_CACHE.md b/dev/REFRESHING_CI_CACHE.md index 8d2209d6a141b..740af2fbb1820 100644 --- a/dev/REFRESHING_CI_CACHE.md +++ b/dev/REFRESHING_CI_CACHE.md @@ -48,7 +48,7 @@ manual refresh might be needed. 
# Manually generating constraint files ```bash -export CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING="3.7 3.8 3.9" +export CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING="3.7 3.8 3.9 3.10" for python_version in $(echo "${CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING}") do ./breeze build-image --upgrade-to-newer-dependencies --python ${python_version} @@ -96,7 +96,7 @@ python version is a simple as running the [refresh_images.sh](refresh_images.sh) as parameter: ```bash -./dev/refresh_images.sh 3.9 +./dev/refresh_images.sh 3.10 ``` If you have fast network and powerful computer, you can refresh the images in parallel running the @@ -104,5 +104,5 @@ If you have fast network and powerful computer, you can refresh the images in pa or with gnu parallel: ```bash -parallel -j 4 --linebuffer --tagstring '{}' ./dev/refresh_images.sh ::: 3.7 3.8 3.9 +parallel -j 4 --linebuffer --tagstring '{}' ./dev/refresh_images.sh ::: 3.7 3.8 3.9 3.10 ``` diff --git a/dev/TRACKING_BACKTRACKING_ISSUES.md b/dev/TRACKING_BACKTRACKING_ISSUES.md index 763758cb7de93..a8d1fd3a2c9f0 100644 --- a/dev/TRACKING_BACKTRACKING_ISSUES.md +++ b/dev/TRACKING_BACKTRACKING_ISSUES.md @@ -211,7 +211,7 @@ Options: after if specified) --updated-on-or-after TEXT Date when the release was updated after - --python [3.7|3.8|3.9] Python version used + --python [3.7|3.8|3.9|3.10] Python version used --constraints-branch TEXT Constraint branch to use to find newer dependencies diff --git a/dev/prepare_prod_docker_images.sh b/dev/prepare_prod_docker_images.sh index 17dd1c7d98359..e0a0c65a6d254 100755 --- a/dev/prepare_prod_docker_images.sh +++ b/dev/prepare_prod_docker_images.sh @@ -20,7 +20,7 @@ export AIRFLOW_SOURCES_DIR set -e -CURRENT_PYTHON_MAJOR_MINOR_VERSIONS=("3.7" "3.8" "3.9") +CURRENT_PYTHON_MAJOR_MINOR_VERSIONS=("3.7" "3.8" "3.9" "3.10") usage() { local cmdname diff --git a/dev/provider_packages/prepare_provider_packages.py b/dev/provider_packages/prepare_provider_packages.py index
d12bd887704e9..9ea957fb197bc 100755 --- a/dev/provider_packages/prepare_provider_packages.py +++ b/dev/provider_packages/prepare_provider_packages.py @@ -2110,6 +2110,11 @@ def summarise_total_vs_bad_and_warnings(total: int, bad: int, warns: List[warnin 'You may encounter bugs or missing features.', "apache_beam", ), + ( + 'This version of Apache Beam has not been sufficiently tested on Python 3.10. ' + 'You may encounter bugs or missing features.', + "apache_beam", + ), ( "Using or importing the ABCs from 'collections' instead of from 'collections.abc' is deprecated since" " Python 3.3,and in 3.9 it will stop working", diff --git a/dev/retag_docker_images.py b/dev/retag_docker_images.py index bcb81c55223f8..765f6d8560ab8 100755 --- a/dev/retag_docker_images.py +++ b/dev/retag_docker_images.py @@ -31,7 +31,7 @@ import rich_click as click -PYTHON_VERSIONS = ["3.7", "3.8", "3.9"] +PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] GHCR_IO_PREFIX = "ghcr.io" diff --git a/scripts/ci/libraries/_initialization.sh b/scripts/ci/libraries/_initialization.sh index 7087e04913f82..0f19a471a0e3c 100644 --- a/scripts/ci/libraries/_initialization.sh +++ b/scripts/ci/libraries/_initialization.sh @@ -121,7 +121,7 @@ function initialization::initialize_base_variables() { export ALL_PYTHON_MAJOR_MINOR_VERSIONS # Currently supported major/minor versions of python - CURRENT_PYTHON_MAJOR_MINOR_VERSIONS+=("3.7" "3.8" "3.9") + CURRENT_PYTHON_MAJOR_MINOR_VERSIONS+=("3.7" "3.8" "3.9" "3.10") export CURRENT_PYTHON_MAJOR_MINOR_VERSIONS # Currently supported versions of Postgres diff --git a/scripts/ci/libraries/_parallel.sh b/scripts/ci/libraries/_parallel.sh index 3df223487648b..018491063ca0a 100644 --- a/scripts/ci/libraries/_parallel.sh +++ b/scripts/ci/libraries/_parallel.sh @@ -257,7 +257,7 @@ function parallel::get_maximum_parallel_k8s_jobs() { echo "${COLOR_YELLOW}Maximum parallel k8s jobs forced vi MAX_PARALLEL_K8S_JOBS = ${MAX_PARALLEL_K8S_JOBS}${COLOR_RESET}" echo else - 
MAX_PARALLEL_K8S_JOBS=${CPUS_AVAILABLE_FOR_DOCKER} + MAX_PARALLEL_K8S_JOBS=$((CPUS_AVAILABLE_FOR_DOCKER / 4)) echo echo "${COLOR_YELLOW}Maximum parallel k8s jobs set to number of CPUs available for Docker = ${MAX_PARALLEL_K8S_JOBS}${COLOR_RESET}" echo diff --git a/scripts/ci/selective_ci_checks.sh b/scripts/ci/selective_ci_checks.sh index 410013c2f3060..0f2521cee94a7 100755 --- a/scripts/ci/selective_ci_checks.sh +++ b/scripts/ci/selective_ci_checks.sh @@ -127,7 +127,7 @@ function output_all_basic_variables() { if [[ ${FULL_TESTS_NEEDED_LABEL} == "true" ]]; then initialization::ga_output postgres-exclude '[{ "python-version": "3.7" }]' initialization::ga_output mssql-exclude '[{ "python-version": "3.8" }]' - initialization::ga_output mysql-exclude '[]' + initialization::ga_output mysql-exclude '[{ "python-version": "3.10" }]' initialization::ga_output sqlite-exclude '[{ "python-version": "3.9" }]' else initialization::ga_output postgres-exclude '[]' diff --git a/setup.py b/setup.py index c3ea0edb7ed69..1f6c0c9fd888f 100644 --- a/setup.py +++ b/setup.py @@ -964,12 +964,8 @@ def get_provider_package_from_package_id(package_id: str) -> str: def get_excluded_providers() -> List[str]: - """ - Returns packages excluded for the current python version. - Currently the only excluded provider is apache hive for Python 3.9. - Until https://github.com/dropbox/PyHive/issues/380 is fixed. 
- """ - return ['apache.hive'] if PY39 else [] + """Returns packages excluded for the current python version.""" + return [] def get_all_provider_packages() -> str: diff --git a/tests/providers/google/cloud/hooks/test_dataflow.py b/tests/providers/google/cloud/hooks/test_dataflow.py index f22e18f4512df..98aeebe2fb1e4 100644 --- a/tests/providers/google/cloud/hooks/test_dataflow.py +++ b/tests/providers/google/cloud/hooks/test_dataflow.py @@ -1045,9 +1045,7 @@ def test_start_flex_template(self, mock_conn, mock_controller): cancel_timeout=DEFAULT_CANCEL_TIMEOUT, wait_until_finished=self.dataflow_hook.wait_until_finished, ) - mock_controller.return_value.get_jobs.wait_for_done.assrt_called_once_with() - mock_controller.return_value.get_jobs.assrt_called_once_with() - + mock_controller.return_value.get_jobs.assert_called_once_with(refresh=True) assert result == {"id": TEST_JOB_ID} @mock.patch(DATAFLOW_STRING.format('_DataflowJobsController')) diff --git a/tests/utils/test_db_cleanup.py b/tests/utils/test_db_cleanup.py index 8ef80eb86bc85..290d7d75723db 100644 --- a/tests/utils/test_db_cleanup.py +++ b/tests/utils/test_db_cleanup.py @@ -210,7 +210,8 @@ def test_no_models_missing(self): proj_root = Path(__file__).parent.parent.parent mods = list( - f"airflow.models.{name}" for _, name, _ in pkgutil.iter_modules([proj_root / 'airflow/models']) + f"airflow.models.{name}" + for _, name, _ in pkgutil.iter_modules([str(proj_root / 'airflow/models')]) ) all_models = {}