diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index 00c65e99767..0ad7e5f3e13 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -4,7 +4,5 @@ contact_links: url: https://github.com/pydata/xarray/discussions about: | Ask questions and discuss with other community members here. - If you have a question like "How do I concatenate a list of datasets?" then + If you have a question like "How do I concatenate a list of datasets?" then please include a self-contained reproducible example if possible. - - \ No newline at end of file diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 03e046512fb..74f3fe2430f 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -2,9 +2,14 @@ - [ ] Closes #xxxx - [ ] Tests added -- [ ] Passes `isort . && black . && mypy . && flake8` +- [ ] Passes `pre-commit run --all-files` - [ ] User visible changes (including notable bug fixes) are documented in `whats-new.rst` - [ ] New functions/methods are listed in `api.rst` -By default, the upstream dev CI is disabled on pull request and push events. You can override this behavior per commit by adding a `[test-upstream]` tag to the first line of the commit message. + +

+Overriding CI behaviors
+
+By default, the upstream dev CI is disabled on pull request and push events. You can override this behavior per commit by adding a `[test-upstream]` tag to the first line of the commit message. For documentation-only commits, you can skip the CI per commit by adding a `[skip-ci]` tag to the first line of the commit message.
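As a quick illustration of the two tags added to the template above (the example commit messages are invented for illustration; only the `[test-upstream]` and `[skip-ci]` keywords come from this change):

    # run the upstream-dev CI for this commit, even on a pull request
    git commit -m "[test-upstream] check rolling reductions against upstream dask"

    # documentation-only change: skip the CI entirely for this commit
    git commit -m "[skip-ci] fix typo in doc/plotting.rst"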
diff --git a/.github/stale.yml b/.github/stale.yml index f4835b5eeec..f4057844d01 100644 --- a/.github/stale.yml +++ b/.github/stale.yml @@ -56,4 +56,4 @@ limitPerRun: 1 # start with a small number # issues: # exemptLabels: -# - confirmed \ No newline at end of file +# - confirmed diff --git a/.github/workflows/ci-additional.yaml b/.github/workflows/ci-additional.yaml new file mode 100644 index 00000000000..1e19f594853 --- /dev/null +++ b/.github/workflows/ci-additional.yaml @@ -0,0 +1,188 @@ +name: CI Additional +on: + push: + branches: + - "*" + pull_request: + branches: + - "*" + workflow_dispatch: # allows you to trigger manually + +jobs: + detect-ci-trigger: + name: detect ci trigger + runs-on: ubuntu-latest + if: github.event_name == 'push' || github.event_name == 'pull_request' + outputs: + triggered: ${{ steps.detect-trigger.outputs.trigger-found }} + steps: + - uses: actions/checkout@v2 + - uses: ./.github/actions/detect-ci-trigger + id: detect-trigger + with: + keyword: "[skip-ci]" + + test: + name: ${{ matrix.os }} ${{ matrix.env }} + runs-on: ${{ matrix.os }} + needs: detect-ci-trigger + if: needs.detect-ci-trigger.outputs.triggered == 'false' + defaults: + run: + shell: bash -l {0} + strategy: + fail-fast: false + matrix: + os: ["ubuntu-latest"] + env: + [ + "py37-bare-minimum", + "py37-min-all-deps", + "py37-min-nep18", + "py38-all-but-dask", + "py38-backend-api-v2", + "py38-flaky", + ] + steps: + - name: Cancel previous runs + uses: styfle/cancel-workflow-action@0.6.0 + with: + access_token: ${{ github.token }} + - uses: actions/checkout@v2 + with: + fetch-depth: 0 # Fetch all history for all branches and tags. + + - name: Set environment variables + run: | + if [[ ${{ matrix.env }} == "py38-backend-api-v2" ]] ; + then + echo "CONDA_ENV_FILE=ci/requirements/environment.yml" >> $GITHUB_ENV + echo "XARRAY_BACKEND_API=v2" >> $GITHUB_ENV + + elif [[ ${{ matrix.env }} == "py38-flaky" ]] ; + then + echo "CONDA_ENV_FILE=ci/requirements/environment.yml" >> $GITHUB_ENV + echo "PYTEST_EXTRA_FLAGS=--run-flaky --run-network-tests" >> $GITHUB_ENV + + else + echo "CONDA_ENV_FILE=ci/requirements/${{ matrix.env }}.yml" >> $GITHUB_ENV + fi + - name: Cache conda + uses: actions/cache@v2 + with: + path: ~/conda_pkgs_dir + key: + ${{ runner.os }}-conda-${{ matrix.env }}-${{ + hashFiles('ci/requirements/**.yml') }} + + - uses: conda-incubator/setup-miniconda@v2 + with: + channels: conda-forge + channel-priority: strict + mamba-version: "*" + activate-environment: xarray-tests + auto-update-conda: false + python-version: 3.8 + use-only-tar-bz2: true + + - name: Install conda dependencies + run: | + mamba env update -f $CONDA_ENV_FILE + + - name: Install xarray + run: | + python -m pip install --no-deps -e . 
+ + - name: Version info + run: | + conda info -a + conda list + python xarray/util/print_versions.py + - name: Import xarray + run: | + python -c "import xarray" + - name: Run tests + run: | + python -m pytest -n 4 \ + --cov=xarray \ + --cov-report=xml \ + $PYTEST_EXTRA_FLAGS + + - name: Upload code coverage to Codecov + uses: codecov/codecov-action@v1 + with: + file: ./coverage.xml + flags: unittests,${{ matrix.env }} + env_vars: RUNNER_OS + name: codecov-umbrella + fail_ci_if_error: false + doctest: + name: Doctests + runs-on: "ubuntu-latest" + needs: detect-ci-trigger + if: needs.detect-ci-trigger.outputs.triggered == 'false' + defaults: + run: + shell: bash -l {0} + + steps: + - name: Cancel previous runs + uses: styfle/cancel-workflow-action@0.6.0 + with: + access_token: ${{ github.token }} + - uses: actions/checkout@v2 + with: + fetch-depth: 0 # Fetch all history for all branches and tags. + - uses: conda-incubator/setup-miniconda@v2 + with: + channels: conda-forge + channel-priority: strict + mamba-version: "*" + activate-environment: xarray-tests + auto-update-conda: false + python-version: "3.8" + + - name: Install conda dependencies + run: | + mamba env update -f ci/requirements/environment.yml + - name: Install xarray + run: | + python -m pip install --no-deps -e . + - name: Version info + run: | + conda info -a + conda list + python xarray/util/print_versions.py + - name: Run doctests + run: | + python -m pytest --doctest-modules xarray --ignore xarray/tests + + min-version-policy: + name: Minimum Version Policy + runs-on: "ubuntu-latest" + needs: detect-ci-trigger + if: needs.detect-ci-trigger.outputs.triggered == 'false' + defaults: + run: + shell: bash -l {0} + + steps: + - name: Cancel previous runs + uses: styfle/cancel-workflow-action@0.6.0 + with: + access_token: ${{ github.token }} + - uses: actions/checkout@v2 + with: + fetch-depth: 0 # Fetch all history for all branches and tags. 
+ - uses: conda-incubator/setup-miniconda@v2 + with: + channels: conda-forge + channel-priority: strict + mamba-version: "*" + auto-update-conda: false + + - name: minimum versions policy + run: | + mamba install -y pyyaml + python ci/min_deps_check.py ci/requirements/py37-bare-minimum.yml + python ci/min_deps_check.py ci/requirements/py37-min-all-deps.yml diff --git a/.github/workflows/ci-pre-commit.yml b/.github/workflows/ci-pre-commit.yml new file mode 100644 index 00000000000..1ab5642367e --- /dev/null +++ b/.github/workflows/ci-pre-commit.yml @@ -0,0 +1,16 @@ +name: linting + +on: + push: + branches: "*" + pull_request: + branches: "*" + +jobs: + linting: + name: "pre-commit hooks" + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + - uses: pre-commit/action@v2.0.0 diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml new file mode 100644 index 00000000000..849d8a445a5 --- /dev/null +++ b/.github/workflows/ci.yaml @@ -0,0 +1,102 @@ +name: CI +on: + push: + branches: + - "*" + pull_request: + branches: + - "*" + workflow_dispatch: # allows you to trigger manually + +jobs: + detect-ci-trigger: + name: detect ci trigger + runs-on: ubuntu-latest + if: github.event_name == 'push' || github.event_name == 'pull_request' + outputs: + triggered: ${{ steps.detect-trigger.outputs.trigger-found }} + steps: + - uses: actions/checkout@v2 + - uses: ./.github/actions/detect-ci-trigger + id: detect-trigger + with: + keyword: "[skip-ci]" + test: + name: ${{ matrix.os }} py${{ matrix.python-version }} + runs-on: ${{ matrix.os }} + needs: detect-ci-trigger + if: needs.detect-ci-trigger.outputs.triggered == 'false' + defaults: + run: + shell: bash -l {0} + strategy: + fail-fast: false + matrix: + os: ["ubuntu-latest", "macos-latest", "windows-latest"] + python-version: ["3.7", "3.8"] + steps: + - name: Cancel previous runs + uses: styfle/cancel-workflow-action@0.6.0 + with: + access_token: ${{ github.token }} + - uses: actions/checkout@v2 + with: + fetch-depth: 0 # Fetch all history for all branches and tags. + - name: Set environment variables + run: | + if [[ ${{ matrix.os }} == windows* ]] ; + then + echo "CONDA_ENV_FILE=ci/requirements/environment-windows.yml" >> $GITHUB_ENV + else + echo "CONDA_ENV_FILE=ci/requirements/environment.yml" >> $GITHUB_ENV + + fi + echo "PYTHON_VERSION=${{ matrix.python-version }}" >> $GITHUB_ENV + + - name: Cache conda + uses: actions/cache@v2 + with: + path: ~/conda_pkgs_dir + key: + ${{ runner.os }}-conda-py${{ matrix.python-version }}-${{ + hashFiles('ci/requirements/**.yml') }} + - uses: conda-incubator/setup-miniconda@v2 + with: + channels: conda-forge + channel-priority: strict + mamba-version: "*" + activate-environment: xarray-tests + auto-update-conda: false + python-version: ${{ matrix.python-version }} + use-only-tar-bz2: true + + - name: Install conda dependencies + run: | + mamba env update -f $CONDA_ENV_FILE + + - name: Install xarray + run: | + python -m pip install --no-deps -e . 
+ + - name: Version info + run: | + conda info -a + conda list + python xarray/util/print_versions.py + - name: Import xarray + run: | + python -c "import xarray" + - name: Run tests + run: | + python -m pytest -n 4 \ + --cov=xarray \ + --cov-report=xml + + - name: Upload code coverage to Codecov + uses: codecov/codecov-action@v1 + with: + file: ./coverage.xml + flags: unittests + env_vars: RUNNER_OS,PYTHON_VERSION + name: codecov-umbrella + fail_ci_if_error: false diff --git a/.github/workflows/upstream-dev-ci.yaml b/.github/workflows/upstream-dev-ci.yaml index cb5e1272d91..bb325c1837e 100644 --- a/.github/workflows/upstream-dev-ci.yaml +++ b/.github/workflows/upstream-dev-ci.yaml @@ -1,4 +1,4 @@ -name: CI +name: CI Upstream on: push: branches: @@ -23,7 +23,7 @@ jobs: id: detect-trigger with: keyword: "[test-upstream]" - + upstream-dev: name: upstream-dev runs-on: ubuntu-latest @@ -60,7 +60,7 @@ jobs: python-version: ${{ matrix.python-version }} - name: Set up conda environment run: | - mamba env update -f ci/requirements/py38.yml + mamba env update -f ci/requirements/environment.yml bash ci/install-upstream-wheels.sh conda list - name: import xarray @@ -148,8 +148,8 @@ jobs: } const result = await github.graphql(query, variables) - // If no issue is open, create a new issue, - // else update the body of the existing issue. + // If no issue is open, create a new issue, + // else update the body of the existing issue. if (result.repository.issues.edges.length === 0) { github.issues.create({ owner: variables.owner, diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6659366538b..b0fa21a7bf9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,5 +1,11 @@ # https://pre-commit.com/ repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v3.4.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml # isort should run before black as black sometimes tweaks the isort output - repo: https://github.com/PyCQA/isort rev: 5.7.0 @@ -22,6 +28,7 @@ repos: rev: v0.790 # Must match ci/requirements/*.yml hooks: - id: mypy + exclude: "properties|asv_bench" # run this occasionally, ref discussion https://github.com/pydata/xarray/pull/3194 # - repo: https://github.com/asottile/pyupgrade # rev: v1.22.1 diff --git a/README.rst b/README.rst index 0b3b66c468d..e258a8ccd23 100644 --- a/README.rst +++ b/README.rst @@ -1,8 +1,8 @@ xarray: N-D labeled arrays and datasets ======================================= -.. image:: https://dev.azure.com/xarray/xarray/_apis/build/status/pydata.xarray?branchName=master - :target: https://dev.azure.com/xarray/xarray/_build/latest?definitionId=1&branchName=master +.. image:: https://github.com/pydata/xarray/workflows/CI/badge.svg?branch=master + :target: https://github.com/pydata/xarray/actions?query=workflow%3ACI .. image:: https://codecov.io/gh/pydata/xarray/branch/master/graph/badge.svg :target: https://codecov.io/gh/pydata/xarray .. 
image:: https://readthedocs.org/projects/xray/badge/?version=latest diff --git a/azure-pipelines.yml b/azure-pipelines.yml deleted file mode 100644 index f9bfa032a98..00000000000 --- a/azure-pipelines.yml +++ /dev/null @@ -1,124 +0,0 @@ -variables: - pytest_extra_flags: '' - allow_failure: false - upstream_dev: false - -jobs: - -- job: Linux - strategy: - matrix: - py37-bare-minimum: - conda_env: py37-bare-minimum - py37-min-all-deps: - conda_env: py37-min-all-deps - py37-min-nep18: - conda_env: py37-min-nep18 - py37: - conda_env: py37 - py38: - conda_env: py38 - py38-backend-api-v2: - conda_env: py38 - environment_variables: XARRAY_BACKEND_API=v2 - py38-all-but-dask: - conda_env: py38-all-but-dask - py38-upstream-dev: - conda_env: py38 - upstream_dev: true - allow_failure: true - py38-flaky: - conda_env: py38 - pytest_extra_flags: --run-flaky --run-network-tests - allow_failure: true - pool: - vmImage: 'ubuntu-16.04' - steps: - - template: ci/azure/unit-tests.yml - -- job: MacOSX - strategy: - matrix: - py38: - conda_env: py38 - pool: - vmImage: 'macOS-10.15' - steps: - - template: ci/azure/unit-tests.yml - -- job: Windows - strategy: - matrix: - py37: - conda_env: py37-windows - pool: - vmImage: 'windows-2019' - steps: - - template: ci/azure/unit-tests.yml - -- job: LintFlake8 - pool: - vmImage: 'ubuntu-16.04' - steps: - - task: UsePythonVersion@0 - - bash: python -m pip install flake8 - displayName: Install flake8 - - bash: flake8 - displayName: flake8 lint checks - -- job: FormattingBlack - pool: - vmImage: 'ubuntu-16.04' - steps: - - task: UsePythonVersion@0 - - bash: python -m pip install black - displayName: Install black - - bash: black --diff --check . - displayName: black formatting check - -- job: Doctests - variables: - conda_env: py38 - pool: - vmImage: 'ubuntu-16.04' - steps: - - template: ci/azure/install.yml - - bash: | - source activate xarray-tests - python -m pytest --doctest-modules xarray --ignore xarray/tests - displayName: Run doctests - -- job: TypeChecking - variables: - conda_env: py38 - pool: - vmImage: 'ubuntu-16.04' - steps: - - template: ci/azure/install.yml - - bash: | - source activate xarray-tests - mypy . - displayName: mypy type checks - -- job: isort - variables: - conda_env: py38 - pool: - vmImage: 'ubuntu-16.04' - steps: - - template: ci/azure/install.yml - - bash: | - source activate xarray-tests - isort --check . 
- displayName: isort formatting checks - -- job: MinimumVersionsPolicy - pool: - vmImage: 'ubuntu-16.04' - steps: - - template: ci/azure/add-conda-to-path.yml - - bash: | - conda install -y pyyaml - python ci/min_deps_check.py ci/requirements/py37-bare-minimum.yml - python ci/min_deps_check.py ci/requirements/py37-min-all-deps.yml - displayName: minimum versions policy diff --git a/ci/azure/add-conda-to-path.yml b/ci/azure/add-conda-to-path.yml deleted file mode 100644 index e5173835388..00000000000 --- a/ci/azure/add-conda-to-path.yml +++ /dev/null @@ -1,18 +0,0 @@ -# https://docs.microsoft.com/en-us/azure/devops/pipelines/languages/anaconda -steps: - -- bash: | - echo "##vso[task.prependpath]$CONDA/bin" - displayName: Add conda to PATH (Linux) - condition: eq(variables['Agent.OS'], 'Linux') - -- bash: | - echo "##vso[task.prependpath]$CONDA/bin" - sudo chown -R $USER $CONDA - displayName: Add conda to PATH (OS X) - condition: eq(variables['Agent.OS'], 'Darwin') - -- powershell: | - Write-Host "##vso[task.prependpath]$env:CONDA\Scripts" - displayName: Add conda to PATH (Windows) - condition: eq(variables['Agent.OS'], 'Windows_NT') diff --git a/ci/azure/install.yml b/ci/azure/install.yml deleted file mode 100644 index 2b080287669..00000000000 --- a/ci/azure/install.yml +++ /dev/null @@ -1,68 +0,0 @@ -parameters: - env_file: ci/requirements/$CONDA_ENV.yml - -steps: - -- template: add-conda-to-path.yml - -- bash: | - conda install -y mamba -c conda-forge - source activate # required for mamba on windows - mamba env create -n xarray-tests --file ${{ parameters.env_file }} - displayName: Install conda dependencies - -# TODO: add sparse back in, once Numba works with the development version of -# NumPy again: https://github.com/pydata/xarray/issues/4146 -- bash: | - source activate xarray-tests - conda uninstall -y --force \ - numpy \ - scipy \ - pandas \ - matplotlib \ - dask \ - distributed \ - zarr \ - cftime \ - rasterio \ - pint \ - bottleneck \ - sparse - python -m pip install \ - -i https://pypi.anaconda.org/scipy-wheels-nightly/simple \ - --no-deps \ - --pre \ - --upgrade \ - numpy \ - scipy \ - pandas - python -m pip install \ - -f https://7933911d6844c6c53a7d-47bd50c35cd79bd838daf386af554a83.ssl.cf2.rackcdn.com \ - --no-deps \ - --pre \ - --upgrade \ - matplotlib - python -m pip install \ - --no-deps \ - --upgrade \ - git+https://github.com/dask/dask \ - git+https://github.com/dask/distributed \ - git+https://github.com/zarr-developers/zarr \ - git+https://github.com/Unidata/cftime \ - git+https://github.com/mapbox/rasterio \ - git+https://github.com/hgrecco/pint \ - git+https://github.com/pydata/bottleneck - condition: eq(variables['UPSTREAM_DEV'], 'true') - displayName: Install upstream dev dependencies - -- bash: | - source activate xarray-tests - python -m pip install --no-deps -e . 
- displayName: Install xarray - -- bash: | - source activate xarray-tests - conda info -a - conda list - python xarray/util/print_versions.py - displayName: Version info diff --git a/ci/azure/unit-tests.yml b/ci/azure/unit-tests.yml deleted file mode 100644 index 0fb2d53cc36..00000000000 --- a/ci/azure/unit-tests.yml +++ /dev/null @@ -1,39 +0,0 @@ -steps: - -- template: install.yml - -- bash: | - source activate xarray-tests - python -OO -c "import xarray" - displayName: Import xarray - -# Work around for allowed test failures: -# https://github.com/microsoft/azure-pipelines-tasks/issues/9302 -- bash: | - source activate xarray-tests - $(environment_variables) pytest -n 4 \ - --junitxml=junit/test-results.xml \ - --cov=xarray \ - --cov-report=xml \ - $(pytest_extra_flags) \ - || ( \ - [ "$ALLOW_FAILURE" = "true" ] \ - && echo "##vso[task.logissue type=warning]Allowed CI failure!!" \ - && echo "##vso[task.complete result=SucceededWithIssues]" \ - ) - displayName: Run tests - -- bash: | - curl https://codecov.io/bash > codecov.sh - bash codecov.sh -t 688f4d53-31bb-49b5-8370-4ce6f792cf3d - displayName: Upload coverage to codecov.io - -# TODO: publish coverage results to Azure, once we can merge them across -# multiple jobs: https://stackoverflow.com/questions/56776185 - -- task: PublishTestResults@2 - condition: succeededOrFailed() - inputs: - testResultsFiles: '**/test-*.xml' - failTaskOnFailedTests: false - testRunTitle: '$(Agent.JobName)' diff --git a/ci/min_deps_check.py b/ci/min_deps_check.py index 5bb4c155410..3ffab645e8e 100755 --- a/ci/min_deps_check.py +++ b/ci/min_deps_check.py @@ -1,5 +1,5 @@ """Fetch from conda database all available versions of the xarray dependencies and their -publication date. Compare it against requirements/py36-min-all-deps.yml to verify the +publication date. Compare it against requirements/py37-min-all-deps.yml to verify the policy on obsolete dependencies is being followed. 
Print a pretty report :) """ import itertools @@ -56,7 +56,7 @@ def warning(msg: str) -> None: def parse_requirements(fname) -> Iterator[Tuple[str, int, int, Optional[int]]]: - """Load requirements/py36-min-all-deps.yml + """Load requirements/py37-min-all-deps.yml Yield (package name, major version, minor version, [patch version]) """ diff --git a/ci/requirements/py37-windows.yml b/ci/requirements/environment-windows.yml similarity index 77% rename from ci/requirements/py37-windows.yml rename to ci/requirements/environment-windows.yml index 6dd8be768b1..6de2bc8dc64 100644 --- a/ci/requirements/py37-windows.yml +++ b/ci/requirements/environment-windows.yml @@ -2,27 +2,21 @@ name: xarray-tests channels: - conda-forge dependencies: - - python=3.7 - - black - boto3 - bottleneck - cartopy # - cdms2 # Not available on Windows - # - cfgrib # Causes Python interpreter crash on Windows + # - cfgrib # Causes Python interpreter crash on Windows: https://github.com/pydata/xarray/pull/3340 - cftime - - coveralls - dask - distributed - - flake8 - h5netcdf - h5py=2 - hdf5 - hypothesis - iris - - isort - lxml # Optional dep of pydap - matplotlib-base - - mypy=0.790 # Must match .pre-commit-config.yaml - nc-time-axis - netcdf4 - numba @@ -30,6 +24,7 @@ dependencies: - pandas - pint - pip + - pre-commit - pseudonetcdf - pydap # - pynio # Not available on Windows diff --git a/ci/requirements/py38.yml b/ci/requirements/environment.yml similarity index 82% rename from ci/requirements/py38.yml rename to ci/requirements/environment.yml index 7e31216c285..0f59d9570c8 100644 --- a/ci/requirements/py38.yml +++ b/ci/requirements/environment.yml @@ -3,27 +3,21 @@ channels: - conda-forge - nodefaults dependencies: - - python=3.8 - - black - boto3 - bottleneck - cartopy - cdms2 - cfgrib - cftime - - coveralls - dask - distributed - - flake8 - h5netcdf - h5py=2 - hdf5 - hypothesis - iris - - isort - lxml # Optional dep of pydap - matplotlib-base - - mypy=0.790 # Must match .pre-commit-config.yaml - nc-time-axis - netcdf4 - numba @@ -31,9 +25,10 @@ dependencies: - pandas - pint - pip=20.2 + - pre-commit - pseudonetcdf - pydap - # - pynio: not compatible with netCDF4>1.5.3; only tested in py36-bare-minimum + # - pynio: not compatible with netCDF4>1.5.3; only tested in py37-bare-minimum - pytest - pytest-cov - pytest-env diff --git a/ci/requirements/py37.yml b/ci/requirements/py37.yml deleted file mode 100644 index 34d8b3a3fc2..00000000000 --- a/ci/requirements/py37.yml +++ /dev/null @@ -1,49 +0,0 @@ -name: xarray-tests -channels: - - conda-forge - - nodefaults -dependencies: - - python=3.7 - - black - - boto3 - - bottleneck - - cartopy - - cdms2 - - cfgrib - - cftime - - coveralls - - dask - - distributed - - flake8 - - h5netcdf - - h5py=2 - - hdf5 - - hypothesis - - iris - - isort - - lxml # Optional dep of pydap - - matplotlib-base - - mypy=0.790 # Must match .pre-commit-config.yaml - - nc-time-axis - - netcdf4 - - numba - - numpy - - pandas - - pint - - pip - - pseudonetcdf - - pydap - # - pynio: not compatible with netCDF4>1.5.3; only tested in py36-bare-minimum - - pytest - - pytest-cov - - pytest-env - - pytest-xdist - - rasterio - - scipy - - seaborn - - setuptools - - sparse - - toolz - - zarr - - pip: - - numbagg diff --git a/ci/requirements/py38-all-but-dask.yml b/ci/requirements/py38-all-but-dask.yml index 5eb8fa39cef..14930f5272d 100644 --- a/ci/requirements/py38-all-but-dask.yml +++ b/ci/requirements/py38-all-but-dask.yml @@ -30,7 +30,7 @@ dependencies: - pip - pseudonetcdf - pydap - # - pynio: not compatible 
with netCDF4>1.5.3; only tested in py36-bare-minimum + # - pynio: not compatible with netCDF4>1.5.3; only tested in py37-bare-minimum - pytest - pytest-cov - pytest-env diff --git a/doc/_templates/autosummary/base.rst b/doc/_templates/autosummary/base.rst index e6726a71d7c..53f2a29c193 100644 --- a/doc/_templates/autosummary/base.rst +++ b/doc/_templates/autosummary/base.rst @@ -1,3 +1,3 @@ :github_url: {{ fullname | github_url | escape_underscores }} -{% extends "!autosummary/base.rst" %} \ No newline at end of file +{% extends "!autosummary/base.rst" %} diff --git a/doc/contributing.rst b/doc/contributing.rst index 39863ec96c7..9c4ce5a0af2 100644 --- a/doc/contributing.rst +++ b/doc/contributing.rst @@ -152,8 +152,13 @@ We'll now kick off a two-step process: .. code-block:: sh # Create and activate the build environment - # This is for Linux and MacOS. On Windows, use py37-windows.yml instead. - conda env create -f ci/requirements/py37.yml + conda create -c conda-forge -n xarray-tests python=3.8 + + # This is for Linux and MacOS + conda env update -f ci/requirements/environment.yml + + # On windows, use environment-windows.yml instead + conda env update -f ci/requirements/environment-windows.yml conda activate xarray-tests @@ -363,28 +368,9 @@ xarray uses several tools to ensure a consistent code format throughout the proj - `mypy `_ for static type checking on `type hints `_ -``pip``:: - - pip install black flake8 isort mypy blackdoc - -and then run from the root of the xarray repository:: - - isort . - black -t py36 . - blackdoc -t py36 . - flake8 - mypy . - -to auto-format your code. Additionally, many editors have plugins that will -apply ``black`` as you edit files. - -Optionally, you may wish to setup `pre-commit hooks `_ +We highly recommend that you setup `pre-commit hooks `_ to automatically run all the above tools every time you make a git commit. This -can be done by installing ``pre-commit``:: - - pip install pre-commit - -and then running:: +can be done by running:: pre-commit install @@ -406,12 +392,8 @@ Testing With Continuous Integration ----------------------------------- The *xarray* test suite runs automatically the -`Azure Pipelines `__, -continuous integration service, once your pull request is submitted. However, -if you wish to run the test suite on a branch prior to submitting the pull -request, then Azure Pipelines -`needs to be configured `_ -for your GitHub repository. +`GitHub Actions `__, +continuous integration service, once your pull request is submitted. A pull-request will be considered for merging when you have an all 'green' build. If any tests are failing, then you will get a red 'X', where you can click through to see the @@ -857,8 +839,7 @@ PR checklist - **Properly format your code** and verify that it passes the formatting guidelines set by `Black `_ and `Flake8 `_. See `"Code formatting" `_. You can use `pre-commit `_ to run these automatically on each commit. - - Run ``black .`` in the root directory. This may modify some files. Confirm and commit any formatting changes. - - Run ``flake8`` in the root directory. If this fails, it will log an error message. + - Run ``pre-commit run --all-files`` in the root directory. This may modify some files. Confirm and commit any formatting changes. - **Push your code and** `create a PR on GitHub `_. - **Use a helpful title for your pull request** by summarizing the main contributions rather than using the latest commit message. If the PR addresses an `issue `_, please `reference it `_. 
diff --git a/doc/gallery/README.txt b/doc/gallery/README.txt index b17f803696b..63f7d477cf4 100644 --- a/doc/gallery/README.txt +++ b/doc/gallery/README.txt @@ -2,4 +2,3 @@ Gallery ======= - diff --git a/doc/groupby.rst b/doc/groupby.rst index c72a26c45ea..d0c0b1849f9 100644 --- a/doc/groupby.rst +++ b/doc/groupby.rst @@ -124,7 +124,7 @@ dimensions *other than* the provided one: .. note:: We use an ellipsis (`...`) here to indicate we want to reduce over all - other dimensions + other dimensions First and last @@ -220,10 +220,10 @@ may be desirable: da.groupby_bins("lon", [0, 45, 50]).sum() These methods group by `lon` values. It is also possible to groupby each -cell in a grid, regardless of value, by stacking multiple dimensions, +cell in a grid, regardless of value, by stacking multiple dimensions, applying your function, and then unstacking the result: .. ipython:: python stacked = da.stack(gridcell=["ny", "nx"]) - stacked.groupby("gridcell").sum(...).unstack("gridcell") \ No newline at end of file + stacked.groupby("gridcell").sum(...).unstack("gridcell") diff --git a/doc/howdoi.rst b/doc/howdoi.rst index 84c0c786027..3604d66bd0c 100644 --- a/doc/howdoi.rst +++ b/doc/howdoi.rst @@ -59,4 +59,3 @@ How do I ... - ``obj.dt.ceil``, ``obj.dt.floor``, ``obj.dt.round``. See :ref:`dt_accessor` for more. * - make a mask that is ``True`` where an object contains any of the values in a array - :py:meth:`Dataset.isin`, :py:meth:`DataArray.isin` - diff --git a/doc/indexing.rst b/doc/indexing.rst index 58064582354..78766b8fd81 100644 --- a/doc/indexing.rst +++ b/doc/indexing.rst @@ -548,7 +548,7 @@ you can supply a :py:class:`~xarray.DataArray` with a coordinate, x=xr.DataArray([0, 1, 6], dims="z", coords={"z": ["a", "b", "c"]}), y=xr.DataArray([0, 1, 0], dims="z"), ) - + Analogously, label-based pointwise-indexing is also possible by the ``.sel`` method: diff --git a/doc/installing.rst b/doc/installing.rst index d5783e557e1..99b8b621aed 100644 --- a/doc/installing.rst +++ b/doc/installing.rst @@ -118,9 +118,9 @@ at the moment of publishing the xarray release is guaranteed to work. You can see the actual minimum tested versions: - `For NEP-18 libraries - `_ + `_ - `For everything else - `_ + `_ .. _installation-instructions: diff --git a/doc/internals.rst b/doc/internals.rst index 177cc60ac0a..60d32128c60 100644 --- a/doc/internals.rst +++ b/doc/internals.rst @@ -230,4 +230,4 @@ re-open it directly with Zarr: zgroup = zarr.open("rasm.zarr") print(zgroup.tree()) - dict(zgroup["Tair"].attrs) \ No newline at end of file + dict(zgroup["Tair"].attrs) diff --git a/doc/interpolation.rst b/doc/interpolation.rst index c2922813e15..9a3b7a7ee2d 100644 --- a/doc/interpolation.rst +++ b/doc/interpolation.rst @@ -68,7 +68,7 @@ by specifying the time periods required. Interpolation of data indexed by a :py:class:`~xarray.CFTimeIndex` is also allowed. See :ref:`CFTimeIndex` for examples. - + .. note:: Currently, our interpolation only works for regular grids. @@ -317,4 +317,4 @@ The remapping can be done as follows dsi = ds.interp(lon=lon, lat=lat) dsi.air.plot(ax=axes[1]) @savefig interpolation_sample4.png width=8in - axes[1].set_title("Remapped data") \ No newline at end of file + axes[1].set_title("Remapped data") diff --git a/doc/io.rst b/doc/io.rst index e4fa57689a6..2e46879929b 100644 --- a/doc/io.rst +++ b/doc/io.rst @@ -1107,7 +1107,7 @@ We recommend installing PyNIO via conda:: conda install -c conda-forge pynio .. note:: - + PyNIO is no longer actively maintained and conflicts with netcdf4 > 1.5.3. 
The PyNIO backend may be moved outside of xarray in the future. @@ -1152,4 +1152,4 @@ Third party libraries More formats are supported by extension libraries: -- `xarray-mongodb `_: Store xarray objects on MongoDB \ No newline at end of file +- `xarray-mongodb `_: Store xarray objects on MongoDB diff --git a/doc/plotting.rst b/doc/plotting.rst index dde48d47569..3699f794ae8 100644 --- a/doc/plotting.rst +++ b/doc/plotting.rst @@ -955,4 +955,4 @@ One can also make line plots with multidimensional coordinates. In this case, `` f, ax = plt.subplots(2, 1) da.plot.line(x="lon", hue="y", ax=ax[0]) @savefig plotting_example_2d_hue_xy.png - da.plot.line(x="lon", hue="x", ax=ax[1]) \ No newline at end of file + da.plot.line(x="lon", hue="x", ax=ax[1]) diff --git a/doc/quick-overview.rst b/doc/quick-overview.rst index e3d1456f017..1a2bc809550 100644 --- a/doc/quick-overview.rst +++ b/doc/quick-overview.rst @@ -153,7 +153,7 @@ Visualizing your datasets is quick and convenient: data.plot() Note the automatic labeling with names and units. Our effort in adding metadata attributes has paid off! Many aspects of these figures are customizable: see :ref:`plotting`. - + pandas ------ diff --git a/doc/reshaping.rst b/doc/reshaping.rst index d07929b3b8f..81fd4a6d35e 100644 --- a/doc/reshaping.rst +++ b/doc/reshaping.rst @@ -19,7 +19,7 @@ Reordering dimensions --------------------- To reorder dimensions on a :py:class:`~xarray.DataArray` or across all variables -on a :py:class:`~xarray.Dataset`, use :py:meth:`~xarray.DataArray.transpose`. An +on a :py:class:`~xarray.Dataset`, use :py:meth:`~xarray.DataArray.transpose`. An ellipsis (`...`) can be use to represent all other dimensions: .. ipython:: python @@ -288,4 +288,4 @@ As a shortcut, you can refer to existing coordinates by name: ds.sortby("x") ds.sortby(["y", "x"]) - ds.sortby(["y", "x"], ascending=False) \ No newline at end of file + ds.sortby(["y", "x"], ascending=False) diff --git a/doc/weather-climate.rst b/doc/weather-climate.rst index cb2921e2ed1..db612d74859 100644 --- a/doc/weather-climate.rst +++ b/doc/weather-climate.rst @@ -136,7 +136,7 @@ For data indexed by a :py:class:`~xarray.CFTimeIndex` xarray currently supports: da.time.dt.ceil("3D") da.time.dt.floor("5D") da.time.dt.round("2D") - + - Group-by operations based on datetime accessor attributes (e.g. by month of the year): diff --git a/doc/whats-new.rst b/doc/whats-new.rst index 70b3c407b36..c969453b108 100644 --- a/doc/whats-new.rst +++ b/doc/whats-new.rst @@ -90,7 +90,7 @@ Internal Changes By `Justus Magin `_ and `Mathias Hauser `_. -- Replace all usages of ``assert x.identical(y)`` with ``assert_identical(x, y)`` +- Replace all usages of ``assert x.identical(y)`` with ``assert_identical(x, y)`` for clearer error messages. (:pull:`4752`); By `Maximilian Roos `_. diff --git a/licenses/PYTHON_LICENSE b/licenses/PYTHON_LICENSE index 43829c533b9..88251f5b6e8 100644 --- a/licenses/PYTHON_LICENSE +++ b/licenses/PYTHON_LICENSE @@ -251,4 +251,4 @@ FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. \ No newline at end of file +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/xarray/tests/data/example.ict b/xarray/tests/data/example.ict
index bc04888fb80..41bbfeb996c 100644
--- a/xarray/tests/data/example.ict
+++ b/xarray/tests/data/example.ict
@@ -28,4 +28,4 @@ Start_UTC, lat, lon, elev, TEST_ppbv, TESTM_ppbv
 43200, 41.00000, -71.00000, 5, 1.2345, 2.220
 46800, 42.00000, -72.00000, 15, 2.3456, -9999
 50400, 42.00000, -73.00000, 20, 3.4567, -7777
-50400, 42.00000, -74.00000, 25, 4.5678, -8888
\ No newline at end of file
+50400, 42.00000, -74.00000, 25, 4.5678, -8888
diff --git a/xarray/tests/test_plot.py b/xarray/tests/test_plot.py
index d730b6dc1c6..2f4a4edd436 100644
--- a/xarray/tests/test_plot.py
+++ b/xarray/tests/test_plot.py
@@ -2240,7 +2240,7 @@ def test_datetime_hue(self, hue_style):
         ds2.plot.scatter(x="A", y="B", hue="hue", hue_style=hue_style)

     def test_facetgrid_hue_style(self):
-        # Can't move this to pytest.mark.parametrize because py36-bare-minimum
+        # Can't move this to pytest.mark.parametrize because py37-bare-minimum
         # doesn't have matplotlib.
         for hue_style, map_type in (
             ("discrete", list),