ENH: sparse: Add indexing for 1D arrays #14215

Workflow file for this run

name: Linux tests
on:
push:
branches:
- maintenance/**
pull_request:
branches:
- main
- maintenance/**
permissions:
contents: read # to fetch code (actions/checkout)
env:
CCACHE_DIR: "${{ github.workspace }}/.ccache"
INSTALLDIR: "build-install"
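# Cancel any in-progress run for the same PR branch when a new run starts; for push
# events github.head_ref is empty, so run_id is used and those runs are never cancelled.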
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
jobs:
get_commit_message:
name: Get commit message
uses: ./.github/workflows/commit_message.yml
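# The commit_message workflow inspects the head commit; its `message` output is 0 when
# the commit asks for CI to be skipped (typically via a "[skip ci]"-style tag), and 1 otherwise.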
test_meson:
name: mypy (py3.10) & dev deps (py3.12), fast, dev.py
needs: get_commit_message
# If using act to run CI locally the github object does not exist and
# the usual skipping should not be enforced
if: >
needs.get_commit_message.outputs.message == 1
&& (github.repository == 'scipy/scipy' || github.repository == '')
runs-on: ubuntu-22.04
strategy:
matrix:
python-version: ['3.10', '3.12'] # this run will use python dev versions when available
maintenance-branch:
- ${{ contains(github.ref, 'maintenance/') || contains(github.base_ref, 'maintenance/') }}
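# On maintenance branches only the Python 3.10 job runs; the 3.12 dev-deps job is excluded below.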
exclude:
- maintenance-branch: true
python-version: '3.12'
steps:
- uses: actions/checkout@v4.1.1
with:
submodules: recursive
- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
cache: 'pip'
cache-dependency-path: 'environment.yml'
- name: Install Ubuntu dependencies
run: |
# NOTE: not the same OpenBLAS version as in upstream CI (I'm being lazy here)
sudo apt-get update
sudo apt-get install -y libopenblas-dev libatlas-base-dev liblapack-dev gfortran libgmp-dev libmpfr-dev libsuitesparse-dev ccache libmpc-dev
- name: Install Python packages
if: matrix.python-version == '3.10'
run: |
python -m pip install numpy cython pytest pytest-xdist pytest-timeout pybind11 mpmath gmpy2 pythran ninja meson click rich-click doit pydevtool pooch hypothesis
- name: Install Python packages from repositories
if: matrix.python-version == '3.12' # this run will use python dev versions when available
run: |
python -m pip install git+https://github.com/numpy/numpy.git
python -m pip install ninja cython pytest pybind11 pytest-xdist pytest-timeout click rich-click doit pydevtool pooch hypothesis "setuptools<67.3"
python -m pip install git+https://github.com/serge-sans-paille/pythran.git
python -m pip install git+https://github.com/mesonbuild/meson.git
- name: Prepare compiler cache
id: prep-ccache
shell: bash
run: |
mkdir -p "${CCACHE_DIR}"
echo "dir=$CCACHE_DIR" >> $GITHUB_OUTPUT
NOW=$(date -u +"%F-%T")
echo "timestamp=${NOW}" >> $GITHUB_OUTPUT
- name: Setup compiler cache
uses: actions/cache@v3
id: cache-ccache
# Reference: https://docs.github.com/en/actions/guides/caching-dependencies-to-speed-up-workflows#matching-a-cache-key
# NOTE: The caching strategy is designed so that every workflow run gets a unique cache key
# (even when the same workflow is run multiple times). The restore keys are not unique; on a
# partial match they return the most recently created cache entry, per the GitHub Actions docs.
with:
path: ${{ steps.prep-ccache.outputs.dir }}
# Restores ccache from either a previous build on this branch or on main
key: ${{ github.workflow }}-${{ matrix.python-version }}-ccache-linux-${{ steps.prep-ccache.outputs.timestamp }}
# This evaluates to `Linux tests-3.10-ccache-linux-` which is not unique. As the CI matrix is expanded, this will
# need to be updated to be unique so that the cache is not restored from a different job altogether.
restore-keys: |
${{ github.workflow }}-${{ matrix.python-version }}-ccache-linux-
- name: Setup build and install scipy
run: |
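# --werror turns build warnings into errors, so newly introduced warnings fail the build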
python dev.py build --werror
- name: Ccache performance
shell: bash -l {0}
run: ccache -s
- name: Check installation
run: |
pushd tools
python check_installation.py ${{ env.INSTALLDIR }}
./check_pyext_symbol_hiding.sh ../build
popd
- name: Mypy
if: matrix.python-version == '3.10'
run: |
# Packages that are only needed for their annotations
python -m pip install mypy==1.0.0 types-psutil typing_extensions
python -m pip install pybind11 sphinx
python -u dev.py mypy
- name: Test SciPy
run: |
export OMP_NUM_THREADS=2
python dev.py --no-build test -j2 -- --durations 10 --timeout=60
#################################################################################
test_venv_install:
name: Install into venv, cluster only, pyAny/npAny, pip+cluster.test()
needs: get_commit_message
if: >
needs.get_commit_message.outputs.message == 1
&& (github.repository == 'scipy/scipy' || github.repository == '')
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4.1.1
with:
submodules: recursive
- name: Install Ubuntu dependencies
run: |
# We're not running the full test suite here, only checking that installing into
# a venv works, so optional dependencies are left out. That's also why an old
# OpenBLAS version from Ubuntu is good enough.
sudo apt-get update
sudo apt-get install -y python3-dev libopenblas-dev pkg-config gfortran
- name: Create venv, install SciPy
run: |
python -m venv ../venvs/scipy-venv
source ../venvs/scipy-venv/bin/activate
# Note that this uses build isolation. That's why we don't need build
# dependencies to be installed in the venv itself.
python -m pip install . -vv
- name: Basic imports and tests
run: |
source ../venvs/scipy-venv/bin/activate
cd ..
python -c "import scipy"
python -c "import scipy.linalg"
python -m pip install pytest hypothesis
python -c "from scipy import cluster; cluster.test()"
- name: Create venv inside source tree
# This is a regression test for gh-16312
run: |
python -m venv .venv
source .venv/bin/activate
# Install build dependencies. Use meson-python from its main branch; this job is
# the most convenient place to test it, because we're using pip without build
# isolation here.
python -m pip install numpy pybind11 pythran cython pytest ninja hypothesis
python -m pip install git+https://github.com/mesonbuild/meson-python.git
# Non-isolated build, so we use dependencies installed inside the source tree
python -m pip install -U pip # need pip >=23 for `--config-settings`
python -m pip install . --no-build-isolation --config-settings=compile-args=-j2
# Basic tests
cd ..
python -c "import scipy"
python -c "import scipy.linalg"
python -c "from scipy import cluster; cluster.test()"
#################################################################################
python_debug:
# also uses the vcs->sdist->wheel route.
name: Python-debug & ATLAS & sdist+wheel, fast, py3.10/npMin, pip+pytest
needs: get_commit_message
if: >
needs.get_commit_message.outputs.message == 1
&& (github.repository == 'scipy/scipy' || github.repository == '')
runs-on: ubuntu-22.04 # provides python3.10-dbg
steps:
- uses: actions/checkout@v4.1.1
with:
submodules: recursive
- name: Configuring Test Environment
run: |
sudo apt-get update
sudo apt install python3-dbg python3-dev libatlas-base-dev liblapack-dev gfortran ccache libgmp-dev libmpfr-dev libmpc-dev
python3-dbg --version # just to check
python3-dbg -c 'import sys; print("Python debug build:", hasattr(sys, "gettotalrefcount"))'
- name: Build SciPy
run: |
python3-dbg -m pip install build
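# -Csetup-args options are passed through to `meson setup`; -Ccompile-args to the compile step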
python3-dbg -m build -Csetup-args=-Dbuildtype=debugoptimized -Csetup-args=-Dblas=blas-atlas -Csetup-args=-Dlapack=lapack-atlas -Ccompile-args=-j2
python3-dbg -m pip install dist/scipy*.whl
- name: Testing SciPy
run: |
cd doc
python3-dbg -m pip install pytest pytest-xdist pytest-timeout mpmath gmpy2 threadpoolctl pooch hypothesis
python3-dbg -m pytest --pyargs scipy -n2 --durations=10 -m "not slow"
#################################################################################
gcc9:
# The purpose is to test building with the oldest-supported GCC and to run the test suite with pydata/sparse installed.
name: Oldest GCC & pydata/sparse, fast, py3.10/npMin, pip+pytest
needs: get_commit_message
if: >
needs.get_commit_message.outputs.message == 1
&& (github.repository == 'scipy/scipy' || github.repository == '')
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4.1.1
with:
submodules: recursive
- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: "3.10"
- name: Setup system dependencies
run: |
sudo apt-get -y update
sudo apt install -y g++-9 gcc-9 gfortran-9
sudo apt install -y libatlas-base-dev liblapack-dev libgmp-dev \
libmpfr-dev libmpc-dev pkg-config libsuitesparse-dev
- name: Setup Python build deps
run: |
pip install build meson-python ninja pythran pybind11 cython "numpy>=2.0.0b1"
- name: Build wheel and install
run: |
set -euo pipefail
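# PYTHONOPTIMIZE=2 is equivalent to `python -OO`: asserts and docstrings are stripped,
# which exercises code paths that must not rely on them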
export PYTHONOPTIMIZE=2
# select which compilers to use via environment variables
CC=gcc-9 CXX=g++-9 FC=gfortran-9 python -m build --wheel --no-isolation -Csetup-args=-Dblas=blas-atlas -Csetup-args=-Dlapack=lapack-atlas -Ccompile-args=-j2
python -m pip install dist/scipy*.whl
- name: Install test dependencies
run: |
# Downgrade numpy to oldest supported version
pip install gmpy2 threadpoolctl mpmath pooch pytest pytest-xdist==2.5.0 pytest-timeout hypothesis sparse "numpy==1.23.5"
- name: Run tests
run: |
# can't be in source directory
pushd $RUNNER_TEMP
export PYTHONOPTIMIZE=2
python -m pytest --pyargs scipy -n2 --durations=10
popd
#################################################################################
prerelease_deps_coverage_64bit_blas:
# TODO: re-enable ILP64 build.
name: Prerelease deps & coverage report, full, py3.10/npMin & py3.11/npPre, dev.py
needs: get_commit_message
if: >
needs.get_commit_message.outputs.message == 1
&& (github.repository == 'scipy/scipy' || github.repository == '')
runs-on: ubuntu-latest
strategy:
matrix:
# Both use numpy 2.x-dev at build time; 3.10 job then downgrades to
# lowest supported NumPy version in order to test ABI compatibility.
python-version: ['3.10', '3.11']
steps:
- uses: actions/checkout@v4.1.1
with:
submodules: recursive
- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Install Ubuntu dependencies
run: |
sudo apt-get update
sudo apt-get install -y libgmp-dev libmpfr-dev libmpc-dev ccache gfortran
- name: Caching Python dependencies
uses: actions/cache@v3
id: cache
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip-prerelease
- name: Install Python packages
run: |
python -m pip install cython pythran ninja meson-python pybind11 click rich_click pydevtool
# Note: matplotlib is not installed here to avoid issues around numpy 2.0
# (it can be put back once matplotlib has made a 2.0-compatible release
# on PyPI).
python -m pip install --pre --upgrade pytest pytest-cov pytest-xdist mpmath gmpy2 threadpoolctl pooch hypothesis
# TODO: once the scipy_ symbol prefix issue is fixed, install
# scipy-openblas32 from the pre-releases bucket again (see gh-19640)
python -m pip install "scipy-openblas32<=0.3.23.293.2"
# Install numpy last, to ensure we get 2.0.0-dev (avoid possible <2.0 constraints).
python -m pip install --pre --upgrade --timeout=60 -i https://pypi.anaconda.org/scientific-python-nightly-wheels/simple numpy
- name: Prepare compiler cache
id: prep-ccache
shell: bash
run: |
mkdir -p "${CCACHE_DIR}"
echo "dir=$CCACHE_DIR" >> $GITHUB_OUTPUT
NOW=$(date -u +"%F-%T")
echo "timestamp=${NOW}" >> $GITHUB_OUTPUT
- name: Setup compiler cache
uses: actions/cache@v3
id: cache-ccache
with:
path: ${{ steps.prep-ccache.outputs.dir }}
# Restores ccache from either a previous build on this branch or on main
key: ${{ github.workflow }}-${{ matrix.python-version }}-ccache-linux-prerelease-${{ steps.prep-ccache.outputs.timestamp }}
restore-keys: |
${{ github.workflow }}-${{ matrix.python-version }}-ccache-linux-prerelease-
- name: Build and install SciPy
run: |
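# --gcov builds with coverage instrumentation; --with-scipy-openblas uses the
# scipy-openblas32 wheel installed above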
python dev.py build --gcov --with-scipy-openblas
- name: Ccache performance
shell: bash -l {0}
run: ccache -s
- name: Downgrade NumPy from 2.0-dev to lowest supported
if: matrix.python-version == '3.10'
run: |
python -m pip install "numpy==1.23.5"
- name: Test SciPy
run: |
export OPENBLAS_NUM_THREADS=1
python dev.py --no-build test -j2 --mode full -- --cov --cov-report term-missing
#################################################################################
linux_32bit:
name: 32-bit, fast, py3.10/npMin, dev.py
needs: get_commit_message
if: >
needs.get_commit_message.outputs.message == 1
&& (github.repository == 'scipy/scipy' || github.repository == '')
runs-on: ubuntu-latest
# I tried running directly in a container, using the image: and options:
# entries. Unfortunately, at this time options: does not seem to honor
# --platform linux/i386.
steps:
- uses: actions/checkout@v4.1.1
with:
submodules: recursive
- name: build + test
run: |
set -euo pipefail
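# Build and test inside a manylinux2014 i686 container to get a 32-bit x86 environment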
docker pull quay.io/pypa/manylinux2014_i686
docker run -v $(pwd):/scipy --platform=linux/i386 quay.io/pypa/manylinux2014_i686 /bin/bash -c "cd /scipy && \
uname -a && \
basedir=\$(python3.10 tools/openblas_support.py) && \
cp -r \$basedir/lib/* /usr/local/lib && \
cp \$basedir/include/* /usr/local/include && \
export PKG_CONFIG_PATH=/usr/local/lib/pkgconfig && \
python3.10 -m venv test && \
source test/bin/activate && \
python -m pip install doit click rich_click pydevtool meson ninja && \
python -m pip install numpy==1.23.5 cython pybind11 pytest pytest-timeout pytest-xdist pytest-env 'Pillow<10.0.0' mpmath pythran pooch meson hypothesis && \
LD_LIBRARY_PATH=/usr/local/lib python dev.py build && \
LD_LIBRARY_PATH=/usr/local/lib python dev.py test"