diff --git a/.github/workflows/build.yml b/.github/workflows/build_latest.yml
similarity index 87%
rename from .github/workflows/build.yml
rename to .github/workflows/build_latest.yml
index 11e218157..3e9e0e448 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build_latest.yml
@@ -1,4 +1,4 @@
-name: Build and Test Linux
+name: Build and Test Linux with latest netcdf-c
 on: [push, pull_request]
 jobs:
   build-linux:
@@ -13,7 +13,7 @@ jobs:
     #NO_NET: 1
     strategy:
       matrix:
-        python-version: ["3.9"]
+        python-version: ["3.10"]
     steps:

     - uses: actions/checkout@v2

@@ -36,7 +36,7 @@ jobs:
         make install
         popd
         echo "Download and build netCDF version ${NETCDF_VERSION}"
-        wget https://downloads.unidata.ucar.edu/netcdf-c/4.9.0/netcdf-c-${NETCDF_VERSION}.tar.gz
+        wget https://downloads.unidata.ucar.edu/netcdf-c/${NETCDF_VERSION}/netcdf-c-${NETCDF_VERSION}.tar.gz
         tar -xzf netcdf-c-${NETCDF_VERSION}.tar.gz
         pushd netcdf-c-${NETCDF_VERSION}
         export CPPFLAGS="-I/usr/include/hdf5/mpich -I${NETCDF_DIR}/include"
@@ -94,11 +94,11 @@ jobs:
           echo "pnetcdf mpi test passed!"
         fi

-    - name: Tarball
-      run: |
-        export PATH=${NETCDF_DIR}/bin:${PATH}
-        python setup.py --version
-        check-manifest --version
-        check-manifest --verbose
-        pip wheel . -w dist --no-deps
-        twine check dist/*
+#    - name: Tarball
+#      run: |
+#        export PATH=${NETCDF_DIR}/bin:${PATH}
+#        python setup.py --version
+#        check-manifest --version
+#        check-manifest --verbose
+#        pip wheel . -w dist --no-deps
+#        twine check dist/*
diff --git a/.github/workflows/build_master.yml b/.github/workflows/build_master.yml
index 00b3a52d4..1749d9585 100644
--- a/.github/workflows/build_master.yml
+++ b/.github/workflows/build_master.yml
@@ -10,7 +10,7 @@ jobs:
     #NO_NET: 1
     strategy:
       matrix:
-        python-version: ["3.9"]
+        python-version: ["3.10"]
     steps:

     - uses: actions/checkout@v2
diff --git a/.github/workflows/build_old.yml b/.github/workflows/build_old.yml
new file mode 100644
index 000000000..3bc79d718
--- /dev/null
+++ b/.github/workflows/build_old.yml
@@ -0,0 +1,104 @@
+name: Build and Test Linux with older netcdf-c
+on: [push, pull_request]
+jobs:
+  build-linux:
+    name: Python (${{ matrix.python-version }})
+    runs-on: ubuntu-latest
+    env:
+      PNETCDF_VERSION: 1.12.1
+      NETCDF_VERSION: 4.8.1
+      NETCDF_DIR: ${{ github.workspace }}/..
+      NETCDF_EXTRA_CONFIG: --enable-pnetcdf
+      CC: mpicc.mpich
+      #NO_NET: 1
+    strategy:
+      matrix:
+        python-version: ["3.10"]
+    steps:
+
+    - uses: actions/checkout@v2
+
+    - name: Set up Python ${{ matrix.python-version }}
+      uses: actions/setup-python@v2
+      with:
+        python-version: ${{ matrix.python-version }}
+
+    - name: Install Ubuntu Dependencies
+      run: |
+        sudo apt-get update
+        sudo apt-get install mpich libmpich-dev libhdf5-mpich-dev libcurl4-openssl-dev bzip2 libsnappy-dev libblosc-dev libzstd-dev
+        echo "Download and build PnetCDF version ${PNETCDF_VERSION}"
+        wget https://parallel-netcdf.github.io/Release/pnetcdf-${PNETCDF_VERSION}.tar.gz
+        tar -xzf pnetcdf-${PNETCDF_VERSION}.tar.gz
+        pushd pnetcdf-${PNETCDF_VERSION}
+        ./configure --prefix $NETCDF_DIR --enable-shared --disable-fortran --disable-cxx
+        make -j 2
+        make install
+        popd
+        echo "Download and build netCDF version ${NETCDF_VERSION}"
+        wget https://downloads.unidata.ucar.edu/netcdf-c/${NETCDF_VERSION}/netcdf-c-${NETCDF_VERSION}.tar.gz
+        tar -xzf netcdf-c-${NETCDF_VERSION}.tar.gz
+        pushd netcdf-c-${NETCDF_VERSION}
+        export CPPFLAGS="-I/usr/include/hdf5/mpich -I${NETCDF_DIR}/include"
+        export LDFLAGS="-L${NETCDF_DIR}/lib"
+        export LIBS="-lhdf5_mpich_hl -lhdf5_mpich -lm -lz"
+        ./configure --prefix $NETCDF_DIR --enable-netcdf-4 --enable-shared --enable-dap --enable-parallel4 $NETCDF_EXTRA_CONFIG
+        make -j 2
+        make install
+        popd
+
+#    - name: The job has failed
+#      if: ${{ failure() }}
+#      run: |
+#        cd netcdf-c-${NETCDF_VERSION}
+#        cat config.log
+
+    - name: Install python dependencies via pip
+      run: |
+        python -m pip install --upgrade pip
+        pip install numpy cython cftime pytest twine wheel check-manifest mpi4py
+
+    - name: Install netcdf4-python
+      run: |
+        export PATH=${NETCDF_DIR}/bin:${PATH}
+        export NETCDF_PLUGIN_DIR=${{ github.workspace }}/netcdf-c-${NETCDF_VERSION}/plugins/plugindir
+        python setup.py install
+    - name: Test
+      run: |
+        export PATH=${NETCDF_DIR}/bin:${PATH}
+        python checkversion.py
+        # serial
+        cd test
+        python run_all.py
+        # parallel (hdf5 for netcdf4, pnetcdf for netcdf3)
+        cd ../examples
+        mpirun.mpich -np 4 python mpi_example.py
+        if [ $? -ne 0 ] ; then
+          echo "hdf5 mpi test failed!"
+          exit 1
+        else
+          echo "hdf5 mpi test passed!"
+        fi
+        mpirun.mpich -np 4 python mpi_example_compressed.py
+        if [ $? -ne 0 ] ; then
+          echo "hdf5 compressed mpi test failed!"
+          exit 1
+        else
+          echo "hdf5 compressed mpi test passed!"
+        fi
+        mpirun.mpich -np 4 python mpi_example.py NETCDF3_64BIT_DATA
+        if [ $? -ne 0 ] ; then
+          echo "pnetcdf mpi test failed!"
+          exit 1
+        else
+          echo "pnetcdf mpi test passed!"
+        fi
+
+#    - name: Tarball
+#      run: |
+#        export PATH=${NETCDF_DIR}/bin:${PATH}
+#        python setup.py --version
+#        check-manifest --version
+#        check-manifest --verbose
+#        pip wheel . -w dist --no-deps
+#        twine check dist/*
diff --git a/.github/workflows/miniconda.yml b/.github/workflows/miniconda.yml
index 601fbda04..042bb1625 100644
--- a/.github/workflows/miniconda.yml
+++ b/.github/workflows/miniconda.yml
@@ -12,7 +12,7 @@ jobs:
     # NO_NET: 1
     strategy:
       matrix:
-        python-version: ["3.6", "3.7", "3.8", "3.9", "3.10" ]
+        python-version: [ "3.7", "3.8", "3.9", "3.10", "3.11" ]
         os: [windows-latest, ubuntu-latest, macos-latest]
         platform: [x64, x32]
         exclude:
@@ -34,7 +34,7 @@
        micromamba create --name TEST python=${{ matrix.python-version }} numpy cython pip pytest hdf5 libnetcdf cftime zlib --channel conda-forge
        micromamba activate TEST
        export PATH="${CONDA_PREFIX}/bin:${CONDA_PREFIX}/Library/bin:$PATH" # so setup.py finds nc-config
-       pip install -e . --no-deps --force-reinstall
+       pip install -v -e . --no-deps --force-reinstall

    - name: Debug conda
      shell: bash -l {0}
@@ -53,7 +53,7 @@
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
-      python-version: [ "3.9" ]
+      python-version: [ "3.10" ]
       os: [ubuntu-latest]
       platform: [x64]
    steps:
@@ -70,7 +70,8 @@
        micromamba create --name TEST python=${{ matrix.python-version }} numpy cython pip pytest mpi4py hdf5=*=mpi* libnetcdf=*=mpi* cftime zlib --channel conda-forge
        micromamba activate TEST
        export PATH="${CONDA_PREFIX}/bin:${CONDA_PREFIX}/Library/bin:$PATH" # so setup.py finds nc-config
-       pip install -e . --no-deps --force-reinstall
+       nc-config --all
+       pip install -v -e . --no-build-isolation --no-deps --force-reinstall

    - name: Debug conda
      shell: bash -l {0}
@@ -88,8 +89,8 @@
        export PATH="${CONDA_PREFIX}/bin:${CONDA_PREFIX}/Library/bin:$PATH"
        which mpirun
        mpirun --version
-       #mpirun -np 4 --oversubscribe python mpi_example.py # for openmpi
-       mpirun -np 4 python mpi_example.py
+       mpirun -np 4 --oversubscribe python mpi_example.py # for openmpi
+       #mpirun -np 4 python mpi_example.py
        if [ $? -ne 0 ] ; then
          echo "hdf5 mpi test failed!"
          exit 1
diff --git a/Changelog b/Changelog
index 41baa59c7..dbbbed650 100644
--- a/Changelog
+++ b/Changelog
@@ -1,10 +1,11 @@
- version 1.6.2 (Unrelease)
-==========================
+ version 1.6.2 (not yet released)
+==============================
  * Added ``netCDF4.__has_set_alignment__`` property to help identify if the
    underlying netcdf4 supports setting the HDF5 alignment.
  * Slicing multi-dimensional variables with an all False boolean index array
    now returns an empty numpy array (instead of raising an exception - issue
    #1197). Behavior now consistent with numpy slicing.
+ * fix problem with compiling using netcdf-c < 4.9.0 (issue #1209)

  version 1.6.1 (tag v1.6.1rel)
 ==============================
diff --git a/include/netCDF4.pxi b/include/netCDF4.pxi
index 86e309839..9233c8165 100644
--- a/include/netCDF4.pxi
+++ b/include/netCDF4.pxi
@@ -367,7 +367,6 @@ cdef extern from "netcdf.h":
     int nc_inq_enum_ident(int ncid, nc_type xtype, long long value, char *identifier) nogil


-
 IF HAS_QUANTIZATION_SUPPORT:
     cdef extern from "netcdf.h":
         cdef enum:
@@ -377,6 +376,8 @@ IF HAS_QUANTIZATION_SUPPORT:
             NC_QUANTIZE_BITROUND
         int nc_def_var_quantize(int ncid, int varid, int quantize_mode, int nsd) nogil
         int nc_inq_var_quantize(int ncid, int varid, int *quantize_modep, int *nsdp) nogil
+
+IF HAS_NCFILTER:
     cdef extern from "netcdf_filter.h":
         int nc_inq_filter_avail(int ncid, unsigned filterid) nogil

@@ -388,8 +389,6 @@ IF HAS_SZIP_SUPPORT:
         int nc_inq_var_quantize(int ncid, int varid, int *quantize_modep, int *nsdp) nogil
         int nc_def_var_szip(int ncid, int varid, int options_mask, int pixels_per_bloc) nogil
         int nc_inq_var_szip(int ncid, int varid, int *options_maskp, int *pixels_per_blockp) nogil
-    cdef extern from "netcdf_filter.h":
-        int nc_inq_filter_avail(int ncid, unsigned filterid) nogil

 IF HAS_ZSTANDARD_SUPPORT:
     cdef extern from "netcdf_filter.h":
@@ -397,7 +396,6 @@ IF HAS_ZSTANDARD_SUPPORT:
             H5Z_FILTER_ZSTD
         int nc_def_var_zstandard(int ncid, int varid, int level) nogil
         int nc_inq_var_zstandard(int ncid, int varid, int* hasfilterp, int *levelp) nogil
-        int nc_inq_filter_avail(int ncid, unsigned id) nogil

 IF HAS_BZIP2_SUPPORT:
     cdef extern from "netcdf_filter.h":
@@ -405,7 +403,6 @@ IF HAS_BZIP2_SUPPORT:
             H5Z_FILTER_BZIP2
         int nc_def_var_bzip2(int ncid, int varid, int level) nogil
         int nc_inq_var_bzip2(int ncid, int varid, int* hasfilterp, int *levelp) nogil
-        int nc_inq_filter_avail(int ncid, unsigned filterid) nogil

 IF HAS_BLOSC_SUPPORT:
     cdef extern from "netcdf_filter.h":
@@ -413,7 +410,6 @@ IF HAS_BLOSC_SUPPORT:
             H5Z_FILTER_BLOSC
         int nc_def_var_blosc(int ncid, int varid, unsigned subcompressor, unsigned level, unsigned blocksize, unsigned addshuffle) nogil
         int nc_inq_var_blosc(int ncid, int varid, int* hasfilterp, unsigned* subcompressorp, unsigned* levelp, unsigned* blocksizep, unsigned* addshufflep) nogil
-        int nc_inq_filter_avail(int ncid, unsigned filterid) nogil

 IF HAS_NC_OPEN_MEM:
     cdef extern from "netcdf_mem.h":
diff --git a/setup.py b/setup.py
index 87ba082e9..7a88e23c9 100644
--- a/setup.py
+++ b/setup.py
@@ -70,6 +70,7 @@ def check_api(inc_dirs,netcdf_lib_version):
     has_zstandard = False
     has_bzip2 = False
     has_blosc = False
+    has_ncfilter = False
     has_set_alignment = False

     for d in inc_dirs:
@@ -116,6 +117,8 @@ def check_api(inc_dirs,netcdf_lib_version):
                     has_bzip2 = True
                 if line.startswith('EXTERNL int nc_def_var_blosc'):
                     has_blosc = True
+                if line.startswith('EXTERNL int nc_inq_filter_avail'):
+                    has_ncfilter = True

         ncmetapath = os.path.join(d,'netcdf_meta.h')
         if os.path.exists(ncmetapath):
@@ -143,7 +146,7 @@ def check_api(inc_dirs,netcdf_lib_version):
     return has_rename_grp, has_nc_inq_path, has_nc_inq_format_extended, \
            has_cdf5_format, has_nc_open_mem, has_nc_create_mem, \
            has_parallel4_support, has_pnetcdf_support, has_szip_support, has_quantize, \
-           has_zstandard, has_bzip2, has_blosc, has_set_alignment
+           has_zstandard, has_bzip2, has_blosc, has_set_alignment, has_ncfilter


 def getnetcdfvers(libdirs):
@@ -557,7 +560,7 @@ def _populate_hdf5_info(dirstosearch, inc_dirs, libs, lib_dirs):
 has_rename_grp, has_nc_inq_path, has_nc_inq_format_extended, \
 has_cdf5_format, has_nc_open_mem, has_nc_create_mem, \
 has_parallel4_support, has_pnetcdf_support, has_szip_support, has_quantize, \
-has_zstandard, has_bzip2, has_blosc, has_set_alignment = \
+has_zstandard, has_bzip2, has_blosc, has_set_alignment, has_ncfilter = \
 check_api(inc_dirs,netcdf_lib_version)
 # for netcdf 4.4.x CDF5 format is always enabled.
 if netcdf_lib_version is not None and\
@@ -565,11 +568,12 @@ def _populate_hdf5_info(dirstosearch, inc_dirs, libs, lib_dirs):
     has_cdf5_format = True

 # disable parallel support if mpi4py not available.
-try:
-    import mpi4py
-except ImportError:
-    has_parallel4_support = False
-    has_pnetcdf_support = False
+#try:
+#    import mpi4py
+#except ImportError:
+#    f.write('disabling mpi parallel support because mpi4py not found\n')
+#    has_parallel4_support = False
+#    has_pnetcdf_support = False

 f = open(osp.join('include', 'constants.pyx'), 'w')
 if has_rename_grp:
@@ -671,9 +675,17 @@ def _populate_hdf5_info(dirstosearch, inc_dirs, libs, lib_dirs):
     sys.stdout.write('netcdf lib does not have nc_set_alignment function\n')
     f.write('DEF HAS_SET_ALIGNMENT = 0\n')

+if has_ncfilter:
+    sys.stdout.write('netcdf lib has nc_inq_filter_avail function\n')
+    f.write('DEF HAS_NCFILTER = 1\n')
+else:
+    sys.stdout.write('netcdf lib does not have nc_inq_filter_avail function\n')
+    f.write('DEF HAS_NCFILTER = 0\n')
+
 f.close()

 if has_parallel4_support or has_pnetcdf_support:
+    import mpi4py
     inc_dirs.append(mpi4py.get_include())
     # mpi_incdir should not be needed if using nc-config
     # (should be included in nc-config --cflags)
diff --git a/src/netCDF4/_netCDF4.pyx b/src/netCDF4/_netCDF4.pyx
index 29852a65d..c36f308b1 100644
--- a/src/netCDF4/_netCDF4.pyx
+++ b/src/netCDF4/_netCDF4.pyx
@@ -1,5 +1,5 @@
 """
-Version 1.6.1
+Version 1.6.2
 -------------

 # Introduction
@@ -1230,7 +1230,7 @@ if sys.version_info[0:2] < (3, 7):
     # Python 3.7+ guarantees order; older versions need OrderedDict
     from collections import OrderedDict

-__version__ = "1.6.1"
+__version__ = "1.6.2"

 # Initialize numpy
 import posixpath
@@ -3543,15 +3543,21 @@ returns True if bzip2 compression filter is available"""
 **`has_szip_filter(self)`**

 returns True if szip compression filter is available"""
         cdef int ierr
-        IF HAS_SZIP_SUPPORT:
-            with nogil:
-                ierr = nc_inq_filter_avail(self._grpid, H5Z_FILTER_SZIP)
-            if ierr:
+        IF HAS_NCFILTER:
+            IF HAS_SZIP_SUPPORT:
+                with nogil:
+                    ierr = nc_inq_filter_avail(self._grpid, H5Z_FILTER_SZIP)
+                if ierr:
+                    return False
+                else:
+                    return True
+            ELSE:
                 return False
-            else:
-                return True
         ELSE:
-            return False
+            IF HAS_SZIP_SUPPORT:
+                return True
+            ELSE:
+                return False

 cdef class Group(Dataset):
     """
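
Note on the has_szip_filter() change: with netcdf-c >= 4.9.0 (where nc_inq_filter_avail() exists and HAS_NCFILTER is defined) the method probes the library at runtime, while with older netcdf-c it now falls back to the compile-time HAS_SZIP_SUPPORT answer instead of breaking the build. A minimal sketch of how these checks look from Python, assuming a netcdf4-python 1.6.x build that exposes the has_*_filter() Dataset methods; the temporary filename is arbitrary:

    # filter_check.py -- sketch only, assumes netcdf4-python 1.6.x
    import netCDF4

    print("netcdf4-python version:", netCDF4.__version__)

    # An open Dataset is needed because the probe goes through a group id.
    with netCDF4.Dataset("scratch.nc", "w") as nc:
        # With netcdf-c >= 4.9.0 these calls go through nc_inq_filter_avail();
        # with older netcdf-c they only report what was compiled in (the
        # fallback added by this patch).
        print("szip filter available: ", nc.has_szip_filter())
        print("bzip2 filter available:", nc.has_bzip2_filter())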
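
The setup.py hunks drive the new compile-time switch: check_api() looks for the nc_inq_filter_avail prototype while scanning the installed netCDF headers and writes DEF HAS_NCFILTER = 0/1 into include/constants.pyx, which netCDF4.pxi then tests with IF HAS_NCFILTER. A simplified, standalone sketch of that detection step; the header path below is a hypothetical example, while the real code iterates over the include directories setup.py discovers:

    # detect_ncfilter.py -- simplified sketch of the check_api() header scan
    import os

    def has_ncfilter(header_path):
        # Return True if the given header declares nc_inq_filter_avail().
        if not os.path.exists(header_path):
            return False
        with open(header_path) as f:
            return any(line.startswith("EXTERNL int nc_inq_filter_avail")
                       for line in f)

    if __name__ == "__main__":
        flag = 1 if has_ncfilter("/usr/include/netcdf.h") else 0  # hypothetical path
        # setup.py writes this DEF into include/constants.pyx for Cython to use.
        print("DEF HAS_NCFILTER = %d" % flag)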