diff --git a/continuous_integration/scripts/install.sh b/continuous_integration/scripts/install.sh
index 6e491a36f54..200f9e49dec 100644
--- a/continuous_integration/scripts/install.sh
+++ b/continuous_integration/scripts/install.sh
@@ -7,7 +7,7 @@ set -xe
 # python -m pip install --no-deps cityhash
 
 if [[ ${UPSTREAM_DEV} ]]; then
-    mamba install -y -c arrow-nightlies "pyarrow>5.0"
+    mamba install -y -c arrow-nightlies "pyarrow>7.0"
 
     # FIXME https://github.com/mamba-org/mamba/issues/412
     # mamba uninstall --force numpy pandas fastparquet
diff --git a/dask/dataframe/io/tests/test_parquet.py b/dask/dataframe/io/tests/test_parquet.py
index da457f06027..c76e1146bff 100644
--- a/dask/dataframe/io/tests/test_parquet.py
+++ b/dask/dataframe/io/tests/test_parquet.py
@@ -1662,7 +1662,7 @@ def check_compression(engine, filename, compression):
         else:
             assert md.total_compressed_size != md.total_uncompressed_size
     else:
-        metadata = pa.parquet.ParquetDataset(filename).metadata
+        metadata = pa.parquet.read_metadata(os.path.join(filename, "_metadata"))
         names = metadata.schema.names
         for i in range(metadata.num_row_groups):
            row_group = metadata.row_group(i)
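
Context for the second hunk: newer pyarrow routes ParquetDataset through the dataset API, which no longer exposes a dataset-level .metadata attribute, so the test now reads the _metadata sidecar file directly with pa.parquet.read_metadata. Below is a minimal sketch of that call outside the test; the directory name out.parquet is a hypothetical example of a dask-written dataset that includes a _metadata file.

import os

import pyarrow.parquet as pq

# Hypothetical directory of parquet files written with a _metadata sidecar
# (e.g. dask's to_parquet with write_metadata_file=True).
dataset_dir = "out.parquet"

# Read the dataset-level FileMetaData straight from the _metadata file,
# mirroring what the updated check_compression helper does.
metadata = pq.read_metadata(os.path.join(dataset_dir, "_metadata"))

names = metadata.schema.names
for i in range(metadata.num_row_groups):
    row_group = metadata.row_group(i)
    for j in range(row_group.num_columns):
        column = row_group.column(j)
        # Each column chunk records the codec that compressed it.
        print(column.path_in_schema, column.compression)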