Skip to content

Commit

Permalink
Enable parquet append tests after fix (#11104)
Browse files Browse the repository at this point in the history
  • Loading branch information
phofl committed May 6, 2024
1 parent 7b2a47e commit aa3ccdb
Show file tree
Hide file tree
Showing 2 changed files with 0 additions and 8 deletions.
2 changes: 0 additions & 2 deletions dask/bytes/tests/test_s3.py
Original file line number Diff line number Diff line change
Expand Up @@ -567,8 +567,6 @@ def test_parquet_append(s3, engine, s3so):
if NUMPY_GE_200 and engine == "fastparquet":
# https://github.com/dask/fastparquet/issues/923
pytest.skip("fastparquet doesn't work with Numpy 2")
if dd._dask_expr_enabled():
pytest.skip("need convert string option")

url = "s3://%s/test.parquet.append" % test_bucket_name

Expand Down
6 changes: 0 additions & 6 deletions dask/dataframe/io/tests/test_parquet.py
Original file line number Diff line number Diff line change
Expand Up @@ -732,8 +732,6 @@ def test_categorical(tmpdir, write_engine, read_engine):
@pytest.mark.parametrize("metadata_file", [False, True])
def test_append(tmpdir, engine, metadata_file):
"""Test that appended parquet equal to the original one."""
if DASK_EXPR_ENABLED and metadata_file:
pytest.xfail("doesn't work yet")
tmp = str(tmpdir)
df = pd.DataFrame(
{
Expand Down Expand Up @@ -920,8 +918,6 @@ def test_partition_on_cats_2(tmpdir, engine):
@pytest.mark.parametrize("metadata_file", [False, True])
def test_append_wo_index(tmpdir, engine, metadata_file):
"""Test append with write_index=False."""
if DASK_EXPR_ENABLED and metadata_file:
pytest.xfail("doesn't work yet")
tmp = str(tmpdir.join("tmp1.parquet"))
df = pd.DataFrame(
{
Expand Down Expand Up @@ -990,7 +986,6 @@ def test_append_overlapping_divisions(tmpdir, engine, metadata_file, index, offs
ddf2.to_parquet(tmp, engine=engine, append=True, ignore_divisions=True)


@pytest.mark.xfail(DASK_EXPR_ENABLED, reason="will be supported after string option")
def test_append_known_divisions_to_unknown_divisions_works(tmpdir, engine):
tmp = str(tmpdir)

Expand Down Expand Up @@ -4351,7 +4346,6 @@ def test_custom_filename(tmpdir, engine):
assert_eq(df, dd.read_parquet(fn, engine=engine, calculate_divisions=True))


@pytest.mark.xfail(DASK_EXPR_ENABLED, reason="Can't hash metadata file at the moment")
@PYARROW_MARK
def test_custom_filename_works_with_pyarrow_when_append_is_true(tmpdir):
fn = str(tmpdir)
Expand Down

0 comments on commit aa3ccdb

Please sign in to comment.