Switch from skipping to xfailing some Python tests #1002

Merged: 6 commits, Feb 1, 2023. Changes shown from 4 commits.
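For context on the change (not part of the diff), a minimal sketch of the behavioral difference this PR relies on: a skipped test is collected but never executed, while an xfailed test still runs and is reported XFAIL when it fails and XPASS when it unexpectedly starts passing, so upstream fixes surface automatically. The test names below are hypothetical.

import pytest

@pytest.mark.skip(reason="not executed at all; an upstream fix goes unnoticed")
def test_skipped_example():
    assert 1 + 1 == 2

@pytest.mark.xfail(reason="executed anyway; reported XFAIL on failure, XPASS once it passes")
def test_xfailed_example():
    assert 1 + 1 == 3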
7 changes: 2 additions & 5 deletions tests/integration/test_compatibility.py
@@ -314,7 +314,7 @@ def test_multi_agg_count_no_group_by():
)


-@pytest.mark.skip(
+@pytest.mark.xfail(
    reason="conflicting aggregation functions: [('count', 'a'), ('count', 'a')]"
)
def test_multi_agg_count_no_group_by_dupe_distinct():
@@ -386,7 +386,7 @@ def test_agg_count_distinct_no_group_by():
)


-@pytest.mark.skip(
+@pytest.mark.xfail(
    reason="conflicting aggregation functions: [('count', 'c'), ('count', 'c')]"
)
def test_agg_count():
@@ -442,9 +442,6 @@ def test_agg_sum_avg_no_group_by():
)


-@pytest.mark.skip(
-    reason="WIP DataFusion - https://github.com/dask-contrib/dask-sql/issues/534"
-)
def test_agg_sum_avg():
    a = make_rand_df(
        100, a=(int, 50), b=(str, 50), c=(int, 30), d=(str, 40), e=(float, 40)
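A possible follow-up for the hunks above (an assumption, not something this PR does): if the exception type behind the "conflicting aggregation functions" failures is known, passing raises= keeps the xfail from hiding unrelated errors. The exception type here is purely illustrative.

import pytest

@pytest.mark.xfail(
    raises=NotImplementedError,  # assumed exception type, for illustration only
    reason="conflicting aggregation functions: [('count', 'a'), ('count', 'a')]",
)
def test_multi_agg_count_no_group_by_dupe_distinct():
    ...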
4 changes: 2 additions & 2 deletions tests/integration/test_hive.py
@@ -9,8 +9,8 @@
from dask_sql.context import Context
from tests.utils import assert_eq

-pytestmark = pytest.mark.skipif(
-    sys.platform in ("win32", "darwin"),
+pytestmark = pytest.mark.xfail(
+    condition=sys.platform in ("win32", "darwin"),
    reason="hive testing not supported on Windows/macOS",
)
docker = pytest.importorskip("docker")
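One behavioral note on the module-level change above: skipif prevents the tests from running on the listed platforms, whereas xfail(condition=...) still executes them by default. If the hive tests should not run at all on Windows/macOS, pytest's run=False keeps the old behavior while still reporting them as expected failures; a sketch, assuming that is the intent:

import sys

import pytest

pytestmark = pytest.mark.xfail(
    condition=sys.platform in ("win32", "darwin"),
    reason="hive testing not supported on Windows/macOS",
    run=False,  # do not execute the test body, only report it as xfailed
)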
2 changes: 0 additions & 2 deletions tests/integration/test_model.py
@@ -903,7 +903,6 @@ def test_ml_experiment(c, client, training_df):

# TODO - many ML tests fail on clusters without sklearn - can we avoid this?
@xfail_if_external_scheduler
@pytest.mark.skip(reason="Waiting on https://github.com/EpistasisLab/tpot/pull/1280")
Comment (Collaborator, PR author): cc @sarahyurick since this is your PR; are you aware of any other upstream changes that could've resolved these failures?

Reply (Collaborator): AFAIK, the only other solution is to restrict the NumPy version to be < 1.24.0
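If the NumPy restriction ends up being the route taken, one hedged alternative to pinning the whole environment below 1.24.0 would be to gate only the tpot-based tests on the installed version; the names below are hypothetical and this is only a sketch, which could sit alongside @xfail_if_external_scheduler on the two automl tests.

import numpy as np
import pytest
from packaging.version import Version

# Hypothetical marker: xfail the tpot-based tests only when NumPy is too new for tpot.
tpot_numpy_xfail = pytest.mark.xfail(
    condition=Version(np.__version__) >= Version("1.24.0"),
    reason="tpot is currently incompatible with numpy>=1.24.0",
)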

def test_experiment_automl_classifier(c, client, training_df):
    tpot = pytest.importorskip("tpot", reason="tpot not installed")
    # currently tested with tpot==
@@ -929,7 +928,6 @@ def test_experiment_automl_classifier(c, client, training_df):

# TODO - many ML tests fail on clusters without sklearn - can we avoid this?
@xfail_if_external_scheduler
@pytest.mark.skip(reason="Waiting on https://github.com/EpistasisLab/tpot/pull/1280")
def test_experiment_automl_regressor(c, client, training_df):
    tpot = pytest.importorskip("tpot", reason="tpot not installed")
    # test regressor
4 changes: 2 additions & 2 deletions tests/integration/test_postgres.py
@@ -2,8 +2,8 @@

import pytest

-pytestmark = pytest.mark.skipif(
-    sys.platform in ("win32", "darwin"),
+pytestmark = pytest.mark.xfail(
+    condition=sys.platform in ("win32", "darwin"),
    reason="hive testing not supported on Windows/macOS",
)
docker = pytest.importorskip("docker")
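Worth noting as a possible tightening (not part of this PR): with strict=True an unexpected pass turns into a suite failure, so a stale xfail has to be removed once the upstream issue is fixed, and running pytest with -rxX lists the xfailed and xpassed tests in the summary. A minimal sketch with an illustrative test name:

import pytest

@pytest.mark.xfail(strict=True, reason="known upstream issue; illustrative only")
def test_strict_xfail_example():
    assert False  # XFAIL while broken; once this passes, strict=True fails the run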