Use pytest.hookimpl/.hookspec to configure hooks
The-Compiler committed Oct 7, 2022
1 parent 4a99afe commit 665b475
Showing 4 changed files with 12 additions and 8 deletions.
1 change: 1 addition & 0 deletions AUTHORS.rst
@@ -32,3 +32,4 @@ Authors
 * Dimitris Rozakis - https://github.com/dimrozakis
 * Friedrich Delgado - https://github.com/TauPan
 * Sam James - https://github.com/thesamesam
+* Florian Bruhin - https://github.com/The-Compiler
3 changes: 3 additions & 0 deletions CHANGELOG.rst
@@ -6,6 +6,9 @@ Changelog
 ------------------
 * Fix skipping test in `test_utils.py` if appropriate VCS not available. Also fix typo.
   Contributed by Sam James in `#211 <https://github.com/ionelmc/pytest-benchmark/pull/211>`_.
+* Use ``pytest.hookimpl`` and ``pytest.hookspec`` to configure hooks,
+  avoiding a deprecation warning in the upcoming pytest 7.2.0.
+  Contributed by Florian Bruhin in `#224 <https://github.com/ionelmc/pytest-benchmark/pull/224>`_.
 
 3.4.2 (2021-06-15)
 ------------------
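For context, pytest 7.2.0 deprecates the old ways of configuring hooks (applying `pytest.mark` to hook implementations, or setting attributes such as `firstresult` on hookspec functions) in favour of the `pytest.hookimpl` and `pytest.hookspec` decorators. A minimal sketch contrasting the two styles, with hook bodies elided:

```python
import pytest

# Deprecated style (warns on pytest >= 7.2.0):
#
#     def pytest_benchmark_scale_unit(config, unit, benchmarks, best, worst, sort):
#         ...
#     pytest_benchmark_scale_unit.firstresult = True
#
#     @pytest.mark.trylast
#     def pytest_configure(config):
#         ...

# Current style: declare the same options in the decorators.
@pytest.hookspec(firstresult=True)
def pytest_benchmark_scale_unit(config, unit, benchmarks, best, worst, sort):
    """Spec: only the first non-None result of this hook is used."""

@pytest.hookimpl(trylast=True)
def pytest_configure(config):
    """Impl: called after other plugins' pytest_configure hooks."""
```

Both spellings configure the same underlying pluggy options; only where the options are declared changes.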
14 changes: 7 additions & 7 deletions src/pytest_benchmark/hookspec.py
@@ -1,3 +1,6 @@
+import pytest
+
+@pytest.hookspec(firstresult=True)
 def pytest_benchmark_scale_unit(config, unit, benchmarks, best, worst, sort):
     """
     To have custom time scaling do something like this:
@@ -19,6 +22,7 @@ def pytest_benchmark_scale_unit(config, unit, benchmarks, best, worst, sort):
     pass
 
 
+@pytest.hookspec(firstresult=True)
 def pytest_benchmark_generate_machine_info(config):
     """
     To completely replace the generated machine_info do something like this:
@@ -45,6 +49,7 @@ def pytest_benchmark_update_machine_info(config, machine_info):
     pass
 
 
+@pytest.hookspec(firstresult=True)
 def pytest_benchmark_generate_commit_info(config):
     """
     To completely replace the generated commit_info do something like this:
@@ -69,6 +74,7 @@ def pytest_benchmark_update_commit_info(config, commit_info):
     pass
 
 
+@pytest.hookspec(firstresult=True)
 def pytest_benchmark_group_stats(config, benchmarks, group_by):
     """
     You may perform grouping customization here, in case the builtin grouping doesn't suit you.
@@ -90,6 +96,7 @@ def pytest_benchmark_group_stats(config, benchmarks, group_by):
     pass
 
 
+@pytest.hookspec(firstresult=True)
 def pytest_benchmark_generate_json(config, benchmarks, include_data, machine_info, commit_info):
     """
     You should read pytest-benchmark's code if you really need to wholly customize the json.
@@ -143,10 +150,3 @@ def pytest_benchmark_compare_machine_info(config, benchmarksession, machine_info
         )
     """
     pass
-
-
-pytest_benchmark_scale_unit.firstresult = True
-pytest_benchmark_generate_commit_info.firstresult = True
-pytest_benchmark_generate_json.firstresult = True
-pytest_benchmark_generate_machine_info.firstresult = True
-pytest_benchmark_group_stats.firstresult = True
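The `firstresult=True` option that previously lived in attribute assignments at the bottom of the module now sits on each `pytest.hookspec` decorator; its meaning is unchanged. A minimal, self-contained sketch of what `firstresult` does, using pluggy (the hook framework pytest builds on) directly, with hypothetical plugin and hook names:

```python
import pluggy

hookspec = pluggy.HookspecMarker("demo")
hookimpl = pluggy.HookimplMarker("demo")


class DemoSpec:
    @hookspec(firstresult=True)
    def scale_unit(self, unit):
        """Return a (prefix, scale) pair for the given unit."""


class DeferringPlugin:
    @hookimpl
    def scale_unit(self, unit):
        return None  # None means "no opinion"; the call moves on


class MilliPlugin:
    @hookimpl
    def scale_unit(self, unit):
        return ("m", 1e-3)


pm = pluggy.PluginManager("demo")
pm.add_hookspecs(DemoSpec)
pm.register(DeferringPlugin())
pm.register(MilliPlugin())

# With firstresult=True the call returns the first non-None value
# instead of a list with one entry per implementing plugin.
print(pm.hook.scale_unit(unit="seconds"))  # -> ('m', 0.001)
```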
2 changes: 1 addition & 1 deletion src/pytest_benchmark/plugin.py
@@ -434,7 +434,7 @@ def pytest_runtest_makereport(item, call):
     fixture.skipped = outcome.get_result().outcome == 'skipped'
 
 
-@pytest.mark.trylast  # force the other plugins to initialise, fixes issue with capture not being properly initialised
+@pytest.hookimpl(trylast=True)  # force the other plugins to initialise, fixes issue with capture not being properly initialised
 def pytest_configure(config):
     config.addinivalue_line("markers", "benchmark: mark a test with custom benchmark settings.")
     bs = config._benchmarksession = BenchmarkSession(config)
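`@pytest.hookimpl(trylast=True)` keeps the ordering behaviour of the deprecated `@pytest.mark.trylast`: the implementation is pushed to the end of the hook call order, so other plugins run first. A minimal `conftest.py` sketch (the marker string is hypothetical):

```python
# conftest.py
import pytest


@pytest.hookimpl(trylast=True)
def pytest_configure(config):
    # trylast=True pushes this implementation to the end of the hook
    # call order, so other plugins (e.g. pytest's capture plugin) have
    # already run their own pytest_configure by the time this executes.
    config.addinivalue_line("markers", "slow: marks a test as slow (hypothetical)")
```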
