feat: coverage combine now prints messages naming the files being combined. #1105 (#1208)
nedbat committed Aug 5, 2021
1 parent 4ef91bd commit 1f51202
Showing 5 changed files with 32 additions and 13 deletions.
4 changes: 4 additions & 0 deletions CHANGES.rst
@@ -27,6 +27,9 @@ Unreleased
- The ``coverage html`` command now prints a message indicating where the HTML
report was written. Fixes `issue 1195`_.

- The ``coverage combine`` command now prints messages indicating each data
file being combined. Fixes `issue 1105`_.

- Unrecognized options in the configuration file are no longer errors. They are
now warnings, to ease the use of coverage across versions. Fixes `issue
1035`_.
@@ -35,6 +38,7 @@ Unreleased
unsupported type." (`issue 1010`_).

.. _issue 1035: https://github.com/nedbat/coveragepy/issues/1035
.. _issue 1105: https://github.com/nedbat/coveragepy/issues/1105
.. _issue 1195: https://github.com/nedbat/coveragepy/issues/1195


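As an illustration of the ``coverage combine`` entry above: the new messages come from an optional callback added to the internal combine helper (the signature change is shown in coverage/data.py below). A minimal sketch, assuming parallel data files are present in the current directory and driving the internal helper directly, which is not part of the documented public API:

    # Illustrative sketch only: combine_parallel_data is an internal helper and
    # may change between releases.  Parallel data files are typically named
    # ".coverage.<machine>.<pid>.<random>".
    from coverage import CoverageData
    from coverage.data import combine_parallel_data

    combined = CoverageData()                      # targets ".coverage" by default
    combine_parallel_data(combined, message=print, keep=True)
    # prints one line per merged file, e.g.:
    #   Combined data file .coverage.myhost.12345.678901
    combined.write()

The ``coverage combine`` command passes the Coverage object's own message method as this callback (see coverage/control.py below), so the command line prints these lines, while the API-level tests in tests/test_api.py below check that plain ``Coverage.combine()`` calls stay silent.
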
1 change: 1 addition & 0 deletions coverage/control.py
@@ -718,6 +718,7 @@ def combine(self, data_paths=None, strict=False, keep=False):
data_paths=data_paths,
strict=strict,
keep=keep,
message=self._message,
)

def get_data(self):
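
The single added line above threads the Coverage object's ``_message`` method through to the data layer as the ``message`` callback. A generic sketch of that design choice (illustrative only, not coverage.py source): the lower layer stays silent unless a callback is supplied, and the caller decides what reporting a message actually means.

    # Pattern sketch, not coverage.py source: an optional callback keeps the
    # data layer free of any output policy.
    def merge_files(files, message=None):
        """Merge `files`, reporting each one through `message` if given."""
        for f in files:
            # ... merge f into the combined result ...
            if message:
                message(f"Combined data file {f}")

    class Api:
        def _message(self, msg):
            # The real Coverage object decides here whether and where to print.
            print(msg)

        def combine(self, files):
            merge_files(files, message=self._message)

    Api().combine([".coverage.myhost.12345.678901"])  # prints one line
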
6 changes: 5 additions & 1 deletion coverage/data.py
@@ -53,7 +53,9 @@ def add_data_to_hash(data, filename, hasher):
hasher.update(data.file_tracer(filename))


def combine_parallel_data(data, aliases=None, data_paths=None, strict=False, keep=False):
def combine_parallel_data(
data, aliases=None, data_paths=None, strict=False, keep=False, message=None,
):
"""Combine a number of data files together.
Treat `data.filename` as a file prefix, and combine the data from all
@@ -117,6 +119,8 @@ def combine_parallel_data(data, aliases=None, data_paths=None, strict=False, keep=False):
else:
data.update(new_data, aliases=aliases)
files_combined += 1
if message:
message(f"Combined data file {os.path.relpath(f)}")
if not keep:
if data._debug.should('dataio'):
data._debug.write(f"Deleting combined data file {f!r}")
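
``combine_parallel_data`` only calls ``message`` for files that were actually merged, after the update succeeds, and it reports each path relative to the current directory via ``os.path.relpath``. Because the parameter is a callback rather than a flag, callers other than the CLI can route the messages elsewhere; a hedged sketch, again using the internal helper:

    # Illustrative only: send combine progress to the logging module instead of
    # stdout.  keep=True leaves the parallel data files in place.
    import logging

    from coverage import CoverageData
    from coverage.data import combine_parallel_data

    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger("coverage.combine")

    data = CoverageData()
    combine_parallel_data(data, message=log.info, keep=True)
    data.write()
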
4 changes: 4 additions & 0 deletions tests/test_api.py
@@ -437,6 +437,7 @@ def test_combining_twice(self):
self.make_good_data_files()
cov1 = coverage.Coverage()
cov1.combine()
assert self.stdout() == ""
cov1.save()
self.check_code1_code2(cov1)
self.assert_file_count(".coverage.*", 0)
@@ -448,6 +449,7 @@ def test_combining_twice(self):

cov3 = coverage.Coverage()
cov3.combine()
assert self.stdout() == ""
# Now the data is empty!
_, statements, missing, _ = cov3.analysis("code1.py")
assert statements == [1]
@@ -469,6 +471,7 @@ def test_combining_with_a_used_coverage(self):
cov.save()

cov.combine()
assert self.stdout() == ""
self.check_code1_code2(cov)

def test_ordered_combine(self):
@@ -483,6 +486,7 @@ def make_data_file():
def get_combined_filenames():
cov = coverage.Coverage()
cov.combine()
assert self.stdout() == ""
cov.save()
data = cov.get_data()
filenames = {relative_filename(f).replace("\\", "/") for f in data.measured_files()}
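
Each assertion added above checks that combining through the Python API writes nothing to stdout; the combine messages are a command-line behavior. Outside coverage.py's own test helpers, the same property could be checked with stock pytest, a sketch under that assumption:

    # Hypothetical stand-alone test, assuming any parallel data files live in
    # the current directory; capsys is pytest's built-in stdout/stderr capture.
    import coverage

    def test_api_combine_is_quiet(capsys):
        cov = coverage.Coverage()
        cov.combine()
        assert capsys.readouterr().out == ""
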
30 changes: 18 additions & 12 deletions tests/test_concurrency.py
@@ -392,7 +392,12 @@ def try_multiprocessing_code(
assert len(glob.glob(".coverage.*")) == nprocs + 1

out = self.run_command("coverage combine")
assert out == ""
out_lines = out.splitlines()
assert len(out_lines) == nprocs + 1
assert all(
re.fullmatch(r"Combined data file \.coverage\..*\.\d+\.\d+", line)
for line in out_lines
)
out = self.run_command("coverage report -m")

last_line = self.squeezed_lines(out)[-1]
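
The new assertions expect one message per combined data file: nprocs worker files plus one for the main process. Parallel data files are named with the machine name, process id, and a random number appended, which is what the ``\.coverage\..*\.\d+\.\d+`` pattern matches. A quick illustrative check of the pattern against a typical line:

    # Illustrative only: the test's regex against a representative message line.
    import re

    pattern = r"Combined data file \.coverage\..*\.\d+\.\d+"
    line = "Combined data file .coverage.myhost.12345.678901"
    assert re.fullmatch(pattern, line) is not None
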
@@ -426,8 +431,12 @@ def test_multiprocessing_and_gevent(self):
code, expected_out, eventlet, nprocs, concurrency="multiprocessing,eventlet"
)

def try_multiprocessing_code_with_branching(self, code, expected_out):
"""Run code using multiprocessing, it should produce `expected_out`."""
def test_multiprocessing_with_branching(self):
nprocs = 3
upto = 30
code = (SQUARE_OR_CUBE_WORK + MULTI_CODE).format(NPROCS=nprocs, UPTO=upto)
total = sum(x*x if x%2 else x*x*x for x in range(upto))
expected_out = f"{nprocs} pids, total = {total}"
self.make_file("multi.py", code)
self.make_file("multi.rc", """\
[run]
@@ -444,20 +453,17 @@ def try_multiprocessing_code_with_branching(self, code, expected_out):
assert out.rstrip() == expected_out

out = self.run_command("coverage combine")
assert out == ""
out_lines = out.splitlines()
assert len(out_lines) == nprocs + 1
assert all(
re.fullmatch(r"Combined data file \.coverage\..*\.\d+\.\d+", line)
for line in out_lines
)
out = self.run_command("coverage report -m")

last_line = self.squeezed_lines(out)[-1]
assert re.search(r"TOTAL \d+ 0 \d+ 0 100%", last_line)

def test_multiprocessing_with_branching(self):
nprocs = 3
upto = 30
code = (SQUARE_OR_CUBE_WORK + MULTI_CODE).format(NPROCS=nprocs, UPTO=upto)
total = sum(x*x if x%2 else x*x*x for x in range(upto))
expected_out = f"{nprocs} pids, total = {total}"
self.try_multiprocessing_code_with_branching(code, expected_out)

def test_multiprocessing_bootstrap_error_handling(self):
# An exception during bootstrapping will be reported.
self.make_file("multi.py", """\
