Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Include test case stdout and stderr in annotation details #358

Merged
merged 9 commits into from Oct 14, 2022
27 changes: 22 additions & 5 deletions python/publish/__init__.py
Expand Up @@ -709,12 +709,13 @@ def get_long_summary_with_digest_md(stats: UnitTestRunResultsOrDeltaResults,


def get_case_messages(case_results: UnitTestCaseResults) -> CaseMessages:
    """ Re-index cases from test+state to test+state+message.

    :param case_results: cases indexed by test (file/class/name tuple) and state
    :return: the same cases re-indexed by test, state and message
    """
    messages = defaultdict(lambda: defaultdict(lambda: defaultdict(list)))
    for test in case_results:
        for state in case_results[test]:
            for case in case_results[test][state]:
                # skipped / disabled cases carry their reason in `message`,
                # all other results carry the interesting text in `content`
                message = case.message if case.result in ['skipped', 'disabled'] else case.content
                messages[test][state][message].append(case)
    return CaseMessages(messages)


Expand Down Expand Up @@ -747,6 +748,15 @@ def to_dict(self) -> Mapping[str, Any]:
return dictionary


def message_is_contained_in_content(message: Optional[str], content: Optional[str]) -> bool:
    """Return True if *content* starts with *message* when whitespace is normalized.

    Runs of whitespace (including newlines) are collapsed to single spaces and
    leading/trailing whitespace is dropped before comparing.
    Returns False if either argument is None or empty.
    """
    if not message or not content:
        return False

    def normalize(text: str) -> str:
        # collapse any whitespace run into a single space, strip the ends
        return re.sub(r'\s+', ' ', text.strip())

    return normalize(content).startswith(normalize(message))


def get_case_annotation(messages: CaseMessages,
key: Tuple[Optional[str], Optional[str], Optional[str]],
state: str,
Expand Down Expand Up @@ -791,6 +801,13 @@ def get_case_annotation(messages: CaseMessages,
'notice'
)

# pick details from message and content, but try to avoid redundancy (e.g. when content repeats message)
# always add stdout and stderr if they are not empty
maybe_message = [case.message] if not message_is_contained_in_content(case.message, case.content) else []
details = [detail.rstrip()
for detail in maybe_message + [case.content, case.stdout, case.stderr]
if detail and detail.rstrip()]

return Annotation(
path=test_file or class_name or '/',
start_line=line,
Expand All @@ -800,7 +817,7 @@ def get_case_annotation(messages: CaseMessages,
annotation_level=level,
message='\n'.join(sorted(same_result_files)),
title=title,
raw_details=message
raw_details='\n'.join(details) if details else None
)


Expand Down
5 changes: 5 additions & 0 deletions python/publish/junit.py
Expand Up @@ -216,6 +216,8 @@ def get_suites(suite: TestSuite) -> List[TestSuite]:
for suite in suites
for leaf_suite in get_suites(suite)] + ([suite] if cases or not suites else [])

# junit allows for multiple results for a single test case (e.g. success and failure for the same test)
# we pick the most severe result, which could still be multiple results, so we aggregate those, which is messy
cases = [
UnitTestCase(
result_file=result_file,
Expand All @@ -226,11 +228,14 @@ def get_suites(suite: TestSuite) -> List[TestSuite]:
result=get_result(results),
message=get_message(results),
content=get_content(results),
stdout=case.system_out,
stderr=case.system_err,
time=case.time * time_factor if case.time is not None else case.time
)
for result_file, suite in suites
for case in get_cases(suite)
if case.classname is not None or case.name is not None
# junit allows for multiple results in one test case, pick the most severe results
for results in [get_results(case.result, case.status)]
]

Expand Down
14 changes: 11 additions & 3 deletions python/publish/unittestresults.py
Expand Up @@ -15,6 +15,8 @@ class UnitTestCase:
result: str
message: Optional[str]
content: Optional[str]
stdout: Optional[str]
stderr: Optional[str]
time: Optional[float]


Expand Down Expand Up @@ -416,11 +418,17 @@ def get_test_results(parsed_results: ParsedUnitTestResultsWithCommit,
cases_errors = [case for case in cases if case.result == 'error']
cases_time = sum([case.time or 0 for case in cases])

# group cases by tests
# index cases by tests and state
cases_results = UnitTestCaseResults()
for case in cases:
key = (case.test_file if dedup_classes_by_file_name else None, case.class_name, case.test_name)
cases_results[key][case.result if case.result != 'disabled' else 'skipped'].append(case)
# index by test file name (when de-duplicating by file name), class name and test name
test = (case.test_file if dedup_classes_by_file_name else None, case.class_name, case.test_name)

# second index by state
state = case.result if case.result != 'disabled' else 'skipped'

# collect cases of test and state
cases_results[test][state].append(case)

test_results = dict()
for test, states in cases_results.items():
Expand Down
79 changes: 79 additions & 0 deletions python/test/files/junit-xml/junit.multiresult.annotations
@@ -0,0 +1,79 @@
[
{
'name': 'Test Results',
'head_sha': 'commit sha',
'status': 'completed',
'conclusion': 'failure',
'output': {
'title': '1 errors, 1 fail, 1 skipped, 1 pass in 1s',
'summary':
'1 files\u2004\u20031 suites\u2004\u2003\u20021s '
'[:stopwatch:](https://github.com/EnricoMi/publish-unit-test-result-ac'
'tion/blob/v1.20/README.md#the-symbols "duration of all tests")\n4 '
'tests\u20031 '
'[:heavy_check_mark:](https://github.com/EnricoMi/publish-unit-test-re'
'sult-action/blob/v1.20/README.md#the-symbols "passed tests")\u20031 '
'[:zzz:](https://github.com/EnricoMi/publish-unit-test-result-action/b'
'lob/v1.20/README.md#the-symbols "skipped / disabled tests")\u20031 '
'[:x:](https://github.com/EnricoMi/publish-unit-test-result-action/blo'
'b/v1.20/README.md#the-symbols "failed tests")\u20031 '
'[:fire:](https://github.com/EnricoMi/publish-unit-test-result-action/'
'blob/v1.20/README.md#the-symbols "test errors")\n4 runs\u2006\u2003'
'-2 '
'[:heavy_check_mark:](https://github.com/EnricoMi/publish-unit-test-re'
'sult-action/blob/v1.20/README.md#the-symbols "passed tests")\u20033 '
'[:zzz:](https://github.com/EnricoMi/publish-unit-test-result-action/b'
'lob/v1.20/README.md#the-symbols "skipped / disabled tests")\u20032 '
'[:x:](https://github.com/EnricoMi/publish-unit-test-result-action/blo'
'b/v1.20/README.md#the-symbols "failed tests")\u20031 '
'[:fire:](https://github.com/EnricoMi/publish-unit-test-result-action/'
'blob/v1.20/README.md#the-symbols "test errors")\n\nResults for '
'commit commit s.\n\n'
'[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMOw6AIBAFr'
'0KotfBTeRlCEONGPmYXKuPdlQhEujdvkrn4BkYTX9jQMU4RQoU1ogzgXcZXhKTmsgVFpf'
'5S0AFnc2wSTHNoRI/5wehKL82S68d6fLmpcK5V/48pby2EF/JitEt+P6y+BE/eAAAA\n',
'annotations': [
{
'path': 'test class',
'start_line': 0,
'end_line': 0,
'annotation_level': 'failure',
'message': 'junit.multiresult.xml',
'title': 'test that errors (test class) with error',
'raw_details': 'test teardown failure\nstdout'
},
{
'path': 'test class',
'start_line': 0,
'end_line': 0,
'annotation_level': 'warning',
'message': 'junit.multiresult.xml',
'title': 'test that fails (test class) failed',
'raw_details': 'test failure\nAssertion failed'
},
{
'path': '.github',
'start_line': 0,
'end_line': 0,
'annotation_level': 'notice',
'message':
'There is 1 skipped test, see "Raw output" for the name of the '
'skipped test.',
'title': '1 skipped test found',
'raw_details': 'test class ‑ test that is skipped'
},
{
'path': '.github',
'start_line': 0,
'end_line': 0,
'annotation_level': 'notice',
'message': 'There are 4 tests, see "Raw output" for the full list of tests.',
'title': '4 tests found',
'raw_details':
'test class ‑ test that errors\ntest class ‑ test that fails\ntest '
'class ‑ test that is skipped\ntest class ‑ test that succeeds'
}
]
}
}
]
8 changes: 8 additions & 0 deletions python/test/files/junit-xml/junit.multiresult.results
Expand Up @@ -17,6 +17,8 @@ publish.unittestresults.ParsedUnitTestResults(
result='error',
message='test teardown failure',
content='stdout',
stdout=None,
stderr=None,
time=0.123
),
publish.unittestresults.UnitTestCase(
Expand All @@ -28,6 +30,8 @@ publish.unittestresults.ParsedUnitTestResults(
result='failure',
message='test failure',
content='Assertion failed',
stdout=None,
stderr=None,
time=0.234
),
publish.unittestresults.UnitTestCase(
Expand All @@ -39,6 +43,8 @@ publish.unittestresults.ParsedUnitTestResults(
result='skipped',
message=None,
content=None,
stdout=None,
stderr=None,
time=0.345
),
publish.unittestresults.UnitTestCase(
Expand All @@ -50,6 +56,8 @@ publish.unittestresults.ParsedUnitTestResults(
result='success',
message=None,
content=None,
stdout=None,
stderr=None,
time=0.456
)
]
Expand Down
70 changes: 70 additions & 0 deletions python/test/files/junit-xml/minimal-attributes.annotations
@@ -0,0 +1,70 @@
[
{
'name': 'Test Results',
'head_sha': 'commit sha',
'status': 'completed',
'conclusion': 'failure',
'output': {
'title': '1 errors, 1 fail, 1 skipped, 1 pass in 0s',
'summary':
'4 tests\u2002\u2003\u20031 '
'[:heavy_check_mark:](https://github.com/EnricoMi/publish-unit-test-re'
'sult-action/blob/v1.20/README.md#the-symbols "passed tests")\u2003\u2003'
'0s '
'[:stopwatch:](https://github.com/EnricoMi/publish-unit-test-result-ac'
'tion/blob/v1.20/README.md#the-symbols "duration of all tests")\n1 '
'suites\u2003\u20031 '
'[:zzz:](https://github.com/EnricoMi/publish-unit-test-result-action/b'
'lob/v1.20/README.md#the-symbols "skipped / disabled tests")\n1 files\u2004'
'\u2002\u2003\u20031 '
'[:x:](https://github.com/EnricoMi/publish-unit-test-result-action/blo'
'b/v1.20/README.md#the-symbols "failed tests")\u2003\u20031 '
'[:fire:](https://github.com/EnricoMi/publish-unit-test-result-action/'
'blob/v1.20/README.md#the-symbols "test errors")\n\nResults for '
'commit commit s.\n\n'
'[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMOw6AIBAFr'
'0KoLTSx8jKGIMSNfMwClfHuAoJC92Z2MxeVoISjC5kGQl0A/8EWkHmwJuIYMR58Os11ry'
'5wXn6LOODshGSgOiEQLRaDwdRemm3u5b+WuYllblvcag0+QlnE7YzeD8XajRvdAAAA\n',
'annotations': [
{
'path': 'ClassName',
'start_line': 0,
'end_line': 0,
'annotation_level': 'warning',
'message': 'minimal-attributes.xml',
'title': 'failed_test (ClassName) failed'
},
{
'path': 'ClassName',
'start_line': 0,
'end_line': 0,
'annotation_level': 'failure',
'message': 'minimal-attributes.xml',
'title': 'error_test (ClassName) with error'
},
{
'path': '.github',
'start_line': 0,
'end_line': 0,
'annotation_level': 'notice',
'message':
'There is 1 skipped test, see "Raw output" for the name of the '
'skipped test.',
'title': '1 skipped test found',
'raw_details': 'ClassName ‑ skipped_test'
},
{
'path': '.github',
'start_line': 0,
'end_line': 0,
'annotation_level': 'notice',
'message': 'There are 4 tests, see "Raw output" for the full list of tests.',
'title': '4 tests found',
'raw_details':
'ClassName ‑ error_test\nClassName ‑ failed_test\nClassName ‑ '
'skipped_test\nClassName ‑ test_name'
}
]
}
}
]
8 changes: 8 additions & 0 deletions python/test/files/junit-xml/minimal-attributes.results
Expand Up @@ -17,6 +17,8 @@ publish.unittestresults.ParsedUnitTestResults(
result='success',
message=None,
content=None,
stdout=None,
stderr=None,
time=None
),
publish.unittestresults.UnitTestCase(
Expand All @@ -28,6 +30,8 @@ publish.unittestresults.ParsedUnitTestResults(
result='skipped',
message=None,
content=None,
stdout=None,
stderr=None,
time=None
),
publish.unittestresults.UnitTestCase(
Expand All @@ -39,6 +43,8 @@ publish.unittestresults.ParsedUnitTestResults(
result='failure',
message=None,
content=None,
stdout=None,
stderr=None,
time=None
),
publish.unittestresults.UnitTestCase(
Expand All @@ -50,6 +56,8 @@ publish.unittestresults.ParsedUnitTestResults(
result='error',
message=None,
content=None,
stdout=None,
stderr=None,
time=None
)
]
Expand Down
31 changes: 31 additions & 0 deletions python/test/files/junit-xml/no-attributes.annotations
@@ -0,0 +1,31 @@
[
{
'name': 'Test Results',
'head_sha': 'commit sha',
'status': 'completed',
'conclusion': 'failure',
'output': {
'title': '1 errors, 1 fail, 1 skipped, 1 pass in 0s',
'summary':
'4 tests\u2002\u2003\u20031 '
'[:heavy_check_mark:](https://github.com/EnricoMi/publish-unit-test-re'
'sult-action/blob/v1.20/README.md#the-symbols "passed tests")\u2003\u2003'
'0s '
'[:stopwatch:](https://github.com/EnricoMi/publish-unit-test-result-ac'
'tion/blob/v1.20/README.md#the-symbols "duration of all tests")\n1 '
'suites\u2003\u20031 '
'[:zzz:](https://github.com/EnricoMi/publish-unit-test-result-action/b'
'lob/v1.20/README.md#the-symbols "skipped / disabled tests")\n1 files\u2004'
'\u2002\u2003\u20031 '
'[:x:](https://github.com/EnricoMi/publish-unit-test-result-action/blo'
'b/v1.20/README.md#the-symbols "failed tests")\u2003\u20031 '
'[:fire:](https://github.com/EnricoMi/publish-unit-test-result-action/'
'blob/v1.20/README.md#the-symbols "test errors")\n\nResults for '
'commit commit s.\n\n'
'[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMOw6AIBAFr'
'0KoLTSx8jKGIMSNfMwClfHuAoJC92Z2MxeVoISjC5kGQl0A/8EWkHmwJuIYMR58Os11ry'
'5wXn6LOODshGSgOiEQLRaDwdRemm3u5b+WuYllblvcag0+QlnE7YzeD8XajRvdAAAA\n',
'annotations': []
}
}
]