initial implementation of a github actions reporter #1678

Merged
15 commits merged on Mar 30, 2022
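The tests below assert on GitHub Actions workflow commands (::group::, ::endgroup::, ::error::) emitted by the new reporter. As rough orientation only - not the reporter's actual implementation - here is a minimal Dart sketch of that output shape; the reportTest helper and its parameters are hypothetical names used purely for illustration.

// A minimal, hypothetical sketch of the output format the expectations below
// assert on. This is NOT the reporter's actual implementation; `reportTest`
// and its parameters are illustrative names only.
void reportTest(String name,
    {required bool passed, List<String> output = const []}) {
  // One collapsible group per test, with a status emoji in the title.
  final title = passed ? '✅ $name' : '❌ $name (failed)';
  print('::group::$title');
  // Prints and failure output from the test body land inside the group.
  output.forEach(print);
  print('::endgroup::');
}

void main() {
  reportTest('success 1', passed: true);
  reportTest('failure 1', passed: false, output: ['oh no']);
  // Summary line; `::error::` highlights it in the Actions log.
  print('::error::1 test passed, 1 failed.');
}

In the real reporter these commands are produced from the test engine's event stream; the sketch only shows the textual format that the expectations in the test file compare against.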
392 changes: 392 additions & 0 deletions pkgs/test/test/runner/github_reporter_test.dart
@@ -0,0 +1,392 @@
// Copyright (c) 2022, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

@TestOn('vm')

import 'dart:async';

import 'package:test_descriptor/test_descriptor.dart' as d;

import 'package:test/test.dart';

import '../io.dart';

void main() {
setUpAll(precompileTestExecutable);

test('reports when no tests are run', () async {
await d.file('test.dart', 'void main() {}').create();

var test = await runTest(['test.dart'], reporter: 'github');
expect(test.stdout, emitsThrough(contains('0 tests passed')));
await test.shouldExit(79);
});

test('runs several successful tests and reports when each completes', () {
return _expectReport('''
test('success 1', () {});
test('success 2', () {});
test('success 3', () {});''', '''
::group::✅ success 1
::endgroup::
::group::✅ success 2
::endgroup::
::group::✅ success 3
::endgroup::
🎉 3 tests passed.''');
});

test('runs several failing tests and reports when each fails', () {
return _expectReport('''
test('failure 1', () => throw TestFailure('oh no'));
test('failure 2', () => throw TestFailure('oh no'));
test('failure 3', () => throw TestFailure('oh no'));''', '''
::group::❌ failure 1 (failed)
oh no
test.dart 6:33 main.<fn>
::endgroup::
::group::❌ failure 2 (failed)
oh no
test.dart 7:33 main.<fn>
::endgroup::
::group::❌ failure 3 (failed)
oh no
test.dart 8:33 main.<fn>
::endgroup::
::error::0 tests passed, 3 failed.''');
});

test('includes the full stack trace with --verbose-trace', () async {
await d.file('test.dart', '''
import 'dart:async';

import 'package:test/test.dart';

void main() {
test("failure", () => throw "oh no");
}
''').create();

var test =
await runTest(['--verbose-trace', 'test.dart'], reporter: 'github');
expect(test.stdout, emitsThrough(contains('dart:async')));
await test.shouldExit(1);
});

test('runs failing tests along with successful tests', () {
return _expectReport('''
test('failure 1', () => throw TestFailure('oh no'));
test('success 1', () {});
test('failure 2', () => throw TestFailure('oh no'));
test('success 2', () {});''', '''
::group::❌ failure 1 (failed)
oh no
test.dart 6:33 main.<fn>
::endgroup::
::group::✅ success 1
::endgroup::
::group::❌ failure 2 (failed)
oh no
test.dart 8:33 main.<fn>
::endgroup::
::group::✅ success 2
::endgroup::
::error::2 tests passed, 2 failed.''');
});

test('always prints the full test name', () {
return _expectReport(
'''
test(
'really gosh dang long test name. Even longer than that. No, yet '
'longer. A little more... okay, that should do it.',
() {});''',
'''
::group::✅ really gosh dang long test name. Even longer than that. No, yet longer. A little more... okay, that should do it.
::endgroup::''',
useContains: true,
);
});

test('gracefully handles multiple test failures in a row', () {
return _expectReport('''
// This completer ensures that the test isolate isn't killed until all
// errors have been thrown.
var completer = Completer();
test('failures', () {
Future.microtask(() => throw 'first error');
Future.microtask(() => throw 'second error');
Future.microtask(() => throw 'third error');
Future.microtask(completer.complete);
});
test('wait', () => completer.future);''', '''
::group::❌ failures (failed)
first error
test.dart 10:34 main.<fn>.<fn>
second error
test.dart 11:34 main.<fn>.<fn>
third error
test.dart 12:34 main.<fn>.<fn>
::endgroup::
::group::✅ wait
::endgroup::
::error::1 test passed, 1 failed.''');
});

group('print:', () {
test('handles multiple prints', () {
return _expectReport(
'''
test('test', () {
print("one");
print("two");
print("three");
print("four");
});''',
'''
::group::✅ test
one
two
three
four
::endgroup::''',
useContains: true,
);
});

test('handles a print after the test completes', () {
return _expectReport('''
// This completer ensures that the test isolate isn't killed until all
// prints have happened.
var testDone = Completer();
var waitStarted = Completer();
test('test', () async {
waitStarted.future.then((_) {
Future(() => print("one"));
Future(() => print("two"));
Future(() => print("three"));
Future(() => print("four"));
Future(testDone.complete);
});
});

test('wait', () {
waitStarted.complete();
return testDone.future;
});''', '''
::group::✅ test
::endgroup::
one
two
three
four
::group::✅ wait
::endgroup::
🎉 2 tests passed.''');
});

// todo: fix this
// test('interleaves prints and errors', () {
// return _expectReport('''
// // This completer ensures that the test isolate isn't killed until all
// // prints have happened.
// var completer = Completer();
// test('test', () {
// scheduleMicrotask(() {
// print("three");
// print("four");
// throw "second error";
// });

// scheduleMicrotask(() {
// print("five");
// print("six");
// completer.complete();
// });

// print("one");
// print("two");
// throw "first error";
// });

// test('wait', () => completer.future);''', '''
// +0: test
// one
// two
// +0 -1: test [E]
// first error
// test.dart 24:11 main.<fn>

// three
// four
// second error
// test.dart 13:13 main.<fn>.<fn>
// ===== asynchronous gap ===========================
// dart:async scheduleMicrotask
// test.dart 10:11 main.<fn>

// five
// six
// +0 -1: wait
// +1 -1: Some tests failed.''');
// });
});

group('skip:', () {
test('displays skipped tests', () {
return _expectReport('''
test('skip 1', () {}, skip: true);
test('skip 2', () {}, skip: true);
test('skip 3', () {}, skip: true);''', '''
::group::❎ skip 1 (skipped)
::endgroup::
::group::❎ skip 2 (skipped)
::endgroup::
::group::❎ skip 3 (skipped)
::endgroup::
🎉 0 tests passed, 3 skipped.''');
});

test('displays a skipped group', () {
return _expectReport('''
group('skip', () {
test('test 1', () {});
test('test 2', () {});
test('test 3', () {});
}, skip: true);''', '''
::group::❎ skip test 1 (skipped)
::endgroup::
::group::❎ skip test 2 (skipped)
::endgroup::
::group::❎ skip test 3 (skipped)
::endgroup::
🎉 0 tests passed, 3 skipped.''');
});

test('runs skipped tests along with successful tests', () {
return _expectReport('''
test('skip 1', () {}, skip: true);
test('success 1', () {});
test('skip 2', () {}, skip: true);
test('success 2', () {});''', '''
::group::❎ skip 1 (skipped)
::endgroup::
::group::✅ success 1
::endgroup::
::group::❎ skip 2 (skipped)
::endgroup::
::group::✅ success 2
::endgroup::
🎉 2 tests passed, 2 skipped.''');
});

test('runs skipped tests along with successful and failing tests', () {
return _expectReport('''
test('failure 1', () => throw TestFailure('oh no'));
test('skip 1', () {}, skip: true);
test('success 1', () {});
test('failure 2', () => throw TestFailure('oh no'));
test('skip 2', () {}, skip: true);
test('success 2', () {});''', '''
::group::❌ failure 1 (failed)
oh no
test.dart 6:35 main.<fn>
::endgroup::
::group::❎ skip 1 (skipped)
::endgroup::
::group::✅ success 1
::endgroup::
::group::❌ failure 2 (failed)
oh no
test.dart 9:35 main.<fn>
::endgroup::
::group::❎ skip 2 (skipped)
::endgroup::
::group::✅ success 2
::endgroup::
::error::2 tests passed, 2 failed, 2 skipped.''');
});

test('displays the skip reason if available', () {
return _expectReport('''
test('skip 1', () {}, skip: 'some reason');
test('skip 2', () {}, skip: 'or another');''', '''
::group::❎ skip 1 (skipped)
Skip: some reason
::endgroup::
::group::❎ skip 2 (skipped)
Skip: or another
::endgroup::
🎉 0 tests passed, 2 skipped.''');
});
});

test('loadSuite, setUpAll, and tearDownAll hidden if no content', () {
return _expectReport('''
group('one', () {
setUpAll(() {/* nothing to do here */});
tearDownAll(() {/* nothing to do here */});
test('test 1', () {});
});''', '''
::group::✅ one test 1
::endgroup::
🎉 1 test passed.''');
});

test('setUpAll and tearDownAll show when they have content', () {
return _expectReport('''
group('one', () {
setUpAll(() { print('one'); });
tearDownAll(() { print('two'); });
test('test 1', () {});
});''', '''
::group::⏺ one (setUpAll)
Review comment thread on the line above:

Contributor:
Why not use ✅ for these as well? This looks to me like a warning indicator.

I would consider using this icon for skipped tests actually?

Member Author:

There's some (incomplete) discussion of the icons here: #1678 (comment). I did play around with a lot of different icons, including just editing the DOM of the 'checks' page to see how things would look.

For this icon, I don't want to use the same icon as for a regular test; I think it's valuable for 6 user tests ==> 6 test icons in the output. I'd like the additional test plumbing to have distinct icons from the passed tests. Additionally, the icon should be the same width as the other icons - so the LHS isn't ragged - and shouldn't use a color which users already attach some meaning to (red, yellow, ...).

Other similar unicode chars can be found here: https://www.compart.com/en/unicode/.

Contributor:

What about ☑? That way it is different but still indicates success (it is a test that can fail), and it isn't a color that indicates a warning. My main complaint about the current choice is that the orange color indicates a warning to me.

Member Author:

I've been checking the rendering inline in the test results, just so we're getting the font that GitHub uses there. What I see for the current 'non-test' group char (⏺) is:

[screenshot: ⏺ rendered in the GitHub checks UI, Mar 29, 2022]

Which - for me - is a gray color, not an orange color. I do agree that orange (or red, or yellow) would indicate a failure or problem of some kind to the user - something we should avoid unless there's an actual issue.

Here's what the ☑ char looks like inline in test results (note the different size and style):

[screenshot: ☑ rendered in the GitHub checks UI, Mar 29, 2022]

Contributor:

Oh weird... yeah, it's orange on a Chromebook :(. I'm on my Mac now and I see it as a greyish blue. Both of these were looking at the GitHub UI, so the color still differs based on the platform rather than the terminal.

That is pretty unfortunate. @munificent was suggesting using an older check mark symbol that is just the check mark (no background) and then coloring the check mark explicitly?

Contributor:

The green checkmark seems to be fairly consistent, so I would suggest just rendering using that. Yes, you will see a different number of lines with green checkmarks than passed tests reported at the bottom, but personally that doesn't bother me (the text can still make it obvious that it is a setUpAll).

Contributor:

I also think we can definitely launch this with any of the suggested choices, and revisit later.

Member Author:

"that is just the check mark (no background) and then coloring the check mark explicitly?"

That would be great if we could do it :) From experimentation, we can't use ANSI colors in the summary line of a group here.

"I also think we can definitely launch this with any of the suggested choices, and revisit later."

Let's do that then; I don't think there's an ideal set of icons, unfortunately - I've experimented with many. I'll change to the green checkmark, and we can iterate as needed.

one
::endgroup::
::group::✅ one test 1
::endgroup::
::group::⏺ one (tearDownAll)
two
::endgroup::
🎉 1 test passed.''');
});
}

Future<void> _expectReport(
String tests,
String expected, {
List<String> args = const [],
bool useContains = false,
}) async {
await d.file('test.dart', '''
import 'dart:async';

import 'package:test/test.dart';

void main() {
$tests
}
''').create();

var test = await runTest([
'test.dart',
...args,
], reporter: 'github');
await test.shouldExit();

var stdoutLines = await test.stdoutStream().toList();
var actual = stdoutLines
.map((line) => line.trim())
.where((line) => line.isNotEmpty)
.join('\n');

// Un-indent the expected string.
var indentation = expected.indexOf(RegExp('[^ ]'));
expected = expected.split('\n').map((line) {
if (line.isEmpty) return line;
return line.substring(indentation);
}).join('\n');

expect(actual, useContains ? contains(expected) : equals(expected));
}