Add file-based reporter support (#1129)

Add support for writing any reporter output to a file in addition to the
reporter on `stdout`. The `json` reporter is the primary use case.
Adds a `--file-reporter` CLI argument and a `file_reporters` YAML config
option.

- Add a `MultiplexingReporter` to allow wiring up multiple reporters to
  listen at once.
- Plumb through the configuration mapping a reporter to a file path.
- Instantiate each file reporter with a sink from the file instead of
  the normal `stdout`.
diff --git a/pkgs/test/CHANGELOG.md b/pkgs/test/CHANGELOG.md
index 399850a..434bfce 100644
--- a/pkgs/test/CHANGELOG.md
+++ b/pkgs/test/CHANGELOG.md
@@ -1,5 +1,7 @@
 ## 1.10.1-dev
 
+* Add `file_reporters` configuration option and `--file-reporter` CLI option to
+  allow specifying a separate reporter that writes to a file instead of stdout.
 * Internal cleanup.
 
 ## 1.10.0
diff --git a/pkgs/test/doc/configuration.md b/pkgs/test/doc/configuration.md
index 0c0a3f0..7a7cabb 100644
--- a/pkgs/test/doc/configuration.md
+++ b/pkgs/test/doc/configuration.md
@@ -436,6 +436,19 @@
 This field is not supported in the
 [global configuration file](#global-configuration).
 
+### `file_reporters`
+
+This field specifies additional reporters that will write their output to a
+file rather than stdout. It should be a map of reporter names to file paths.
+
+```yaml
+file_reporters:
+  json: reports/tests.json
+```
+
+This field is not supported in the
+[global configuration file](#global-configuration).
+
 ### `fold_stack_frames`
 
 This field controls which packages' stack frames will be folded away
diff --git a/pkgs/test/doc/json_reporter.md b/pkgs/test/doc/json_reporter.md
index 669eebe..50348e7 100644
--- a/pkgs/test/doc/json_reporter.md
+++ b/pkgs/test/doc/json_reporter.md
@@ -19,6 +19,11 @@
 
     pub run test --reporter json <path-to-test-file>
 
+You may also use the `--file-reporter` option to enable the JSON reporter such
+that it writes to a file instead of stdout.
+
+    pub run test --file-reporter json:reports/tests.json <path-to-test-file>
+
 The JSON stream will be emitted via standard output. It will be a stream of JSON
 objects, separated by newlines.
 
diff --git a/pkgs/test/test/io.dart b/pkgs/test/test/io.dart
index 53e3533..0d92192 100644
--- a/pkgs/test/test/io.dart
+++ b/pkgs/test/test/io.dart
@@ -64,6 +64,7 @@
 /// Runs the test executable with the package root set properly.
 Future<TestProcess> runTest(Iterable<String> args,
     {String reporter,
+    String fileReporter,
     int concurrency,
     Map<String, String> environment,
     bool forwardStdio = false}) async {
@@ -71,10 +72,11 @@
 
   var allArgs = [
     p.absolute(p.join(await packageDir, 'bin/test.dart')),
-    '--concurrency=$concurrency'
+    '--concurrency=$concurrency',
+    if (reporter != null) '--reporter=$reporter',
+    if (fileReporter != null) '--file-reporter=$fileReporter',
+    ...?args,
   ];
-  if (reporter != null) allArgs.add('--reporter=$reporter');
-  allArgs.addAll(args);
 
   environment ??= {};
   environment.putIfAbsent('_DART_TEST_TESTING', () => 'true');
diff --git a/pkgs/test/test/runner/configuration/configuration_test.dart b/pkgs/test/test/runner/configuration/configuration_test.dart
index a5a1e97..913325d 100644
--- a/pkgs/test/test/runner/configuration/configuration_test.dart
+++ b/pkgs/test/test/runner/configuration/configuration_test.dart
@@ -23,6 +23,7 @@
         expect(merged.configurationPath, equals('dart_test.yaml'));
         expect(merged.dart2jsPath, equals(p.join(sdkDir, 'bin', 'dart2js')));
         expect(merged.reporter, equals(defaultReporter));
+        expect(merged.fileReporters, isEmpty);
         expect(merged.pubServeUrl, isNull);
         expect(merged.shardIndex, isNull);
         expect(merged.totalShards, isNull);
@@ -39,6 +40,7 @@
             configurationPath: 'special_test.yaml',
             dart2jsPath: '/tmp/dart2js',
             reporter: 'json',
+            fileReporters: {'json': 'out.json'},
             pubServePort: 1234,
             shardIndex: 3,
             totalShards: 10,
@@ -52,6 +54,7 @@
         expect(merged.configurationPath, equals('special_test.yaml'));
         expect(merged.dart2jsPath, equals('/tmp/dart2js'));
         expect(merged.reporter, equals('json'));
+        expect(merged.fileReporters, equals({'json': 'out.json'}));
         expect(merged.pubServeUrl.port, equals(1234));
         expect(merged.shardIndex, equals(3));
         expect(merged.totalShards, equals(10));
@@ -68,6 +71,7 @@
             configurationPath: 'special_test.yaml',
             dart2jsPath: '/tmp/dart2js',
             reporter: 'json',
+            fileReporters: {'json': 'out.json'},
             pubServePort: 1234,
             shardIndex: 3,
             totalShards: 10,
@@ -81,6 +85,7 @@
         expect(merged.configurationPath, equals('special_test.yaml'));
         expect(merged.dart2jsPath, equals('/tmp/dart2js'));
         expect(merged.reporter, equals('json'));
+        expect(merged.fileReporters, equals({'json': 'out.json'}));
         expect(merged.pubServeUrl.port, equals(1234));
         expect(merged.shardIndex, equals(3));
         expect(merged.totalShards, equals(10));
@@ -99,6 +104,7 @@
             configurationPath: 'special_test.yaml',
             dart2jsPath: '/tmp/dart2js',
             reporter: 'json',
+            fileReporters: {'json': 'old.json'},
             pubServePort: 1234,
             shardIndex: 2,
             totalShards: 4,
@@ -112,6 +118,7 @@
             configurationPath: 'test_special.yaml',
             dart2jsPath: '../dart2js',
             reporter: 'compact',
+            fileReporters: {'json': 'new.json'},
             pubServePort: 5678,
             shardIndex: 3,
             totalShards: 10,
@@ -126,6 +133,7 @@
         expect(merged.configurationPath, equals('test_special.yaml'));
         expect(merged.dart2jsPath, equals('../dart2js'));
         expect(merged.reporter, equals('compact'));
+        expect(merged.fileReporters, equals({'json': 'new.json'}));
         expect(merged.pubServeUrl.port, equals(5678));
         expect(merged.shardIndex, equals(3));
         expect(merged.totalShards, equals(10));
diff --git a/pkgs/test/test/runner/configuration/top_level_error_test.dart b/pkgs/test/test/runner/configuration/top_level_error_test.dart
index 7746789..eb08350 100644
--- a/pkgs/test/test/runner/configuration/top_level_error_test.dart
+++ b/pkgs/test/test/runner/configuration/top_level_error_test.dart
@@ -157,6 +157,51 @@
     });
   });
 
+  group('file_reporters', () {
+    test('rejects an invalid type', () async {
+      await d
+          .file('dart_test.yaml', jsonEncode({'file_reporters': 12}))
+          .create();
+
+      var test = await runTest(['test.dart']);
+      expect(
+          test.stderr, containsInOrder(['file_reporters must be a map', '^^']));
+      await test.shouldExit(exit_codes.data);
+    });
+
+    test('rejects an invalid value type', () async {
+      await d
+          .file(
+              'dart_test.yaml',
+              jsonEncode({
+                'file_reporters': {'json': 12}
+              }))
+          .create();
+
+      var test = await runTest(['test.dart']);
+      expect(test.stderr,
+          containsInOrder(['file_reporters value must be a string', '^^']));
+      await test.shouldExit(exit_codes.data);
+    });
+
+    test('rejects an invalid name', () async {
+      await d
+          .file(
+              'dart_test.yaml',
+              jsonEncode({
+                'file_reporters': {'non-existent': 'out'}
+              }))
+          .create();
+
+      var test = await runTest(['test.dart']);
+      expect(
+          test.stderr,
+          containsInOrder(
+              ['Unknown reporter "non-existent"', '^^^^^^^^^^^^^^']));
+      await test.shouldExit(exit_codes.data);
+    });
+  });
+
   test('rejects an invalid pub serve port', () async {
     await d.file('dart_test.yaml', jsonEncode({'pub_serve': 'foo'})).create();
 
diff --git a/pkgs/test/test/runner/json_file_reporter_test.dart b/pkgs/test/test/runner/json_file_reporter_test.dart
new file mode 100644
index 0000000..85ab331
--- /dev/null
+++ b/pkgs/test/test/runner/json_file_reporter_test.dart
@@ -0,0 +1,149 @@
+// Copyright (c) 2020, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+@TestOn('vm')
+
+import 'dart:async';
+import 'dart:io';
+
+import 'package:path/path.dart' as p;
+
+import 'package:test_descriptor/test_descriptor.dart' as d;
+
+import 'package:test/test.dart';
+import 'package:test_core/src/util/exit_codes.dart' as exit_codes;
+
+import '../io.dart';
+import 'json_reporter_utils.dart';
+
+void main() {
+  test('runs successful tests with a stdout reporter and file reporter', () {
+    return _expectReports('''
+      test('success 1', () {});
+      test('success 2', () {});
+      test('success 3', () {});
+    ''', '''
+      +0: success 1
+      +1: success 2
+      +2: success 3
+      +3: All tests passed!''', [
+      [
+        suiteJson(0),
+        testStartJson(1, 'loading test.dart', groupIDs: []),
+        testDoneJson(1, hidden: true),
+      ],
+      [
+        groupJson(2, testCount: 3),
+        testStartJson(3, 'success 1', line: 6, column: 7),
+        testDoneJson(3),
+        testStartJson(4, 'success 2', line: 7, column: 7),
+        testDoneJson(4),
+        testStartJson(5, 'success 3', line: 8, column: 7),
+        testDoneJson(5),
+      ]
+    ], doneJson());
+  });
+
+  test('runs failing tests with a stdout reporter and file reporter', () {
+    return _expectReports('''
+      test('failure 1', () => throw new TestFailure('oh no'));
+      test('failure 2', () => throw new TestFailure('oh no'));
+      test('failure 3', () => throw new TestFailure('oh no'));
+    ''', '''
+      +0: failure 1
+      +0 -1: failure 1 [E]
+        oh no
+        test.dart 6:31  main.<fn>
+
+      +0 -1: failure 2
+      +0 -2: failure 2 [E]
+        oh no
+        test.dart 7:31  main.<fn>
+
+      +0 -2: failure 3
+      +0 -3: failure 3 [E]
+        oh no
+        test.dart 8:31  main.<fn>
+
+      +0 -3: Some tests failed.''', [
+      [
+        suiteJson(0),
+        testStartJson(1, 'loading test.dart', groupIDs: []),
+        testDoneJson(1, hidden: true),
+      ],
+      [
+        groupJson(2, testCount: 3),
+        testStartJson(3, 'failure 1', line: 6, column: 7),
+        errorJson(3, 'oh no', isFailure: true),
+        testDoneJson(3, result: 'failure'),
+        testStartJson(4, 'failure 2', line: 7, column: 7),
+        errorJson(4, 'oh no', isFailure: true),
+        testDoneJson(4, result: 'failure'),
+        testStartJson(5, 'failure 3', line: 8, column: 7),
+        errorJson(5, 'oh no', isFailure: true),
+        testDoneJson(5, result: 'failure'),
+      ]
+    ], doneJson(success: false));
+  });
+
+  group('reports an error if --file-reporter', () {
+    test('is not in the form <reporter>:<filepath>', () async {
+      var test = await runTest(['--file-reporter=json']);
+      expect(test.stderr,
+          emits(contains('option must be in the form <reporter>:<filepath>')));
+      await test.shouldExit(exit_codes.usage);
+    });
+
+    test('targets a non-existent reporter', () async {
+      var test = await runTest(['--file-reporter=nope:output.txt']);
+      expect(
+          test.stderr, emits(contains('"nope" is not a supported reporter')));
+      await test.shouldExit(exit_codes.usage);
+    });
+  });
+}
+
+Future _expectReports(String tests, String stdoutExpected,
+    List<List<dynamic /*Map|Matcher*/ >> jsonFileExpected, Map jsonFileDone,
+    {List<String> args}) async {
+  await d.file('test.dart', '''
+    import 'dart:async';
+
+    import 'package:test/test.dart';
+
+    void main() {
+$tests
+    }
+  ''').create();
+
+  var test = await runTest(['test.dart', ...?args],
+      // Write to a file within a dir that doesn't yet exist to verify that the
+      // file is created recursively.
+      fileReporter: 'json:reports/tests.json');
+  await test.shouldExit();
+
+  // ---- stdout reporter verification ----
+  var stdoutLines = await test.stdoutStream().toList();
+
+  // Remove excess trailing whitespace and trim off timestamps.
+  var actual = stdoutLines.map((line) {
+    if (line.startsWith('  ') || line.isEmpty) return line.trimRight();
+    return line.trim().replaceFirst(RegExp('^[0-9]{2}:[0-9]{2} '), '');
+  }).join('\n');
+
+  // Un-indent the expected string.
+  var indentation = stdoutExpected.indexOf(RegExp('[^ ]'));
+  stdoutExpected = stdoutExpected.split('\n').map((line) {
+    if (line.isEmpty) return line;
+    return line.substring(indentation);
+  }).join('\n');
+
+  expect(actual, equals(stdoutExpected));
+
+  // ---- file reporter verification ----
+  var fileOutputLines =
+      File(p.join(d.sandbox, 'reports', 'tests.json')).readAsLinesSync();
+  await expectJsonReport(
+      fileOutputLines, test.pid, jsonFileExpected, jsonFileDone);
+}
diff --git a/pkgs/test/test/runner/json_reporter_test.dart b/pkgs/test/test/runner/json_reporter_test.dart
index c529073..58d0e47 100644
--- a/pkgs/test/test/runner/json_reporter_test.dart
+++ b/pkgs/test/test/runner/json_reporter_test.dart
@@ -5,16 +5,14 @@
 @TestOn('vm')
 
 import 'dart:async';
-import 'dart:convert';
 
 import 'package:path/path.dart' as p;
-
 import 'package:test_descriptor/test_descriptor.dart' as d;
 
-import 'package:test_core/src/runner/version.dart';
 import 'package:test/test.dart';
 
 import '../io.dart';
+import 'json_reporter_utils.dart';
 
 void main() {
   test('runs several successful tests and reports when each completes', () {
@@ -24,20 +22,20 @@
       test('success 3', () {});
     ''', [
       [
-        _suite(0),
-        _testStart(1, 'loading test.dart', groupIDs: []),
-        _testDone(1, hidden: true),
+        suiteJson(0),
+        testStartJson(1, 'loading test.dart', groupIDs: []),
+        testDoneJson(1, hidden: true),
       ],
       [
-        _group(2, testCount: 3),
-        _testStart(3, 'success 1', line: 6, column: 7),
-        _testDone(3),
-        _testStart(4, 'success 2', line: 7, column: 7),
-        _testDone(4),
-        _testStart(5, 'success 3', line: 8, column: 7),
-        _testDone(5),
+        groupJson(2, testCount: 3),
+        testStartJson(3, 'success 1', line: 6, column: 7),
+        testDoneJson(3),
+        testStartJson(4, 'success 2', line: 7, column: 7),
+        testDoneJson(4),
+        testStartJson(5, 'success 3', line: 8, column: 7),
+        testDoneJson(5),
       ]
-    ], _done());
+    ], doneJson());
   });
 
   test('runs several failing tests and reports when each fails', () {
@@ -47,23 +45,23 @@
       test('failure 3', () => throw new TestFailure('oh no'));
     ''', [
       [
-        _suite(0),
-        _testStart(1, 'loading test.dart', groupIDs: []),
-        _testDone(1, hidden: true),
+        suiteJson(0),
+        testStartJson(1, 'loading test.dart', groupIDs: []),
+        testDoneJson(1, hidden: true),
       ],
       [
-        _group(2, testCount: 3),
-        _testStart(3, 'failure 1', line: 6, column: 7),
-        _error(3, 'oh no', isFailure: true),
-        _testDone(3, result: 'failure'),
-        _testStart(4, 'failure 2', line: 7, column: 7),
-        _error(4, 'oh no', isFailure: true),
-        _testDone(4, result: 'failure'),
-        _testStart(5, 'failure 3', line: 8, column: 7),
-        _error(5, 'oh no', isFailure: true),
-        _testDone(5, result: 'failure'),
+        groupJson(2, testCount: 3),
+        testStartJson(3, 'failure 1', line: 6, column: 7),
+        errorJson(3, 'oh no', isFailure: true),
+        testDoneJson(3, result: 'failure'),
+        testStartJson(4, 'failure 2', line: 7, column: 7),
+        errorJson(4, 'oh no', isFailure: true),
+        testDoneJson(4, result: 'failure'),
+        testStartJson(5, 'failure 3', line: 8, column: 7),
+        errorJson(5, 'oh no', isFailure: true),
+        testDoneJson(5, result: 'failure'),
       ]
-    ], _done(success: false));
+    ], doneJson(success: false));
   });
 
   test('includes the full stack trace with --verbose-trace', () async {
@@ -91,24 +89,24 @@
       test('success 2', () {});
     ''', [
       [
-        _suite(0),
-        _testStart(1, 'loading test.dart', groupIDs: []),
-        _testDone(1, hidden: true),
+        suiteJson(0),
+        testStartJson(1, 'loading test.dart', groupIDs: []),
+        testDoneJson(1, hidden: true),
       ],
       [
-        _group(2, testCount: 4),
-        _testStart(3, 'failure 1', line: 6, column: 7),
-        _error(3, 'oh no', isFailure: true),
-        _testDone(3, result: 'failure'),
-        _testStart(4, 'success 1', line: 7, column: 7),
-        _testDone(4),
-        _testStart(5, 'failure 2', line: 8, column: 7),
-        _error(5, 'oh no', isFailure: true),
-        _testDone(5, result: 'failure'),
-        _testStart(6, 'success 2', line: 9, column: 7),
-        _testDone(6),
+        groupJson(2, testCount: 4),
+        testStartJson(3, 'failure 1', line: 6, column: 7),
+        errorJson(3, 'oh no', isFailure: true),
+        testDoneJson(3, result: 'failure'),
+        testStartJson(4, 'success 1', line: 7, column: 7),
+        testDoneJson(4),
+        testStartJson(5, 'failure 2', line: 8, column: 7),
+        errorJson(5, 'oh no', isFailure: true),
+        testDoneJson(5, result: 'failure'),
+        testStartJson(6, 'success 2', line: 9, column: 7),
+        testDoneJson(6),
       ]
-    ], _done(success: false));
+    ], doneJson(success: false));
   });
 
   test('gracefully handles multiple test failures in a row', () {
@@ -125,21 +123,21 @@
       test('wait', () => completer.future);
     ''', [
       [
-        _suite(0),
-        _testStart(1, 'loading test.dart', groupIDs: []),
-        _testDone(1, hidden: true),
+        suiteJson(0),
+        testStartJson(1, 'loading test.dart', groupIDs: []),
+        testDoneJson(1, hidden: true),
       ],
       [
-        _group(2, testCount: 2),
-        _testStart(3, 'failures', line: 9, column: 7),
-        _error(3, 'first error'),
-        _error(3, 'second error'),
-        _error(3, 'third error'),
-        _testDone(3, result: 'error'),
-        _testStart(4, 'wait', line: 15, column: 7),
-        _testDone(4),
+        groupJson(2, testCount: 2),
+        testStartJson(3, 'failures', line: 9, column: 7),
+        errorJson(3, 'first error'),
+        errorJson(3, 'second error'),
+        errorJson(3, 'third error'),
+        testDoneJson(3, result: 'error'),
+        testStartJson(4, 'wait', line: 15, column: 7),
+        testDoneJson(4),
       ]
-    ], _done(success: false));
+    ], doneJson(success: false));
   });
 
   test('gracefully handles a test failing after completion', () {
@@ -161,24 +159,24 @@
       });
     ''', [
       [
-        _suite(0),
-        _testStart(1, 'loading test.dart', groupIDs: []),
-        _testDone(1, hidden: true),
+        suiteJson(0),
+        testStartJson(1, 'loading test.dart', groupIDs: []),
+        testDoneJson(1, hidden: true),
       ],
       [
-        _group(2, testCount: 2),
-        _testStart(3, 'failure', line: 11, column: 7),
-        _testDone(3),
-        _testStart(4, 'wait', line: 17, column: 7),
-        _error(3, 'oh no'),
-        _error(
+        groupJson(2, testCount: 2),
+        testStartJson(3, 'failure', line: 11, column: 7),
+        testDoneJson(3),
+        testStartJson(4, 'wait', line: 17, column: 7),
+        errorJson(3, 'oh no'),
+        errorJson(
             3,
             'This test failed after it had already completed. Make sure to '
             'use [expectAsync]\n'
             'or the [completes] matcher when testing async code.'),
-        _testDone(4),
+        testDoneJson(4),
       ]
-    ], _done(success: false));
+    ], doneJson(success: false));
   });
 
   test('reports each test in its proper groups', () {
@@ -195,25 +193,27 @@
       });
     ''', [
       [
-        _suite(0),
-        _testStart(1, 'loading test.dart', groupIDs: []),
-        _testDone(1, hidden: true),
+        suiteJson(0),
+        testStartJson(1, 'loading test.dart', groupIDs: []),
+        testDoneJson(1, hidden: true),
       ],
       [
-        _group(2, testCount: 3),
-        _group(3,
+        groupJson(2, testCount: 3),
+        groupJson(3,
             name: 'group 1', parentID: 2, testCount: 3, line: 6, column: 7),
-        _group(4, name: 'group 1 .2', parentID: 3, line: 7, column: 9),
-        _group(5, name: 'group 1 .2 .3', parentID: 4, line: 8, column: 11),
-        _testStart(6, 'group 1 .2 .3 success',
+        groupJson(4, name: 'group 1 .2', parentID: 3, line: 7, column: 9),
+        groupJson(5, name: 'group 1 .2 .3', parentID: 4, line: 8, column: 11),
+        testStartJson(6, 'group 1 .2 .3 success',
             groupIDs: [2, 3, 4, 5], line: 9, column: 13),
-        _testDone(6),
-        _testStart(7, 'group 1 success', groupIDs: [2, 3], line: 13, column: 9),
-        _testDone(7),
-        _testStart(8, 'group 1 success', groupIDs: [2, 3], line: 14, column: 9),
-        _testDone(8),
+        testDoneJson(6),
+        testStartJson(7, 'group 1 success',
+            groupIDs: [2, 3], line: 13, column: 9),
+        testDoneJson(7),
+        testStartJson(8, 'group 1 success',
+            groupIDs: [2, 3], line: 14, column: 9),
+        testDoneJson(8),
       ]
-    ], _done());
+    ], doneJson());
   });
 
   group('print:', () {
@@ -227,20 +227,20 @@
         });
       ''', [
         [
-          _suite(0),
-          _testStart(1, 'loading test.dart', groupIDs: []),
-          _testDone(1, hidden: true),
+          suiteJson(0),
+          testStartJson(1, 'loading test.dart', groupIDs: []),
+          testDoneJson(1, hidden: true),
         ],
         [
-          _group(2),
-          _testStart(3, 'test', line: 6, column: 9),
-          _print(3, 'one'),
-          _print(3, 'two'),
-          _print(3, 'three'),
-          _print(3, 'four'),
-          _testDone(3),
+          groupJson(2),
+          testStartJson(3, 'test', line: 6, column: 9),
+          printJson(3, 'one'),
+          printJson(3, 'two'),
+          printJson(3, 'three'),
+          printJson(3, 'four'),
+          testDoneJson(3),
         ]
-      ], _done());
+      ], doneJson());
     });
 
     test('handles a print after the test completes', () {
@@ -265,22 +265,22 @@
         });
       ''', [
         [
-          _suite(0),
-          _testStart(1, 'loading test.dart', groupIDs: []),
-          _testDone(1, hidden: true),
+          suiteJson(0),
+          testStartJson(1, 'loading test.dart', groupIDs: []),
+          testDoneJson(1, hidden: true),
         ],
         [
-          _group(2, testCount: 2),
-          _testStart(3, 'test', line: 10, column: 9),
-          _testDone(3),
-          _testStart(4, 'wait', line: 20, column: 9),
-          _print(3, 'one'),
-          _print(3, 'two'),
-          _print(3, 'three'),
-          _print(3, 'four'),
-          _testDone(4),
+          groupJson(2, testCount: 2),
+          testStartJson(3, 'test', line: 10, column: 9),
+          testDoneJson(3),
+          testStartJson(4, 'wait', line: 20, column: 9),
+          printJson(3, 'one'),
+          printJson(3, 'two'),
+          printJson(3, 'three'),
+          printJson(3, 'four'),
+          testDoneJson(4),
         ]
-      ], _done());
+      ], doneJson());
     });
 
     test('interleaves prints and errors', () {
@@ -309,26 +309,26 @@
         test('wait', () => completer.future);
       ''', [
         [
-          _suite(0),
-          _testStart(1, 'loading test.dart', groupIDs: []),
-          _testDone(1, hidden: true),
+          suiteJson(0),
+          testStartJson(1, 'loading test.dart', groupIDs: []),
+          testDoneJson(1, hidden: true),
         ],
         [
-          _group(2, testCount: 2),
-          _testStart(3, 'test', line: 9, column: 9),
-          _print(3, 'one'),
-          _print(3, 'two'),
-          _error(3, 'first error'),
-          _print(3, 'three'),
-          _print(3, 'four'),
-          _error(3, 'second error'),
-          _print(3, 'five'),
-          _print(3, 'six'),
-          _testDone(3, result: 'error'),
-          _testStart(4, 'wait', line: 27, column: 9),
-          _testDone(4),
+          groupJson(2, testCount: 2),
+          testStartJson(3, 'test', line: 9, column: 9),
+          printJson(3, 'one'),
+          printJson(3, 'two'),
+          errorJson(3, 'first error'),
+          printJson(3, 'three'),
+          printJson(3, 'four'),
+          errorJson(3, 'second error'),
+          printJson(3, 'five'),
+          printJson(3, 'six'),
+          testDoneJson(3, result: 'error'),
+          testStartJson(4, 'wait', line: 27, column: 9),
+          testDoneJson(4),
         ]
-      ], _done(success: false));
+      ], doneJson(success: false));
     });
   });
 
@@ -340,20 +340,20 @@
         test('skip 3', () {}, skip: true);
       ''', [
         [
-          _suite(0),
-          _testStart(1, 'loading test.dart', groupIDs: []),
-          _testDone(1, hidden: true),
+          suiteJson(0),
+          testStartJson(1, 'loading test.dart', groupIDs: []),
+          testDoneJson(1, hidden: true),
         ],
         [
-          _group(2, testCount: 3),
-          _testStart(3, 'skip 1', skip: true, line: 6, column: 9),
-          _testDone(3, skipped: true),
-          _testStart(4, 'skip 2', skip: true, line: 7, column: 9),
-          _testDone(4, skipped: true),
-          _testStart(5, 'skip 3', skip: true, line: 8, column: 9),
-          _testDone(5, skipped: true),
+          groupJson(2, testCount: 3),
+          testStartJson(3, 'skip 1', skip: true, line: 6, column: 9),
+          testDoneJson(3, skipped: true),
+          testStartJson(4, 'skip 2', skip: true, line: 7, column: 9),
+          testDoneJson(4, skipped: true),
+          testStartJson(5, 'skip 3', skip: true, line: 8, column: 9),
+          testDoneJson(5, skipped: true),
         ]
-      ], _done());
+      ], doneJson());
     });
 
     test('reports skipped groups', () {
@@ -365,30 +365,30 @@
         }, skip: true);
       ''', [
         [
-          _suite(0),
-          _testStart(1, 'loading test.dart', groupIDs: []),
-          _testDone(1, hidden: true),
+          suiteJson(0),
+          testStartJson(1, 'loading test.dart', groupIDs: []),
+          testDoneJson(1, hidden: true),
         ],
         [
-          _group(2, testCount: 3),
-          _group(3,
+          groupJson(2, testCount: 3),
+          groupJson(3,
               name: 'skip',
               parentID: 2,
               skip: true,
               testCount: 3,
               line: 6,
               column: 9),
-          _testStart(4, 'skip success 1',
+          testStartJson(4, 'skip success 1',
               groupIDs: [2, 3], skip: true, line: 7, column: 11),
-          _testDone(4, skipped: true),
-          _testStart(5, 'skip success 2',
+          testDoneJson(4, skipped: true),
+          testStartJson(5, 'skip success 2',
               groupIDs: [2, 3], skip: true, line: 8, column: 11),
-          _testDone(5, skipped: true),
-          _testStart(6, 'skip success 3',
+          testDoneJson(5, skipped: true),
+          testStartJson(6, 'skip success 3',
               groupIDs: [2, 3], skip: true, line: 9, column: 11),
-          _testDone(6, skipped: true),
+          testDoneJson(6, skipped: true),
         ]
-      ], _done());
+      ], doneJson());
     });
 
     test('reports the skip reason if available', () {
@@ -397,20 +397,20 @@
         test('skip 2', () {}, skip: 'or another');
       ''', [
         [
-          _suite(0),
-          _testStart(1, 'loading test.dart', groupIDs: []),
-          _testDone(1, hidden: true),
+          suiteJson(0),
+          testStartJson(1, 'loading test.dart', groupIDs: []),
+          testDoneJson(1, hidden: true),
         ],
         [
-          _group(2, testCount: 2),
-          _testStart(3, 'skip 1', skip: 'some reason', line: 6, column: 9),
-          _print(3, 'Skip: some reason', type: 'skip'),
-          _testDone(3, skipped: true),
-          _testStart(4, 'skip 2', skip: 'or another', line: 7, column: 9),
-          _print(4, 'Skip: or another', type: 'skip'),
-          _testDone(4, skipped: true),
+          groupJson(2, testCount: 2),
+          testStartJson(3, 'skip 1', skip: 'some reason', line: 6, column: 9),
+          printJson(3, 'Skip: some reason', type: 'skip'),
+          testDoneJson(3, skipped: true),
+          testStartJson(4, 'skip 2', skip: 'or another', line: 7, column: 9),
+          printJson(4, 'Skip: or another', type: 'skip'),
+          testDoneJson(4, skipped: true),
         ]
-      ], _done());
+      ], doneJson());
     });
 
     test('runs skipped tests with --run-skipped', () {
@@ -421,19 +421,19 @@
       ''',
           [
             [
-              _suite(0),
-              _testStart(1, 'loading test.dart', groupIDs: []),
-              _testDone(1, hidden: true),
+              suiteJson(0),
+              testStartJson(1, 'loading test.dart', groupIDs: []),
+              testDoneJson(1, hidden: true),
             ],
             [
-              _group(2, testCount: 2),
-              _testStart(3, 'skip 1', line: 6, column: 9),
-              _testDone(3),
-              _testStart(4, 'skip 2', line: 7, column: 9),
-              _testDone(4),
+              groupJson(2, testCount: 2),
+              testStartJson(3, 'skip 1', line: 6, column: 9),
+              testDoneJson(3),
+              testStartJson(4, 'skip 2', line: 7, column: 9),
+              testDoneJson(4),
             ]
           ],
-          _done(),
+          doneJson(),
           args: ['--run-skipped']);
     });
   });
@@ -447,20 +447,20 @@
         test('success', () {});
       ''', [
         [
-          _suite(0),
-          _testStart(1, 'loading test.dart', groupIDs: []),
-          _testDone(1, hidden: true),
+          suiteJson(0),
+          testStartJson(1, 'loading test.dart', groupIDs: []),
+          testDoneJson(1, hidden: true),
         ],
         [
-          _group(2, testCount: 1),
-          _testStart(3, '(setUpAll)', line: 6, column: 9),
-          _testDone(3, hidden: true),
-          _testStart(4, 'success', line: 9, column: 9),
-          _testDone(4),
-          _testStart(5, '(tearDownAll)'),
-          _testDone(5, hidden: true),
+          groupJson(2, testCount: 1),
+          testStartJson(3, '(setUpAll)', line: 6, column: 9),
+          testDoneJson(3, hidden: true),
+          testStartJson(4, 'success', line: 9, column: 9),
+          testDoneJson(4),
+          testStartJson(5, '(tearDownAll)'),
+          testDoneJson(5, hidden: true),
         ]
-      ], _done());
+      ], doneJson());
     });
 
     test('the first call to tearDownAll()', () {
@@ -471,18 +471,18 @@
         test('success', () {});
       ''', [
         [
-          _testStart(1, 'loading test.dart', groupIDs: []),
-          _testDone(1, hidden: true),
+          testStartJson(1, 'loading test.dart', groupIDs: []),
+          testDoneJson(1, hidden: true),
         ],
         [
-          _suite(0),
-          _group(2, testCount: 1),
-          _testStart(3, 'success', line: 9, column: 9),
-          _testDone(3),
-          _testStart(4, '(tearDownAll)', line: 6, column: 9),
-          _testDone(4, hidden: true),
+          suiteJson(0),
+          groupJson(2, testCount: 1),
+          testStartJson(3, 'success', line: 9, column: 9),
+          testDoneJson(3),
+          testStartJson(4, '(tearDownAll)', line: 6, column: 9),
+          testDoneJson(4, hidden: true),
         ]
-      ], _done());
+      ], doneJson());
     });
 
     test('a test compiled to JS', () {
@@ -492,18 +492,18 @@
       ''',
           [
             [
-              _suite(0, platform: 'chrome'),
-              _testStart(1, 'compiling test.dart', groupIDs: []),
-              _print(1, startsWith('Compiled')),
-              _testDone(1, hidden: true),
+              suiteJson(0, platform: 'chrome'),
+              testStartJson(1, 'compiling test.dart', groupIDs: []),
+              printJson(1, startsWith('Compiled')),
+              testDoneJson(1, hidden: true),
             ],
             [
-              _group(2, testCount: 1),
-              _testStart(3, 'success', line: 6, column: 9),
-              _testDone(3),
+              groupJson(2, testCount: 1),
+              testStartJson(3, 'success', line: 6, column: 9),
+              testDoneJson(3),
             ]
           ],
-          _done(),
+          doneJson(),
           args: ['-p', 'chrome']);
     }, tags: ['chrome'], skip: 'https://github.com/dart-lang/test/issues/872');
 
@@ -515,25 +515,25 @@
     ''',
           [
             [
-              _suite(0),
-              _testStart(1, 'loading test.dart', groupIDs: []),
-              _testDone(1, hidden: true),
+              suiteJson(0),
+              testStartJson(1, 'loading test.dart', groupIDs: []),
+              testDoneJson(1, hidden: true),
             ],
             [
-              _group(2, testCount: 2),
-              _testStart(3, 'success 1',
+              groupJson(2, testCount: 2),
+              testStartJson(3, 'success 1',
                   line: 3,
                   column: 50,
                   url: p.toUri(p.join(d.sandbox, 'common.dart')).toString(),
                   root_column: 7,
                   root_line: 7,
                   root_url: p.toUri(p.join(d.sandbox, 'test.dart')).toString()),
-              _testDone(3),
-              _testStart(4, 'success 2', line: 8, column: 7),
-              _testDone(4),
+              testDoneJson(3),
+              testStartJson(4, 'success 2', line: 8, column: 7),
+              testDoneJson(4),
             ]
           ],
-          _done(),
+          doneJson(),
           externalLibraries: {
             'common.dart': '''
 import 'package:test/test.dart';
@@ -553,18 +553,18 @@
     ''',
         [
           [
-            _suite(0, platform: 'chrome'),
-            _testStart(1, 'compiling test.dart', groupIDs: []),
-            _print(1, startsWith('Compiled')),
-            _testDone(1, hidden: true),
+            suiteJson(0, platform: 'chrome'),
+            testStartJson(1, 'compiling test.dart', groupIDs: []),
+            printJson(1, startsWith('Compiled')),
+            testDoneJson(1, hidden: true),
           ],
           [
-            _group(2, testCount: 1),
-            _testStart(3, 'success'),
-            _testDone(3),
+            groupJson(2, testCount: 1),
+            testStartJson(3, 'success'),
+            testDoneJson(3),
           ],
         ],
-        _done(),
+        doneJson(),
         args: ['-p', 'chrome', '--js-trace']);
   }, tags: ['chrome']);
 }
@@ -598,205 +598,5 @@
   await test.shouldExit();
 
   var stdoutLines = await test.stdoutStream().toList();
-  // Ensure the output is of the same length, including start, done and all
-  // suites messages.
-  expect(stdoutLines.length, equals(expected.fold(3, (a, m) => a + m.length)),
-      reason: 'Expected $stdoutLines to match $expected.');
-
-  dynamic decodeLine(String l) =>
-      jsonDecode(l)..remove('time')..remove('stackTrace');
-
-  // Should contain all suites message.
-  expect(stdoutLines.map(decodeLine), containsAll([_allSuites()]));
-
-  // A single start event is emitted first.
-  final _start = {
-    'type': 'start',
-    'protocolVersion': '0.1.1',
-    'runnerVersion': testVersion,
-    'pid': test.pid
-  };
-  expect(decodeLine(stdoutLines.first), equals(_start));
-
-  // A single done event is emmited last.
-  expect(decodeLine(stdoutLines.last), equals(done));
-
-  for (var value in expected) {
-    expect(stdoutLines.map(decodeLine), containsAllInOrder(value));
-  }
-}
-
-/// Returns the event emitted by the JSON reporter providing information about
-/// all suites.
-///
-/// The [count] defaults to 1.
-Map _allSuites({int count}) {
-  return {'type': 'allSuites', 'count': count ?? 1};
-}
-
-/// Returns the event emitted by the JSON reporter indicating that a suite has
-/// begun running.
-///
-/// The [platform] defaults to `"vm"`, the [path] defaults to `"test.dart"`.
-Map _suite(int id, {String platform, String path}) {
-  return {
-    'type': 'suite',
-    'suite': {
-      'id': id,
-      'platform': platform ?? 'vm',
-      'path': path ?? 'test.dart'
-    }
-  };
-}
-
-/// Returns the event emitted by the JSON reporter indicating that a group has
-/// begun running.
-///
-/// If [skip] is `true`, the group is expected to be marked as skipped without a
-/// reason. If it's a [String], the group is expected to be marked as skipped
-/// with that reason.
-///
-/// The [testCount] parameter indicates the number of tests in the group. It
-/// defaults to 1.
-Map _group(int id,
-    {String name,
-    int suiteID,
-    int parentID,
-    skip,
-    int testCount,
-    int line,
-    int column}) {
-  if ((line == null) != (column == null)) {
-    throw ArgumentError(
-        'line and column must either both be null or both be passed');
-  }
-
-  return {
-    'type': 'group',
-    'group': {
-      'id': id,
-      'name': name,
-      'suiteID': suiteID ?? 0,
-      'parentID': parentID,
-      'metadata': _metadata(skip: skip),
-      'testCount': testCount ?? 1,
-      'line': line,
-      'column': column,
-      'url': line == null
-          ? null
-          : p.toUri(p.join(d.sandbox, 'test.dart')).toString()
-    }
-  };
-}
-
-/// Returns the event emitted by the JSON reporter indicating that a test has
-/// begun running.
-///
-/// If [parentIDs] is passed, it's the IDs of groups containing this test. If
-/// [skip] is `true`, the test is expected to be marked as skipped without a
-/// reason. If it's a [String], the test is expected to be marked as skipped
-/// with that reason.
-Map _testStart(int id, String name,
-    {int suiteID,
-    Iterable<int> groupIDs,
-    int line,
-    int column,
-    String url,
-    skip,
-    int root_line,
-    int root_column,
-    String root_url}) {
-  if ((line == null) != (column == null)) {
-    throw ArgumentError(
-        'line and column must either both be null or both be passed');
-  }
-
-  url ??=
-      line == null ? null : p.toUri(p.join(d.sandbox, 'test.dart')).toString();
-  var expected = {
-    'type': 'testStart',
-    'test': {
-      'id': id,
-      'name': name,
-      'suiteID': suiteID ?? 0,
-      'groupIDs': groupIDs ?? [2],
-      'metadata': _metadata(skip: skip),
-      'line': line,
-      'column': column,
-      'url': url,
-    }
-  };
-  var testObj = expected['test'] as Map<String, dynamic>;
-  if (root_line != null) {
-    testObj['root_line'] = root_line;
-  }
-  if (root_column != null) {
-    testObj['root_column'] = root_column;
-  }
-  if (root_url != null) {
-    testObj['root_url'] = root_url;
-  }
-  return expected;
-}
-
-/// Returns the event emitted by the JSON reporter indicating that a test
-/// printed [message].
-Matcher _print(int id, dynamic /*String|Matcher*/ message, {String type}) {
-  return allOf(
-    hasLength(4),
-    containsPair('type', 'print'),
-    containsPair('testID', id),
-    containsPair('message', message),
-    containsPair('messageType', type ?? 'print'),
-  );
-}
-
-/// Returns the event emitted by the JSON reporter indicating that a test
-/// emitted [error].
-///
-/// The [isFailure] parameter indicates whether the error was a [TestFailure] or
-/// not.
-Map _error(int id, String error, {bool isFailure = false}) {
-  return {
-    'type': 'error',
-    'testID': id,
-    'error': error,
-    'isFailure': isFailure
-  };
-}
-
-/// Returns the event emitted by the JSON reporter indicating that a test
-/// finished.
-///
-/// The [result] parameter indicates the result of the test. It defaults to
-/// `"success"`.
-///
-/// The [hidden] parameter indicates whether the test should not be displayed
-/// after finishing. The [skipped] parameter indicates whether the test was
-/// skipped.
-Map _testDone(int id,
-    {String result, bool hidden = false, bool skipped = false}) {
-  result ??= 'success';
-  return {
-    'type': 'testDone',
-    'testID': id,
-    'result': result,
-    'hidden': hidden,
-    'skipped': skipped
-  };
-}
-
-/// Returns the event emitted by the JSON reporter indicating that the entire
-/// run finished.
-Map _done({bool success = true}) => {'type': 'done', 'success': success};
-
-/// Returns the serialized metadata corresponding to [skip].
-Map _metadata({skip}) {
-  if (skip == true) {
-    return {'skip': true, 'skipReason': null};
-  } else if (skip is String) {
-    return {'skip': true, 'skipReason': skip};
-  } else {
-    return {'skip': false, 'skipReason': null};
-  }
+  return expectJsonReport(stdoutLines, test.pid, expected, done);
 }
diff --git a/pkgs/test/test/runner/json_reporter_utils.dart b/pkgs/test/test/runner/json_reporter_utils.dart
new file mode 100644
index 0000000..8677ece
--- /dev/null
+++ b/pkgs/test/test/runner/json_reporter_utils.dart
@@ -0,0 +1,221 @@
+// Copyright (c) 2020, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+import 'dart:convert';
+
+import 'package:path/path.dart' as p;
+import 'package:test_descriptor/test_descriptor.dart' as d;
+
+import 'package:test/test.dart';
+import 'package:test_core/src/runner/version.dart';
+
+/// Asserts that the outputs from running tests with a JSON reporter match the
+/// given expectations.
+///
+/// Verifies that [outputLines] matches each set of matchers in [expected],
+/// includes the [testPid] from the test process, and ends with [done].
+Future expectJsonReport(List<String> outputLines, int testPid,
+    List<List<dynamic /*Map|Matcher*/ >> expected, Map done) async {
+  // Ensure the output is of the same length, including start, done and all
+  // suites messages.
+  expect(outputLines.length, equals(expected.fold(3, (a, m) => a + m.length)),
+      reason: 'Expected $outputLines to match $expected.');
+
+  dynamic decodeLine(String l) =>
+      jsonDecode(l)..remove('time')..remove('stackTrace');
+
+  // Should contain the allSuites message.
+  expect(outputLines.map(decodeLine), containsAll([allSuitesJson()]));
+
+  // A single start event is emitted first.
+  final _start = {
+    'type': 'start',
+    'protocolVersion': '0.1.1',
+    'runnerVersion': testVersion,
+    'pid': testPid,
+  };
+  expect(decodeLine(outputLines.first), equals(_start));
+
+  // A single done event is emitted last.
+  expect(decodeLine(outputLines.last), equals(done));
+
+  for (var value in expected) {
+    expect(outputLines.map(decodeLine), containsAllInOrder(value));
+  }
+}
+
+/// Returns the event emitted by the JSON reporter providing information about
+/// all suites.
+///
+/// The [count] defaults to 1.
+Map allSuitesJson({int count}) {
+  return {'type': 'allSuites', 'count': count ?? 1};
+}
+
+/// Returns the event emitted by the JSON reporter indicating that a suite has
+/// begun running.
+///
+/// The [platform] defaults to `"vm"`, the [path] defaults to `"test.dart"`.
+Map suiteJson(int id, {String platform, String path}) {
+  return {
+    'type': 'suite',
+    'suite': {
+      'id': id,
+      'platform': platform ?? 'vm',
+      'path': path ?? 'test.dart'
+    }
+  };
+}
+
+/// Returns the event emitted by the JSON reporter indicating that a group has
+/// begun running.
+///
+/// If [skip] is `true`, the group is expected to be marked as skipped without a
+/// reason. If it's a [String], the group is expected to be marked as skipped
+/// with that reason.
+///
+/// The [testCount] parameter indicates the number of tests in the group. It
+/// defaults to 1.
+Map groupJson(int id,
+    {String name,
+    int suiteID,
+    int parentID,
+    skip,
+    int testCount,
+    int line,
+    int column}) {
+  if ((line == null) != (column == null)) {
+    throw ArgumentError(
+        'line and column must either both be null or both be passed');
+  }
+
+  return {
+    'type': 'group',
+    'group': {
+      'id': id,
+      'name': name,
+      'suiteID': suiteID ?? 0,
+      'parentID': parentID,
+      'metadata': metadataJson(skip: skip),
+      'testCount': testCount ?? 1,
+      'line': line,
+      'column': column,
+      'url': line == null
+          ? null
+          : p.toUri(p.join(d.sandbox, 'test.dart')).toString()
+    }
+  };
+}
+
+/// Returns the event emitted by the JSON reporter indicating that a test has
+/// begun running.
+///
+/// If [groupIDs] is passed, it's the IDs of groups containing this test. If
+/// [skip] is `true`, the test is expected to be marked as skipped without a
+/// reason. If it's a [String], the test is expected to be marked as skipped
+/// with that reason.
+Map testStartJson(int id, String name,
+    {int suiteID,
+    Iterable<int> groupIDs,
+    int line,
+    int column,
+    String url,
+    skip,
+    int root_line,
+    int root_column,
+    String root_url}) {
+  if ((line == null) != (column == null)) {
+    throw ArgumentError(
+        'line and column must either both be null or both be passed');
+  }
+
+  url ??=
+      line == null ? null : p.toUri(p.join(d.sandbox, 'test.dart')).toString();
+  var expected = {
+    'type': 'testStart',
+    'test': {
+      'id': id,
+      'name': name,
+      'suiteID': suiteID ?? 0,
+      'groupIDs': groupIDs ?? [2],
+      'metadata': metadataJson(skip: skip),
+      'line': line,
+      'column': column,
+      'url': url,
+    }
+  };
+  var testObj = expected['test'] as Map<String, dynamic>;
+  if (root_line != null) {
+    testObj['root_line'] = root_line;
+  }
+  if (root_column != null) {
+    testObj['root_column'] = root_column;
+  }
+  if (root_url != null) {
+    testObj['root_url'] = root_url;
+  }
+  return expected;
+}
+
+/// Returns the event emitted by the JSON reporter indicating that a test
+/// printed [message].
+Matcher printJson(int id, dynamic /*String|Matcher*/ message, {String type}) {
+  return allOf(
+    hasLength(4),
+    containsPair('type', 'print'),
+    containsPair('testID', id),
+    containsPair('message', message),
+    containsPair('messageType', type ?? 'print'),
+  );
+}
+
+/// Returns the event emitted by the JSON reporter indicating that a test
+/// emitted [error].
+///
+/// The [isFailure] parameter indicates whether the error was a [TestFailure] or
+/// not.
+Map errorJson(int id, String error, {bool isFailure = false}) {
+  return {
+    'type': 'error',
+    'testID': id,
+    'error': error,
+    'isFailure': isFailure
+  };
+}
+
+/// Returns the event emitted by the JSON reporter indicating that a test
+/// finished.
+///
+/// The [result] parameter indicates the result of the test. It defaults to
+/// `"success"`.
+///
+/// The [hidden] parameter indicates whether the test should not be displayed
+/// after finishing. The [skipped] parameter indicates whether the test was
+/// skipped.
+Map testDoneJson(int id,
+    {String result, bool hidden = false, bool skipped = false}) {
+  result ??= 'success';
+  return {
+    'type': 'testDone',
+    'testID': id,
+    'result': result,
+    'hidden': hidden,
+    'skipped': skipped
+  };
+}
+
+/// Returns the event emitted by the JSON reporter indicating that the entire
+/// run finished.
+Map doneJson({bool success = true}) => {'type': 'done', 'success': success};
+
+/// Returns the serialized metadata corresponding to [skip].
+Map metadataJson({skip}) {
+  if (skip == true) {
+    return {'skip': true, 'skipReason': null};
+  } else if (skip is String) {
+    return {'skip': true, 'skipReason': skip};
+  } else {
+    return {'skip': false, 'skipReason': null};
+  }
+}
diff --git a/pkgs/test/test/runner/runner_test.dart b/pkgs/test/test/runner/runner_test.dart
index ae1e1e5..586a52e 100644
--- a/pkgs/test/test/runner/runner_test.dart
+++ b/pkgs/test/test/runner/runner_test.dart
@@ -112,6 +112,9 @@
           [expanded] (default)        A separate line for each update.
           [json]                      A machine-readable format (see https://goo.gl/gBsV1a).
 
+    --file-reporter                   The reporter used to write test results to a file.
+                                      Should be in the form <reporter>:<filepath>, e.g. "json:reports/tests.json"
+
     --verbose-trace                   Whether to emit stack traces with core library frames.
     --js-trace                        Whether to emit raw JavaScript stack traces for browser tests.
     --[no-]color                      Whether to use terminal colors.
diff --git a/pkgs/test_core/CHANGELOG.md b/pkgs/test_core/CHANGELOG.md
index 656b2e8..289de7a 100644
--- a/pkgs/test_core/CHANGELOG.md
+++ b/pkgs/test_core/CHANGELOG.md
@@ -1,5 +1,7 @@
 ## 0.2.17-dev
 
+* Add `file_reporters` configuration option and `--file-reporter` CLI option to
+  allow specifying a separate reporter that writes to a file instead of stdout.
 * Internal cleanup.
 
 ## 0.2.16
diff --git a/pkgs/test_core/lib/src/runner.dart b/pkgs/test_core/lib/src/runner.dart
index 037a7cb..7f13751 100644
--- a/pkgs/test_core/lib/src/runner.dart
+++ b/pkgs/test_core/lib/src/runner.dart
@@ -16,6 +16,7 @@
 import 'package:test_api/src/backend/suite_platform.dart'; // ignore: implementation_imports
 import 'package:test_api/src/backend/test.dart'; // ignore: implementation_imports
 import 'package:test_api/src/utils.dart'; // ignore: implementation_imports
+import 'package:test_core/src/runner/reporter/multiplex.dart';
 
 import 'util/io.dart';
 import 'runner/application_exception.dart';
@@ -69,17 +70,36 @@
   final _closeMemo = AsyncMemoizer();
   bool get _closed => _closeMemo.hasRun;
 
+  /// Sinks created for each file reporter (if there are any).
+  final List<IOSink> _sinks;
+
   /// Creates a new runner based on [configuration].
   factory Runner(Configuration config) => config.asCurrent(() {
         var engine =
             Engine(concurrency: config.concurrency, coverage: config.coverage);
 
-        var reporterDetails = allReporters[config.reporter];
+        var sinks = <IOSink>[];
+        Reporter createFileReporter(String reporterName, String filepath) {
+          final sink =
+              (File(filepath)..createSync(recursive: true)).openWrite();
+          sinks.add(sink);
+          return allReporters[reporterName].factory(config, engine, sink);
+        }
+
         return Runner._(
-            engine, reporterDetails.factory(config, engine, stdout));
+          engine,
+          MultiplexReporter([
+            // Standard reporter.
+            allReporters[config.reporter].factory(config, engine, stdout),
+            // File reporters.
+            for (var reporter in config.fileReporters.keys)
+              createFileReporter(reporter, config.fileReporters[reporter]),
+          ]),
+          sinks,
+        );
       });
 
-  Runner._(this._engine, this._reporter);
+  Runner._(this._engine, this._reporter, this._sinks);
 
   /// Starts the runner.
   ///
@@ -231,6 +251,10 @@
         await Future.wait([_loader.closeEphemeral(), _engine.close()]);
         if (timer != null) timer.cancel();
         await _loader.close();
+
+        // Flush any IOSinks created for file reporters.
+        await Future.wait(_sinks.map((s) => s.flush().then((_) => s.close())));
+        _sinks.clear();
       });
 
   /// Return a stream of [LoadSuite]s in [_config.paths].
diff --git a/pkgs/test_core/lib/src/runner/configuration.dart b/pkgs/test_core/lib/src/runner/configuration.dart
index 22ff258..e442f91 100644
--- a/pkgs/test_core/lib/src/runner/configuration.dart
+++ b/pkgs/test_core/lib/src/runner/configuration.dart
@@ -76,6 +76,10 @@
   String get reporter => _reporter ?? defaultReporter;
   final String _reporter;
 
+  /// The map of file reporters where the key is the name of the reporter and
+  /// the value is the filepath to which its output should be written.
+  final Map<String, String> fileReporters;
+
   /// Whether to disable retries of tests.
   bool get noRetry => _noRetry ?? false;
   final bool _noRetry;
@@ -227,6 +231,7 @@
       String configurationPath,
       String dart2jsPath,
       String reporter,
+      Map<String, String> fileReporters,
       String coverage,
       int pubServePort,
       int concurrency,
@@ -275,6 +280,7 @@
         configurationPath: configurationPath,
         dart2jsPath: dart2jsPath,
         reporter: reporter,
+        fileReporters: fileReporters,
         coverage: coverage,
         pubServePort: pubServePort,
         concurrency: concurrency,
@@ -336,6 +342,7 @@
       String configurationPath,
       String dart2jsPath,
       String reporter,
+      Map<String, String> fileReporters,
       String coverage,
       int pubServePort,
       int concurrency,
@@ -360,6 +367,7 @@
         _configurationPath = configurationPath,
         _dart2jsPath = dart2jsPath,
         _reporter = reporter,
+        fileReporters = fileReporters ?? {},
         _coverage = coverage,
         pubServeUrl = pubServePort == null
             ? null
@@ -485,6 +493,7 @@
         configurationPath: other._configurationPath ?? _configurationPath,
         dart2jsPath: other._dart2jsPath ?? _dart2jsPath,
         reporter: other._reporter ?? _reporter,
+        fileReporters: mergeMaps(fileReporters, other.fileReporters),
         coverage: other._coverage ?? _coverage,
         pubServePort: (other.pubServeUrl ?? pubServeUrl)?.port,
         concurrency: other._concurrency ?? _concurrency,
diff --git a/pkgs/test_core/lib/src/runner/configuration/args.dart b/pkgs/test_core/lib/src/runner/configuration/args.dart
index f71ae2d..558a4e8 100644
--- a/pkgs/test_core/lib/src/runner/configuration/args.dart
+++ b/pkgs/test_core/lib/src/runner/configuration/args.dart
@@ -124,6 +124,10 @@
       defaultsTo: defaultReporter,
       allowed: reporterDescriptions.keys.toList(),
       allowedHelp: reporterDescriptions);
+  parser.addOption('file-reporter',
+      help: 'The reporter used to write test results to a file.\n'
+          'Should be in the form <reporter>:<filepath>, '
+          'e.g. "json:reports/tests.json"');
   parser.addFlag('verbose-trace',
       negatable: false,
       help: 'Whether to emit stack traces with core library frames.');
@@ -235,6 +239,7 @@
         dart2jsArgs: _ifParsed('dart2js-args'),
         precompiledPath: _ifParsed('precompiled'),
         reporter: _ifParsed('reporter'),
+        fileReporters: _parseFileReporterOption(),
         coverage: _ifParsed('coverage'),
         pubServePort: _parseOption('pub-serve', int.parse),
         concurrency: _parseOption('concurrency', int.parse),
@@ -273,6 +278,21 @@
     return _wrapFormatException(name, () => parse(value as String));
   }
 
+  Map<String, String> _parseFileReporterOption() =>
+      _parseOption('file-reporter', (value) {
+        if (!value.contains(':')) {
+          throw FormatException(
+              'option must be in the form <reporter>:<filepath>, e.g. '
+              '"json:reports/tests.json"');
+        }
+        final sep = value.indexOf(':');
+        final reporter = value.substring(0, sep);
+        if (!allReporters.containsKey(reporter)) {
+          throw FormatException('"$reporter" is not a supported reporter');
+        }
+        return {reporter: value.substring(sep + 1)};
+      });
+
   /// Runs [parse], and wraps any [FormatException] it throws with additional
   /// information.
   T _wrapFormatException<T>(String name, T Function() parse) {
diff --git a/pkgs/test_core/lib/src/runner/configuration/load.dart b/pkgs/test_core/lib/src/runner/configuration/load.dart
index 732cd11..d188b3e 100644
--- a/pkgs/test_core/lib/src/runner/configuration/load.dart
+++ b/pkgs/test_core/lib/src/runner/configuration/load.dart
@@ -216,6 +216,7 @@
     if (!_runnerConfig) {
       _disallow('pause_after_load');
       _disallow('reporter');
+      _disallow('file_reporters');
       _disallow('concurrency');
       _disallow('names');
       _disallow('plain_names');
@@ -234,6 +235,20 @@
       _error('Unknown reporter "$reporter".', 'reporter');
     }
 
+    var fileReporters = _getMap('file_reporters', key: (keyNode) {
+      _validate(keyNode, 'file_reporters key must be a string',
+          (value) => value is String);
+      final reporter = keyNode.value as String;
+      if (!allReporters.keys.contains(reporter)) {
+        _error('Unknown reporter "$reporter".', 'file_reporters');
+      }
+      return reporter;
+    }, value: (valueNode) {
+      _validate(valueNode, 'file_reporters value must be a string',
+          (value) => value is String);
+      return valueNode.value as String;
+    });
+
     var concurrency = _getInt('concurrency');
 
     // The UI term "platform" corresponds with the implementation term
@@ -258,6 +273,7 @@
         customHtmlTemplatePath: customHtmlTemplatePath,
         runSkipped: runSkipped,
         reporter: reporter,
+        fileReporters: fileReporters,
         concurrency: concurrency,
         runtimes: runtimes,
         chosenPresets: chosenPresets,
diff --git a/pkgs/test_core/lib/src/runner/plugin/platform_helpers.dart b/pkgs/test_core/lib/src/runner/plugin/platform_helpers.dart
index a353d57..c63967c 100644
--- a/pkgs/test_core/lib/src/runner/plugin/platform_helpers.dart
+++ b/pkgs/test_core/lib/src/runner/plugin/platform_helpers.dart
@@ -51,7 +51,8 @@
     'metadata': suiteConfig.metadata.serialize(),
     'asciiGlyphs': Platform.isWindows,
     'path': path,
-    'collectTraces': Configuration.current.reporter == 'json',
+    'collectTraces': Configuration.current.reporter == 'json' ||
+        Configuration.current.fileReporters.containsKey('json'),
     'noRetry': Configuration.current.noRetry,
     'foldTraceExcept': Configuration.current.foldTraceExcept.toList(),
     'foldTraceOnly': Configuration.current.foldTraceOnly.toList(),
diff --git a/pkgs/test_core/lib/src/runner/reporter/multiplex.dart b/pkgs/test_core/lib/src/runner/reporter/multiplex.dart
new file mode 100644
index 0000000..e13c1b4
--- /dev/null
+++ b/pkgs/test_core/lib/src/runner/reporter/multiplex.dart
@@ -0,0 +1,32 @@
+// Copyright (c) 2020, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+import '../reporter.dart';
+
+class MultiplexReporter implements Reporter {
+  Iterable<Reporter> delegates;
+
+  MultiplexReporter(this.delegates);
+
+  @override
+  void cancel() {
+    for (var d in delegates) {
+      d.cancel();
+    }
+  }
+
+  @override
+  void pause() {
+    for (var d in delegates) {
+      d.pause();
+    }
+  }
+
+  @override
+  void resume() {
+    for (var d in delegates) {
+      d.resume();
+    }
+  }
+}