Add support for running the Linux perf tool to collect hardware performance counter data when benchmarking. (#98)

Add a new class, PerfBenchmarkBase, that extends BenchmarkBase with an asynchronous reportPerf() method. reportPerf() runs the benchmark while attached to a "perf stat" process, which measures the run with CPU hardware performance counters and reports those counts alongside the usual run-time score.
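
For example, a benchmark opts in by extending PerfBenchmarkBase instead of BenchmarkBase and awaiting reportPerf(). A minimal sketch (the subclass name and benchmark body are illustrative; the API is the one added in this change):

    import 'package:benchmark_harness/perf_benchmark_harness.dart';

    class MyBenchmark extends PerfBenchmarkBase {
      MyBenchmark() : super('MyBenchmark');

      @override
      void run() {
        // The code being measured goes here.
      }
    }

    Future<void> main() async {
      // On Linux with linux-tools installed, this reports CpuCycles,
      // MajorPageFaults, and totalIterations in addition to RunTime.
      await MyBenchmark().reportPerf();
    }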
diff --git a/CHANGELOG.md b/CHANGELOG.md
index ce0c0eb..ec69baf 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,6 +1,12 @@
-## 2.2.3-wip
+## 2.2.3
 
 - Require Dart 3.2.
+- Add a `PerfBenchmarkBase` class, which runs the `perf stat` command from
+  linux-tools on a benchmark and reports metrics from the hardware
+  performance counters and the iteration count, as well as the run time
+  measurement reported by `BenchmarkBase`.
+- Add optional `metric` and `unit` parameters to `ScoreEmitter.emit`; this is
+  a breaking change for classes that implement the `ScoreEmitter` interface.
 
 ## 2.2.2
 
diff --git a/integration_test/perf_benchmark_test.dart b/integration_test/perf_benchmark_test.dart
new file mode 100644
index 0000000..339777f
--- /dev/null
+++ b/integration_test/perf_benchmark_test.dart
@@ -0,0 +1,28 @@
+// Copyright (c) 2024, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+import 'package:benchmark_harness/perf_benchmark_harness.dart';
+import 'package:test/test.dart';
+
+class PerfBenchmark extends PerfBenchmarkBase {
+  PerfBenchmark(super.name);
+  int runCount = 0;
+
+  @override
+  void run() {
+    runCount++;
+    // Busy work: i - i is always zero, so only the increment above counts.
+    for (final i in List.filled(1000, 7)) {
+      runCount += i - i;
+    }
+  }
+}
+
+void main() {
+  test('run is called', () async {
+    final benchmark = PerfBenchmark('ForLoop');
+    await benchmark.reportPerf();
+    expect(benchmark.runCount, greaterThan(0));
+  });
+}
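
(This test lives under integration_test/ rather than test/ because it shells out to mkfifo and perf, so it can only pass on Linux hosts with linux-tools installed.)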
diff --git a/lib/benchmark_harness.dart b/lib/benchmark_harness.dart
index ee1563c..b46a36f 100644
--- a/lib/benchmark_harness.dart
+++ b/lib/benchmark_harness.dart
@@ -3,5 +3,5 @@
 // BSD-style license that can be found in the LICENSE file.
 
 export 'src/async_benchmark_base.dart';
-export 'src/benchmark_base.dart';
+export 'src/benchmark_base.dart' show BenchmarkBase;
 export 'src/score_emitter.dart';
diff --git a/lib/perf_benchmark_harness.dart b/lib/perf_benchmark_harness.dart
new file mode 100644
index 0000000..3de8329
--- /dev/null
+++ b/lib/perf_benchmark_harness.dart
@@ -0,0 +1,7 @@
+// Copyright (c) 2024, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+export 'src/perf_benchmark_base_stub.dart'
+    if (dart.library.io) 'src/perf_benchmark_base.dart';
+export 'src/score_emitter.dart';
diff --git a/lib/src/async_benchmark_base.dart b/lib/src/async_benchmark_base.dart
index 1472ee7..b342a3d 100644
--- a/lib/src/async_benchmark_base.dart
+++ b/lib/src/async_benchmark_base.dart
@@ -64,6 +64,6 @@
 
   /// Run the benchmark and report results on the [emitter].
   Future<void> report() async {
-    emitter.emit(name, await measure());
+    emitter.emit(name, await measure(), unit: 'us.');
   }
 }
diff --git a/lib/src/benchmark_base.dart b/lib/src/benchmark_base.dart
index ad1bb91..51a89bb 100644
--- a/lib/src/benchmark_base.dart
+++ b/lib/src/benchmark_base.dart
@@ -6,7 +6,7 @@
 
 import 'score_emitter.dart';
 
-const int _minimumMeasureDurationMillis = 2000;
+const int minimumMeasureDurationMillis = 2000;
 
 class BenchmarkBase {
   final String name;
@@ -40,56 +40,59 @@
 
-  /// Measures the score for this benchmark by executing it enough times
-  /// to reach [minimumMillis].
-  static _Measurement _measureForImpl(void Function() f, int minimumMillis) {
-    final minimumMicros = minimumMillis * 1000;
-    // If running a long measurement permit some amount of measurement jitter
-    // to avoid discarding results that are almost good, but not quite there.
-    final allowedJitter =
-        minimumMillis < 1000 ? 0 : (minimumMicros * 0.1).floor();
-    var iter = 2;
-    final watch = Stopwatch()..start();
-    while (true) {
-      watch.reset();
-      for (var i = 0; i < iter; i++) {
-        f();
-      }
-      final elapsed = watch.elapsedMicroseconds;
-      final measurement = _Measurement(elapsed, iter);
-      if (measurement.elapsedMicros >= (minimumMicros - allowedJitter)) {
-        return measurement;
-      }
-
-      iter = measurement.estimateIterationsNeededToReach(
-          minimumMicros: minimumMicros);
-    }
-  }
-
   /// Measures the score for this benchmark by executing it repeatedly until
   /// time minimum has been reached.
   static double measureFor(void Function() f, int minimumMillis) =>
-      _measureForImpl(f, minimumMillis).score;
+      measureForImpl(f, minimumMillis).score;
 
   /// Measures the score for the benchmark and returns it.
   double measure() {
     setup();
     // Warmup for at least 100ms. Discard result.
-    _measureForImpl(warmup, 100);
+    measureForImpl(warmup, 100);
     // Run the benchmark for at least 2000ms.
-    var result = _measureForImpl(exercise, _minimumMeasureDurationMillis);
+    var result = measureForImpl(exercise, minimumMeasureDurationMillis);
     teardown();
     return result.score;
   }
 
   void report() {
-    emitter.emit(name, measure());
+    emitter.emit(name, measure(), unit: 'us.');
   }
 }
 
-class _Measurement {
+/// Measures the score for this benchmark by executing it enough times
+/// to reach [minimumMillis].
+Measurement measureForImpl(void Function() f, int minimumMillis) {
+  final minimumMicros = minimumMillis * 1000;
+  // If running a long measurement, permit some amount of measurement jitter
+  // to avoid discarding results that are almost good, but not quite there.
+  final allowedJitter =
+      minimumMillis < 1000 ? 0 : (minimumMicros * 0.1).floor();
+  var iter = 2;
+  var totalIterations = iter;
+  final watch = Stopwatch()..start();
+  while (true) {
+    watch.reset();
+    for (var i = 0; i < iter; i++) {
+      f();
+    }
+    final elapsed = watch.elapsedMicroseconds;
+    final measurement = Measurement(elapsed, iter, totalIterations);
+    if (measurement.elapsedMicros >= (minimumMicros - allowedJitter)) {
+      return measurement;
+    }
+
+    iter = measurement.estimateIterationsNeededToReach(
+        minimumMicros: minimumMicros);
+    totalIterations += iter;
+  }
+}
+
+class Measurement {
   final int elapsedMicros;
   final int iterations;
+  final int totalIterations;
 
-  _Measurement(this.elapsedMicros, this.iterations);
+  Measurement(this.elapsedMicros, this.iterations, this.totalIterations);
 
   double get score => elapsedMicros / iterations;
 
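
This refactor makes Measurement and measureForImpl public so that PerfBenchmarkBase (below) can reuse the timing loop and read the iteration totals. For orientation, score is the mean microseconds per iteration; a hypothetical example with made-up numbers:

    // 2,400,000 us elapsed over 480 iterations in the final timing window;
    // 700 iterations were run in total, including earlier, too-short windows.
    final m = Measurement(2400000, 480, 700);
    print(m.score); // 5000.0 us per iteration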
diff --git a/lib/src/perf_benchmark_base.dart b/lib/src/perf_benchmark_base.dart
new file mode 100644
index 0000000..3c4a5a1
--- /dev/null
+++ b/lib/src/perf_benchmark_base.dart
@@ -0,0 +1,132 @@
+// Copyright (c) 2024, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:convert';
+import 'dart:io';
+
+import 'benchmark_base.dart';
+import 'score_emitter.dart';
+
+class PerfBenchmarkBase extends BenchmarkBase {
+  late final Directory fifoDir;
+  late final String perfControlFifo;
+  late final RandomAccessFile openedFifo;
+  late final String perfControlAck;
+  late final RandomAccessFile openedAck;
+  late final Process perfProcess;
+  late final List<String> perfProcessArgs;
+
+  PerfBenchmarkBase(super.name, {super.emitter = const PrintEmitter()});
+
+  Future<void> _createFifos() async {
+    perfControlFifo = '${fifoDir.path}/perf_control_fifo';
+    perfControlAck = '${fifoDir.path}/perf_control_ack';
+    for (final path in [perfControlFifo, perfControlAck]) {
+      final fifoResult = await Process.run('mkfifo', [path]);
+      if (fifoResult.exitCode != 0) {
+        throw ProcessException('mkfifo', [path],
+            'Cannot create fifo: ${fifoResult.stderr}', fifoResult.exitCode);
+      }
+    }
+  }
+
+  Future<void> _startPerfStat() async {
+    await _createFifos();
+    perfProcessArgs = [
+      'stat',
+      '--delay=-1',
+      '--control=fifo:$perfControlFifo,$perfControlAck',
+      '-x\\t', // Use the literal two characters \t as the field separator.
+      '--pid=$pid',
+    ];
+    perfProcess = await Process.start('perf', perfProcessArgs);
+  }
+
+  void _enablePerf() {
+    openedFifo = File(perfControlFifo).openSync(mode: FileMode.writeOnly);
+    openedAck = File(perfControlAck).openSync();
+    openedFifo.writeStringSync('enable\n');
+    _waitForAck();
+  }
+
+  Future<void> _stopPerfStat(int totalIterations) async {
+    openedFifo.writeStringSync('disable\n');
+    openedFifo.closeSync();
+    _waitForAck();
+    openedAck.closeSync();
+    perfProcess.kill(ProcessSignal.sigint);
+    unawaited(perfProcess.stdout.drain());
+    final lines = await perfProcess.stderr
+        .transform(utf8.decoder)
+        .transform(const LineSplitter())
+        .toList();
+    final exitCode = await perfProcess.exitCode;
+    // Exit code from perf is -SIGINT when terminated with SIGINT.
+    if (exitCode != 0 && exitCode != -ProcessSignal.sigint.signalNumber) {
+      throw ProcessException(
+          'perf', perfProcessArgs, lines.join('\n'), exitCode);
+    }
+
+    const metrics = {
+      'cycles': 'CpuCycles',
+      'page-faults': 'MajorPageFaults',
+    };
+    for (final line in lines) {
+      if (line.split('\\t')
+          case [
+            String counter,
+            _,
+            String event && ('cycles' || 'page-faults'),
+            ...
+          ]) {
+        emitter.emit(name, double.parse(counter) / totalIterations,
+            metric: metrics[event]!);
+      }
+    }
+    emitter.emit('$name.totalIterations', totalIterations.toDouble(),
+        metric: 'Count');
+  }
+
+  /// Measures the score for the benchmark and returns it.
+  Future<double> measurePerf() async {
+    Measurement result;
+    setup();
+    try {
+      fifoDir = await Directory.systemTemp.createTemp('fifo');
+      try {
+        // Warmup for at least 100ms. Discard result.
+        measureForImpl(warmup, 100);
+        await _startPerfStat();
+        try {
+          _enablePerf();
+          // Run the benchmark for at least 2000ms.
+          result = measureForImpl(exercise, minimumMeasureDurationMillis);
+          await _stopPerfStat(result.totalIterations);
+        } catch (_) {
+          perfProcess.kill(ProcessSignal.sigkill);
+          rethrow;
+        }
+      } finally {
+        await fifoDir.delete(recursive: true);
+      }
+    } finally {
+      teardown();
+    }
+    return result.score;
+  }
+
+  Future<void> reportPerf() async {
+    emitter.emit(name, await measurePerf(), unit: 'us.');
+  }
+
+  void _waitForAck() {
+    // Perf writes 'ack\n\x00' to the acknowledgement fifo.
+    const ackLength = 'ack\n\x00'.length;
+    var ack = <int>[...openedAck.readSync(ackLength)];
+    while (ack.length < ackLength) {
+      ack.addAll(openedAck.readSync(ackLength - ack.length));
+    }
+  }
+}
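
The fifo handshake follows perf's --control protocol: the harness writes 'enable' and 'disable' commands to the control fifo and blocks until perf writes its 'ack\n\x00' reply to the ack fifo, so the counters cover only the measurement loop. With the default PrintEmitter, a successful reportPerf() run might print something like this (values illustrative; the metric names come from the code above):

    ForLoop(CpuCycles): 2416000.0
    ForLoop(MajorPageFaults): 0.0
    ForLoop.totalIterations(Count): 700.0
    ForLoop(RunTime): 5000.0 us.

The counter metrics are emitted inside measurePerf(), so they precede the RunTime line emitted by reportPerf().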
diff --git a/lib/src/perf_benchmark_base_stub.dart b/lib/src/perf_benchmark_base_stub.dart
new file mode 100644
index 0000000..81aa0ea
--- /dev/null
+++ b/lib/src/perf_benchmark_base_stub.dart
@@ -0,0 +1,18 @@
+// Copyright (c) 2024, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+import 'benchmark_base.dart';
+import 'score_emitter.dart';
+
+class PerfBenchmarkBase extends BenchmarkBase {
+  PerfBenchmarkBase(super.name, {super.emitter = const PrintEmitter()});
+
+  Future<double> measurePerf() async {
+    return super.measure();
+  }
+
+  Future<void> reportPerf() async {
+    super.report();
+  }
+}
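
On platforms without dart:io (for example, the web), the conditional export in lib/perf_benchmark_harness.dart selects this stub, so measurePerf() and reportPerf() degrade gracefully to the plain BenchmarkBase timing path and report only the RunTime metric.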
diff --git a/lib/src/score_emitter.dart b/lib/src/score_emitter.dart
index f7138d3..43ea7aa 100644
--- a/lib/src/score_emitter.dart
+++ b/lib/src/score_emitter.dart
@@ -3,14 +3,16 @@
 // BSD-style license that can be found in the LICENSE file.
 
 abstract class ScoreEmitter {
-  void emit(String testName, double value);
+  void emit(String testName, double value,
+      {String metric = 'RunTime', String unit});
 }
 
 class PrintEmitter implements ScoreEmitter {
   const PrintEmitter();
 
   @override
-  void emit(String testName, double value) {
-    print('$testName(RunTime): $value us.');
+  void emit(String testName, double value,
+      {String metric = 'RunTime', String unit = ''}) {
+    print(['$testName($metric):', value, if (unit.isNotEmpty) unit].join(' '));
   }
 }
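
Because the interface gained named parameters, existing ScoreEmitter implementations need a signature update (as in the test emitter below), and each implementation restates its own defaults, since defaults declared on the abstract method don't apply to overrides. A hypothetical JSON-lines emitter for the widened interface might look like:

    import 'package:benchmark_harness/benchmark_harness.dart';

    /// Hypothetical emitter that prints one JSON object per score.
    class JsonEmitter implements ScoreEmitter {
      const JsonEmitter();

      @override
      void emit(String testName, double value,
          {String metric = 'RunTime', String unit = ''}) {
        print('{"name": "$testName", "metric": "$metric", '
            '"value": $value, "unit": "$unit"}');
      }
    }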
diff --git a/pubspec.yaml b/pubspec.yaml
index adc5f6d..465d274 100644
--- a/pubspec.yaml
+++ b/pubspec.yaml
@@ -1,5 +1,5 @@
 name: benchmark_harness
-version: 2.2.3-wip
+version: 2.2.3
 description: The official Dart project benchmark harness.
 repository: https://github.com/dart-lang/benchmark_harness
 
diff --git a/test/result_emitter_test.dart b/test/result_emitter_test.dart
index e2cd1ea..bfbce4e 100644
--- a/test/result_emitter_test.dart
+++ b/test/result_emitter_test.dart
@@ -13,7 +13,8 @@
   int emitCount = 0;
 
   @override
-  void emit(String name, double value) {
+  void emit(String name, double value,
+      {String metric = 'RunTime', String unit = ''}) {
     emitCount++;
   }
 }