Put PerfBenchmarkBase into a new class, don't modify BenchmarkBase
Once PerfBenchmarkBase itself starts, attaches, and stops the
"perf stat" subprocess, measure() and report() will need to be
async functions, so add separate measurePerf() and reportPerf() functions.

This version still requires "perf stat" to be wrapped around the
benchmark command.
whesse committed Feb 8, 2024
1 parent c7a2166 commit bb86037
Showing 4 changed files with 114 additions and 35 deletions.
5 changes: 3 additions & 2 deletions lib/benchmark_harness.dart
@@ -3,6 +3,7 @@
 // BSD-style license that can be found in the LICENSE file.
 
 export 'src/async_benchmark_base.dart';
-export 'src/benchmark_base.dart'
-    if (dart.library.io) 'src/benchmark_base_perf.dart';
+export 'src/benchmark_base.dart' show BenchmarkBase;
+export 'src/perf_benchmark_base_stub.dart'
+    if (dart.library.io) 'src/perf_benchmark_base.dart';
 export 'src/score_emitter.dart';
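The conditional export means importers always see a PerfBenchmarkBase: the dart:io implementation added below when available, otherwise perf_benchmark_base_stub.dart, which is not part of this diff. A plausible stub (a sketch, not the committed code) would simply delegate to the synchronous paths:

// Hypothetical sketch of lib/src/perf_benchmark_base_stub.dart, which is
// not shown in this commit: without dart:io there is no perf subprocess
// to control, so the perf entry points fall back to plain measurement.
import 'benchmark_base.dart';
import 'score_emitter.dart';

class PerfBenchmarkBase extends BenchmarkBase {
  PerfBenchmarkBase(super.name, {super.emitter = const PrintEmitter()});

  Future<double> measurePerf() async => measure();

  Future<void> reportPerf() async => report();
}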
67 changes: 34 additions & 33 deletions lib/src/benchmark_base.dart
@@ -6,7 +6,7 @@ import 'dart:math' as math;
 
 import 'score_emitter.dart';
 
-const int _minimumMeasureDurationMillis = 2000;
+const int minimumMeasureDurationMillis = 2000;
 
 class BenchmarkBase {
   final String name;
@@ -47,46 +47,19 @@ class BenchmarkBase {
 
-  /// Measures the score for this benchmark by executing it enough times
-  /// to reach [minimumMillis].
-  static _Measurement _measureForImpl(void Function() f, int minimumMillis) {
-    final minimumMicros = minimumMillis * 1000;
-    // If running a long measurement permit some amount of measurement jitter
-    // to avoid discarding results that are almost good, but not quite there.
-    final allowedJitter =
-        minimumMillis < 1000 ? 0 : (minimumMicros * 0.1).floor();
-    var iter = 2;
-    var totalIterations = iter;
-    final watch = Stopwatch()..start();
-    while (true) {
-      watch.reset();
-      for (var i = 0; i < iter; i++) {
-        f();
-      }
-      final elapsed = watch.elapsedMicroseconds;
-      final measurement = _Measurement(elapsed, iter, totalIterations);
-      if (measurement.elapsedMicros >= (minimumMicros - allowedJitter)) {
-        return measurement;
-      }
-
-      iter = measurement.estimateIterationsNeededToReach(
-          minimumMicros: minimumMicros);
-      totalIterations += iter;
-    }
-  }
   /// Measures the score for this benchmark by executing it repeatedly until
   /// time minimum has been reached.
   static double measureFor(void Function() f, int minimumMillis) =>
-      _measureForImpl(f, minimumMillis).score;
+      measureForImpl(f, minimumMillis).score;
 
   /// Measures the score for the benchmark and returns it.
   double measure() {
     setup();
     // Warmup for at least 100ms. Discard result.
-    _measureForImpl(warmup, 100);
-    beforeTimedRuns();
+    measureForImpl(warmup, 100);
     // Run the benchmark for at least 2000ms.
-    var result = _measureForImpl(exercise, _minimumMeasureDurationMillis);
-    afterTimedRuns(result.totalIterations);
+    var result = measureForImpl(exercise, minimumMeasureDurationMillis);
     teardown();
     return result.score;
   }
@@ -96,12 +69,40 @@
   }
 }
 
-class _Measurement {
+/// Measures the score for this benchmark by executing it enough times
+/// to reach [minimumMillis].
+Measurement measureForImpl(void Function() f, int minimumMillis) {
+  final minimumMicros = minimumMillis * 1000;
+  // If running a long measurement permit some amount of measurement jitter
+  // to avoid discarding results that are almost good, but not quite there.
+  final allowedJitter =
+      minimumMillis < 1000 ? 0 : (minimumMicros * 0.1).floor();
+  var iter = 2;
+  var totalIterations = iter;
+  final watch = Stopwatch()..start();
+  while (true) {
+    watch.reset();
+    for (var i = 0; i < iter; i++) {
+      f();
+    }
+    final elapsed = watch.elapsedMicroseconds;
+    final measurement = Measurement(elapsed, iter, totalIterations);
+    if (measurement.elapsedMicros >= (minimumMicros - allowedJitter)) {
+      return measurement;
+    }
+
+    iter = measurement.estimateIterationsNeededToReach(
+        minimumMicros: minimumMicros);
+    totalIterations += iter;
+  }
+}
+
+class Measurement {
   final int elapsedMicros;
   final int iterations;
   final int totalIterations;
 
-  _Measurement(this.elapsedMicros, this.iterations, this.totalIterations);
+  Measurement(this.elapsedMicros, this.iterations, this.totalIterations);
 
   double get score => elapsedMicros / iterations;
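The rest of Measurement, including estimateIterationsNeededToReach, is folded out of this view. From its call site in measureForImpl, a plausible shape is the following (hypothetical reconstruction; the committed body may differ):

import 'dart:math' as math;

// Sketch of the part of Measurement hidden behind the fold above.
class Measurement {
  final int elapsedMicros;
  final int iterations;
  final int totalIterations;

  Measurement(this.elapsedMicros, this.iterations, this.totalIterations);

  double get score => elapsedMicros / iterations;

  // Extrapolates from the measured rate, growing the iteration count by
  // at least 1.5x per round so measureForImpl converges in a few passes.
  int estimateIterationsNeededToReach({required int minimumMicros}) =>
      elapsedMicros == 0
          ? iterations * 1000
          : (iterations * math.max(minimumMicros / elapsedMicros, 1.5)).ceil();
}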
1 change: 1 addition & 0 deletions lib/src/benchmark_base_perf.dart
@@ -17,6 +17,7 @@ class BenchmarkBase extends base.BenchmarkBase {
   late RandomAccessFile openedFifo;
   String? perfControlAck;
   late RandomAccessFile openedAck;
+  late Process perfProcess;
 
   @override
   void beforeTimedRuns() {
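The new perfProcess field is groundwork for the follow-up described in the commit message, where the harness starts and stops "perf stat" itself rather than being wrapped by it. A sketch of what that step might look like (nothing below is in this commit; the function name is hypothetical, and the flags assume perf's fifo control interface):

import 'dart:io';

// Launch `perf stat` attached to the current process, with counting
// disabled (--delay=-1) until 'enable' is written to the control fifo.
Future<Process> startPerfStat(String fifoPath, String ackPath) =>
    Process.start('perf', [
      'stat',
      '--pid=$pid',
      '--delay=-1',
      '--control=fifo:$fifoPath,$ackPath',
    ]);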
76 changes: 76 additions & 0 deletions lib/src/perf_benchmark_base.dart
@@ -0,0 +1,76 @@
// Copyright (c) 2024, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

import 'dart:io';

import 'benchmark_base.dart';
import 'score_emitter.dart';

const perfControlFifoVariable = 'PERF_CONTROL_FIFO';
const perfControlAckVariable = 'PERF_CONTROL_ACK';

class PerfBenchmarkBase extends BenchmarkBase {
  PerfBenchmarkBase(super.name, {super.emitter = const PrintEmitter()});

  String? perfControlFifo;
  late RandomAccessFile openedFifo;
  String? perfControlAck;
  late RandomAccessFile openedAck;
  late Process perfProcess;

  Future<void> _startPerfStat() async {
    perfControlFifo = Platform.environment[perfControlFifoVariable];
    perfControlAck = Platform.environment[perfControlAckVariable];
    print(perfControlFifo);
    if (perfControlFifo != null) {
      openedFifo = File(perfControlFifo!).openSync(mode: FileMode.writeOnly);
      if (perfControlAck != null) {
        openedAck = File(perfControlAck!).openSync();
        openedFifo.writeStringSync('enable\n');
        _waitForAck();
      } else {
        openedFifo.writeStringSync('enable\n');
      }
    }
  }

  Future<void> _stopPerfStat(int totalIterations) async {
    if (perfControlFifo != null) {
      openedFifo.writeStringSync('disable\n');
      openedFifo.closeSync();
      if (perfControlAck != null) {
        _waitForAck();
        openedAck.closeSync();
      }
      emitter.emit('$name.totalIterations', totalIterations.toDouble());
    }
  }

  /// Measures the score for the benchmark and returns it.
  Future<double> measurePerf() async {
    setup();
    // Warmup for at least 100ms. Discard result.
    measureForImpl(warmup, 100);
    await _startPerfStat();
    // Run the benchmark for at least 2000ms.
    var result = measureForImpl(exercise, minimumMeasureDurationMillis);
    await _stopPerfStat(result.totalIterations);
    teardown();
    return result.score;
  }

  Future<void> reportPerf() async {
    emitter.emit(name, await measurePerf());
  }

  void _waitForAck() {
    var ack = <int>[...openedAck.readSync(5)];
    while (ack.length < 5) {
      ack.addAll(openedAck.readSync(5 - ack.length));
    }
    if (String.fromCharCodes(ack) != 'ack\n\x00') {
      print('Ack was $ack');
    }
  }
}
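For orientation, here is how a benchmark could adopt the new class. Everything below is a sketch, not code from this commit: the benchmark class, workload, fifo paths, and file name are hypothetical, and the perf invocation assumes perf's fifo control interface, which the commit message says must still wrap the benchmark command.

// Hypothetical usage, e.g.:
//
//   mkfifo perf.fifo perf_ack.fifo
//   PERF_CONTROL_FIFO=perf.fifo PERF_CONTROL_ACK=perf_ack.fifo \
//       perf stat --delay=-1 --control=fifo:perf.fifo,perf_ack.fifo -- \
//       dart run my_benchmark.dart
import 'package:benchmark_harness/benchmark_harness.dart';

class ListCopyBenchmark extends PerfBenchmarkBase {
  ListCopyBenchmark() : super('ListCopy');

  final from = List<int>.generate(1000, (i) => i);

  @override
  void run() {
    // The workload under measurement.
    List<int>.of(from);
  }
}

Future<void> main() async {
  // Enables perf counters only around the timed runs, then emits the
  // score and, when the control fifo is configured, the iteration count.
  await ListCopyBenchmark().reportPerf();
}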
