Unverified commit 5bb55227, authored by Mouad Debbar, committed by GitHub

[web] Add benchmarks for text layout (#51663)

parent fc5350ed
......@@ -9,7 +9,7 @@ import 'recorder.dart';
/// Repeatedly paints a grid of rectangles.
///
/// Measures the performance of the `drawRect` operation.
class BenchDrawRect extends RawRecorder {
class BenchDrawRect extends SceneBuilderRecorder {
BenchDrawRect() : super(name: benchmarkName);
static const String benchmarkName = 'draw_rect';
......
// Copyright 2014 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:ui';
import 'recorder.dart';
/// Monotonically increasing counter folded into each paragraph's text so that
/// every generated paragraph is unique and never hits the layout cache.
int _counter = 0;

/// Builds a throwaway [Paragraph] whose text is different on every call.
Paragraph _generateParagraph() {
  final ParagraphBuilder builder =
      ParagraphBuilder(ParagraphStyle(fontFamily: 'sans-serif'));
  builder.pushStyle(TextStyle(fontSize: 12.0));
  builder.addText(
    '$_counter Lorem ipsum dolor sit amet, consectetur adipiscing elit, '
    'sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.',
  );
  _counter += 1;
  return builder.build();
}
/// Repeatedly lays out a paragraph using the DOM measurement approach.
///
/// Creates a different paragraph each time in order to avoid hitting the cache.
class BenchTextDomLayout extends RawRecorder {
  BenchTextDomLayout() : super(name: benchmarkName);

  static const String benchmarkName = 'text_dom_layout';

  @override
  void body(Profile profile) {
    // A fresh, unique paragraph per iteration guarantees a cache miss.
    final Paragraph uncached = _generateParagraph();
    profile.record(
      'layout',
      () => uncached.layout(const ParagraphConstraints(width: double.infinity)),
    );
  }
}
/// Repeatedly lays out a paragraph using the DOM measurement approach.
///
/// Uses the same paragraph content to make sure we hit the cache. It doesn't
/// use the same paragraph instance because the layout method will shortcircuit
/// in that case.
class BenchTextDomCachedLayout extends RawRecorder {
  BenchTextDomCachedLayout() : super(name: benchmarkName);

  static const String benchmarkName = 'text_dom_cached_layout';

  // Built once; every call to `builder.build()` yields a paragraph with
  // identical content, so layout results should come from the cache.
  final ParagraphBuilder builder =
      ParagraphBuilder(ParagraphStyle(fontFamily: 'sans-serif'))
        ..pushStyle(TextStyle(fontSize: 12.0))
        ..addText(
          'Lorem ipsum dolor sit amet, consectetur adipiscing elit, '
          'sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.',
        );

  @override
  void body(Profile profile) {
    final Paragraph cachedParagraph = builder.build();
    profile.record(
      'layout',
      () => cachedParagraph.layout(
        const ParagraphConstraints(width: double.infinity),
      ),
    );
  }
}
......@@ -27,7 +27,7 @@ import 'test_data.dart';
///
/// This reproduces the bug where we render more than visible causing
/// performance issues: https://github.com/flutter/flutter/issues/48516
class BenchTextOutOfPictureBounds extends RawRecorder {
class BenchTextOutOfPictureBounds extends SceneBuilderRecorder {
BenchTextOutOfPictureBounds() : super(name: benchmarkName) {
const Color red = Color.fromARGB(255, 255, 0, 0);
const Color green = Color.fromARGB(255, 0, 255, 0);
......
......@@ -44,6 +44,79 @@ Duration timeAction(VoidCallback action) {
return stopwatch.elapsed;
}
/// Base class for benchmark recorders.
///
/// Each benchmark recorder has a [name] and a [run] method at a minimum.
abstract class Recorder {
  // Private constructor: only subclasses declared in this library can extend
  // [Recorder], keeping the set of recorder kinds closed.
  Recorder._(this.name);

  /// The name of the benchmark.
  ///
  /// The results displayed in the Flutter Dashboard will use this name as a
  /// prefix.
  final String name;

  /// The implementation of the benchmark that will produce a [Profile].
  ///
  /// Subclasses decide how data is collected (raw code, scene building, or
  /// widget pumping) and complete the returned future when done.
  Future<Profile> run();
}
/// A recorder for benchmarking raw execution of Dart code.
///
/// This is useful for benchmarks that don't need frames or widgets.
///
/// Example:
///
/// ```
/// class BenchForLoop extends RawRecorder {
///   BenchForLoop() : super(name: benchmarkName);
///
///   static const String benchmarkName = 'for_loop';
///
///   @override
///   void body(Profile profile) {
///     profile.record('loop', () {
///       double x = 0;
///       for (int i = 0; i < 10000000; i++) {
///         x *= 1.5;
///       }
///     });
///   }
/// }
/// ```
abstract class RawRecorder extends Recorder {
  RawRecorder({@required String name}) : super._(name);

  /// One-time setup hook invoked before the first run of [body].
  void setUpAll() {}

  /// One-time cleanup hook invoked after the last run of [body].
  void tearDownAll() {}

  /// The body of the benchmark.
  ///
  /// This is the part that records measurements of the benchmark into
  /// [profile].
  void body(Profile profile);

  @override
  @nonVirtual
  Future<Profile> run() async {
    final Profile profile = Profile(name: name);
    setUpAll();
    bool keepGoing = true;
    while (keepGoing) {
      // Yield to the event loop before each iteration so the benchmark does
      // not starve other scheduled tasks.
      await Future<void>.delayed(Duration.zero);
      body(profile);
      keepGoing = profile.shouldContinue();
    }
    tearDownAll();
    return profile;
  }
}
/// A recorder for benchmarking interactions with the engine without the
/// framework by directly exercising [SceneBuilder].
///
......@@ -52,13 +125,13 @@ Duration timeAction(VoidCallback action) {
/// Example:
///
/// ```
/// class BenchDrawCircle extends RawRecorder {
/// class BenchDrawCircle extends SceneBuilderRecorder {
/// BenchDrawCircle() : super(name: benchmarkName);
///
/// static const String benchmarkName = 'draw_circle';
///
/// @override
/// void onDrawFrame(SceneBuilder sceneBuilder, FrameMetricsBuilder metricsBuilder) {
/// void onDrawFrame(SceneBuilder sceneBuilder) {
/// final PictureRecorder pictureRecorder = PictureRecorder();
/// final Canvas canvas = Canvas(pictureRecorder);
/// final Paint paint = Paint()..color = const Color.fromARGB(255, 255, 0, 0);
......@@ -69,8 +142,8 @@ Duration timeAction(VoidCallback action) {
/// }
/// }
/// ```
abstract class RawRecorder extends Recorder {
RawRecorder({@required String name}) : super._(name);
abstract class SceneBuilderRecorder extends Recorder {
SceneBuilderRecorder({@required String name}) : super._(name);
/// Called from [Window.onBeginFrame].
@mustCallSuper
......@@ -81,39 +154,31 @@ abstract class RawRecorder extends Recorder {
/// An implementation should exercise the [sceneBuilder] to build a frame.
/// However, it must not call [SceneBuilder.build] or [Window.render].
/// Instead the benchmark harness will call them and time them appropriately.
///
/// The callback is given a [FrameMetricsBuilder] that can be populated
/// with various frame-related metrics, such as paint time and layout time.
void onDrawFrame(SceneBuilder sceneBuilder);
@override
Future<Profile> run() {
final Completer<Profile> profileCompleter = Completer<Profile>();
final Profile profile = Profile(name: name);
window.onBeginFrame = (_) {
onBeginFrame();
};
window.onDrawFrame = () {
Duration sceneBuildDuration;
Duration windowRenderDuration;
final Duration drawFrameDuration = timeAction(() {
profile.record('drawFrameDuration', () {
final SceneBuilder sceneBuilder = SceneBuilder();
onDrawFrame(sceneBuilder);
sceneBuildDuration = timeAction(() {
profile.record('sceneBuildDuration', () {
final Scene scene = sceneBuilder.build();
windowRenderDuration = timeAction(() {
profile.record('windowRenderDuration', () {
window.render(scene);
});
});
});
_frames.add(FrameMetrics._(
drawFrameDuration: drawFrameDuration,
sceneBuildDuration: sceneBuildDuration,
windowRenderDuration: windowRenderDuration,
));
if (_shouldContinue()) {
if (profile.shouldContinue()) {
window.scheduleFrame();
} else {
final Profile profile = _generateProfile();
profileCompleter.complete(profile);
}
};
......@@ -183,7 +248,8 @@ abstract class RawRecorder extends Recorder {
/// }
/// }
/// ```
abstract class WidgetRecorder extends Recorder implements _RecordingWidgetsBindingListener {
abstract class WidgetRecorder extends Recorder
implements RecordingWidgetsBindingListener {
WidgetRecorder({@required String name}) : super._(name);
/// Creates a widget to be benchmarked.
......@@ -194,6 +260,9 @@ abstract class WidgetRecorder extends Recorder implements _RecordingWidgetsBindi
/// low.
Widget createWidget();
@override
Profile profile;
final Completer<Profile> _profileCompleter = Completer<Profile>();
Stopwatch _drawFrameStopwatch;
......@@ -205,15 +274,11 @@ abstract class WidgetRecorder extends Recorder implements _RecordingWidgetsBindi
@override
void _frameDidDraw() {
_frames.add(FrameMetrics._(
drawFrameDuration: _drawFrameStopwatch.elapsed,
sceneBuildDuration: null,
windowRenderDuration: null,
));
if (_shouldContinue()) {
profile.addDataPoint('drawFrameDuration', _drawFrameStopwatch.elapsed);
if (profile.shouldContinue()) {
window.scheduleFrame();
} else {
final Profile profile = _generateProfile();
_profileCompleter.complete(profile);
}
}
......@@ -225,10 +290,15 @@ abstract class WidgetRecorder extends Recorder implements _RecordingWidgetsBindi
@override
Future<Profile> run() {
profile = Profile(name: name);
final _RecordingWidgetsBinding binding =
_RecordingWidgetsBinding.ensureInitialized();
final Widget widget = createWidget();
binding._beginRecording(this, widget);
_profileCompleter.future.whenComplete(() {
profile = null;
});
return _profileCompleter.future;
}
}
......@@ -240,7 +310,8 @@ abstract class WidgetRecorder extends Recorder implements _RecordingWidgetsBindi
/// another frame that clears the screen. It repeats this process, measuring the
/// performance of frames that render the widget and ignoring the frames that
/// clear the screen.
abstract class WidgetBuildRecorder extends Recorder implements _RecordingWidgetsBindingListener {
abstract class WidgetBuildRecorder extends Recorder
implements RecordingWidgetsBindingListener {
WidgetBuildRecorder({@required String name}) : super._(name);
/// Creates a widget to be benchmarked.
......@@ -250,6 +321,9 @@ abstract class WidgetBuildRecorder extends Recorder implements _RecordingWidgets
/// consider using [WidgetRecorder].
Widget createWidget();
@override
Profile profile;
final Completer<Profile> _profileCompleter = Completer<Profile>();
Stopwatch _drawFrameStopwatch;
......@@ -279,17 +353,13 @@ abstract class WidgetBuildRecorder extends Recorder implements _RecordingWidgets
void _frameDidDraw() {
// Only record frames that show the widget.
if (_showWidget) {
_frames.add(FrameMetrics._(
drawFrameDuration: _drawFrameStopwatch.elapsed,
sceneBuildDuration: null,
windowRenderDuration: null,
));
profile.addDataPoint('drawFrameDuration', _drawFrameStopwatch.elapsed);
}
if (_shouldContinue()) {
if (profile.shouldContinue()) {
_showWidget = !_showWidget;
_hostState._setStateTrampoline();
} else {
final Profile profile = _generateProfile();
_profileCompleter.complete(profile);
}
}
......@@ -301,9 +371,14 @@ abstract class WidgetBuildRecorder extends Recorder implements _RecordingWidgets
@override
Future<Profile> run() {
profile = Profile(name: name);
final _RecordingWidgetsBinding binding =
_RecordingWidgetsBinding.ensureInitialized();
binding._beginRecording(this, _WidgetBuildRecorderHost(this));
_profileCompleter.future.whenComplete(() {
profile = null;
});
return _profileCompleter.future;
}
}
......@@ -315,7 +390,8 @@ class _WidgetBuildRecorderHost extends StatefulWidget {
final WidgetBuildRecorder recorder;
@override
State<StatefulWidget> createState() => recorder._hostState = _WidgetBuildRecorderHostState();
State<StatefulWidget> createState() =>
recorder._hostState = _WidgetBuildRecorderHostState();
}
class _WidgetBuildRecorderHostState extends State<_WidgetBuildRecorderHost> {
......@@ -332,175 +408,181 @@ class _WidgetBuildRecorderHostState extends State<_WidgetBuildRecorderHost> {
}
}
/// Pumps frames and records frame metrics.
abstract class Recorder {
Recorder._(this.name);
/// Series of time recordings indexed in time order.
///
/// It can calculate [average], [standardDeviation] and [noise]. If the amount
/// of data collected is higher than [_kMeasuredSampleCount], then these
/// calculations will only apply to the latest [_kMeasuredSampleCount] data
/// points.
class Timeseries {
Timeseries();
/// List of all the values that have been recorded.
///
/// This list has no limit.
final List<num> _allValues = <num>[];
/// The name of the benchmark being recorded.
final String name;
/// List of values that are being used for measurement purposes.
///
/// [average], [standardDeviation] and [noise] are all based on this list, not
/// the [_allValues] list.
final List<num> _measuredValues = <num>[];
/// Frame metrics recorded during a single benchmark run.
final List<FrameMetrics> _frames = <FrameMetrics>[];
/// The total amount of data collected, including ones that were dropped
/// because of the sample size limit.
int get count => _allValues.length;
/// Runs the benchmark and records a profile containing frame metrics.
Future<Profile> run();
double get average => _computeMean(_measuredValues);
/// Decides whether the data collected so far is sufficient to stop, or
/// whether the benchmark should continue collecting more data.
///
/// The signals used are sample size, noise, and duration.
bool _shouldContinue() {
// Run through a minimum number of frames.
if (_frames.length < _kMinSampleCount) {
return true;
}
double get standardDeviation =>
_computeStandardDeviationForPopulation(_measuredValues);
final Profile profile = _generateProfile();
// Is it still too noisy?
if (profile.drawFrameDurationNoise > _kNoiseThreshold) {
// If the benchmark has run long enough, stop it, even if it's noisy under
// the assumption that this benchmark is always noisy and there's nothing
// we can do about it.
if (_frames.length > _kMaxSampleCount) {
print(
'WARNING: Benchmark noise did not converge below ${_kNoiseThreshold * 100}%. '
'Stopping because it reached the maximum number of samples $_kMaxSampleCount. '
'Noise level is ${profile.drawFrameDurationNoise * 100}%.',
);
return false;
}
double get noise => standardDeviation / average;
// Keep running.
return true;
void add(num value) {
_measuredValues.add(value);
_allValues.add(value);
// Don't let the [_measuredValues] list grow beyond [_kMeasuredSampleCount].
if (_measuredValues.length > _kMeasuredSampleCount) {
_measuredValues.removeAt(0);
}
print(
'SUCCESS: Benchmark converged below ${_kNoiseThreshold * 100}%. '
'Noise level is ${profile.drawFrameDurationNoise * 100}%.',
);
return false;
}
Profile _generateProfile() {
final List<FrameMetrics> measuredFrames =
_frames.sublist(_frames.length - _kMeasuredSampleCount);
final Iterable<double> noiseCheckDrawFrameTimes =
measuredFrames.map<double>((FrameMetrics metric) =>
metric.drawFrameDuration.inMicroseconds.toDouble());
final double averageDrawFrameDurationMicros =
_computeMean(noiseCheckDrawFrameTimes);
final double standardDeviation =
_computeStandardDeviationForPopulation(noiseCheckDrawFrameTimes);
final double drawFrameDurationNoise =
standardDeviation / averageDrawFrameDurationMicros;
return Profile._(
name: name,
averageDrawFrameDuration:
Duration(microseconds: averageDrawFrameDurationMicros.toInt()),
drawFrameDurationNoise: drawFrameDurationNoise,
frames: measuredFrames,
);
}
}
/// Contains metrics for a series of rendered frames.
@immutable
/// Base class for a profile collected from running a benchmark.
class Profile {
Profile._({
@required this.name,
@required this.drawFrameDurationNoise,
@required this.averageDrawFrameDuration,
@required List<FrameMetrics> frames,
}) : frames = List<FrameMetrics>.unmodifiable(frames);
Profile({@required this.name}) : assert(name != null);
/// The name of the benchmark that produced this profile.
final String name;
/// Average amount of time [Window.onDrawFrame] took.
final Duration averageDrawFrameDuration;
/// The noise, as a fraction of [averageDrawFrameDuration], measure from the [frames].
final double drawFrameDurationNoise;
/// This data will be used to display cards in the Flutter Dashboard.
final Map<String, Timeseries> scoreData = <String, Timeseries>{};
/// Frame metrics recorded during a single benchmark run.
final List<FrameMetrics> frames;
/// This data isn't displayed anywhere. It's stored for completeness purposes.
final Map<String, dynamic> extraData = <String, dynamic>{};
Map<String, dynamic> toJson() {
return <String, dynamic>{
'name': name,
'scoreKeys': <String>['averageDrawFrameDuration'],
'averageDrawFrameDuration': averageDrawFrameDuration.inMicroseconds,
'drawFrameDurationNoise': drawFrameDurationNoise,
'frames': frames
.map((FrameMetrics frameMetrics) => frameMetrics.toJson())
.toList(),
};
/// Invokes [callback] and records the duration of its execution under [key].
Duration record(String key, VoidCallback callback) {
final Duration duration = timeAction(callback);
addDataPoint(key, duration);
return duration;
}
@override
String toString() {
return _formatToStringLines(<String>[
'benchmark: $name',
'averageDrawFrameDuration: ${averageDrawFrameDuration.inMicroseconds}μs',
'drawFrameDurationNoise: ${drawFrameDurationNoise * 100}%',
'frames:',
...frames.expand((FrameMetrics frame) =>
'$frame\n'.split('\n').map((String line) => '- $line\n')),
]);
void addDataPoint(String key, Duration duration) {
scoreData.putIfAbsent(key, () => Timeseries()).add(duration.inMicroseconds);
}
}
/// Contains metrics for a single frame.
class FrameMetrics {
FrameMetrics._({
@required this.drawFrameDuration,
@required this.sceneBuildDuration,
@required this.windowRenderDuration,
});
/// Decides whether the data collected so far is sufficient to stop, or
/// whether the benchmark should continue collecting more data.
///
/// The signals used are sample size, noise, and duration.
///
/// If any of the timeseries doesn't satisfy the noise requirements, this
/// method will return true (asking the benchmark to continue collecting
/// data).
bool shouldContinue() {
// If we haven't recorded anything yet, we don't wanna stop now.
if (scoreData.isEmpty) {
return true;
}
/// Total amount of time taken by [Window.onDrawFrame].
final Duration drawFrameDuration;
// Accumulates all the messages to be printed when the final decision is to
// stop collecting data.
final StringBuffer buffer = StringBuffer();
/// The amount of time [SceneBuilder.build] took.
final Duration sceneBuildDuration;
final Iterable<bool> shouldContinueList = scoreData.keys.map((String key) {
final Timeseries timeseries = scoreData[key];
/// The amount of time [Window.render] took.
final Duration windowRenderDuration;
// Collect enough data points before considering to stop.
if (timeseries.count < _kMinSampleCount) {
return true;
}
// Is it still too noisy?
if (timeseries.noise > _kNoiseThreshold) {
// If the timeseries has enough data, stop it, even if it's noisy under
// the assumption that this benchmark is always noisy and there's nothing
// we can do about it.
if (timeseries.count > _kMaxSampleCount) {
buffer.writeln(
'WARNING: Noise of benchmark "$name.$key" did not converge below '
'${_ratioToPercent(_kNoiseThreshold)}. Stopping because it reached the '
'maximum number of samples $_kMaxSampleCount. Noise level is '
'${_ratioToPercent(timeseries.noise)}.',
);
return false;
} else {
return true;
}
}
buffer.writeln(
'SUCCESS: Benchmark converged below ${_ratioToPercent(_kNoiseThreshold)}. '
'Noise level is ${_ratioToPercent(timeseries.noise)}.',
);
return false;
});
// If any of the score data needs to continue to be collected, we should
// return true.
final bool finalDecision =
shouldContinueList.any((bool element) => element);
if (!finalDecision) {
print(buffer.toString());
}
return finalDecision;
}
/// Returns a JSON representation of the profile that will be sent to the
/// server.
Map<String, dynamic> toJson() {
return <String, dynamic>{
'drawFrameDuration': drawFrameDuration.inMicroseconds,
if (sceneBuildDuration != null)
'sceneBuildDuration': sceneBuildDuration.inMicroseconds,
if (windowRenderDuration != null)
'windowRenderDuration': windowRenderDuration.inMicroseconds,
final List<String> scoreKeys = <String>[];
final Map<String, dynamic> json = <String, dynamic>{
'name': name,
'scoreKeys': scoreKeys,
};
for (final String key in scoreData.keys) {
scoreKeys.add('$key.average');
final Timeseries timeseries = scoreData[key];
json['$key.average'] = timeseries.average;
json['$key.noise'] = timeseries.noise;
}
json.addAll(extraData);
return json;
}
@override
String toString() {
return _formatToStringLines(<String>[
'drawFrameDuration: ${drawFrameDuration.inMicroseconds}μs',
if (sceneBuildDuration != null)
'sceneBuildDuration: ${sceneBuildDuration.inMicroseconds}μs',
if (windowRenderDuration != null)
'windowRenderDuration: ${windowRenderDuration.inMicroseconds}μs',
]);
final StringBuffer buffer = StringBuffer();
buffer.writeln('name: $name');
for (final String key in scoreData.keys) {
final Timeseries timeseries = scoreData[key];
buffer.writeln('$key:');
buffer.writeln(' | average: ${timeseries.average} μs');
buffer.writeln(' | noise: ${_ratioToPercent(timeseries.noise)}');
}
for (final String key in extraData.keys) {
final dynamic value = extraData[key];
if (value is List) {
buffer.writeln('$key:');
for (final dynamic item in value) {
buffer.writeln(' - $item');
}
} else {
buffer.writeln('$key: $value');
}
}
return buffer.toString();
}
}
/// Trims each entry of [lines], drops the ones that end up empty, and joins
/// the remainder with newlines.
String _formatToStringLines(List<String> lines) {
  final List<String> kept = <String>[];
  for (final String line in lines) {
    final String trimmed = line.trim();
    if (trimmed.isNotEmpty) {
      kept.add(trimmed);
    }
  }
  return kept.join('\n');
}
/// Computes the arithmetic mean (or average) of given [values].
///
/// NOTE: the scraped diff left both the pre-change (`Iterable<double>`) and
/// post-change (`Iterable<num>`) versions interleaved, which does not compile;
/// this is the reconstructed post-change version.
///
/// Throws a [StateError] if [values] is empty (from [Iterable.reduce]).
double _computeMean(Iterable<num> values) {
  final num sum = values.reduce((num a, num b) => a + b);
  return sum / values.length;
}
......@@ -511,20 +593,24 @@ double _computeMean(Iterable<double> values) {
/// See also:
///
/// * https://en.wikipedia.org/wiki/Standard_deviation
/// Computes the population standard deviation of [population].
///
/// NOTE: the scraped diff left both the pre-change (`Iterable<double>`) and
/// post-change (`Iterable<num>`) versions interleaved, which does not compile;
/// this is the reconstructed post-change version.
///
/// Uses the population (not sample) formula: the sum of squared deltas from
/// the mean is divided by the element count, not count - 1.
double _computeStandardDeviationForPopulation(Iterable<num> population) {
  final double mean = _computeMean(population);
  final double sumOfSquaredDeltas = population.fold<double>(
    0.0,
    (double previous, num value) => previous += math.pow(value - mean, 2),
  );
  return math.sqrt(sumOfSquaredDeltas / population.length);
}
/// Formats a ratio (e.g. 0.25) as a percentage string with two decimal
/// places (e.g. "25.00%").
String _ratioToPercent(double value) {
  final double percent = value * 100;
  return '${percent.toStringAsFixed(2)}%';
}
/// Implemented by recorders that use [_RecordingWidgetsBinding] to receive
/// frame life-cycle calls.
abstract class _RecordingWidgetsBindingListener {
/// Whether the binding should continue pumping frames.
bool _shouldContinue();
abstract class RecordingWidgetsBindingListener {
/// The profile where the benchmark is collecting metrics.
Profile profile;
/// Called just before calling [SchedulerBinding.handleDrawFrame].
void _frameWillDraw();
......@@ -563,10 +649,11 @@ class _RecordingWidgetsBinding extends BindingBase
return WidgetsBinding.instance as _RecordingWidgetsBinding;
}
_RecordingWidgetsBindingListener _listener;
RecordingWidgetsBindingListener _listener;
bool _hasErrored = false;
void _beginRecording(_RecordingWidgetsBindingListener recorder, Widget widget) {
void _beginRecording(
RecordingWidgetsBindingListener recorder, Widget widget) {
final FlutterExceptionHandler originalOnError = FlutterError.onError;
// Fail hard and fast on errors. Benchmarks should not have any errors.
......@@ -582,7 +669,7 @@ class _RecordingWidgetsBinding extends BindingBase
runApp(widget);
}
/// To avoid calling [Recorder._shouldContinue] every time [scheduleFrame] is
/// To avoid calling [Profile.shouldContinue] every time [scheduleFrame] is
/// called, we cache this value at the beginning of the frame.
bool _benchmarkStopped = false;
......@@ -592,7 +679,7 @@ class _RecordingWidgetsBinding extends BindingBase
if (_hasErrored) {
return;
}
_benchmarkStopped = !_listener._shouldContinue();
_benchmarkStopped = !_listener.profile.shouldContinue();
super.handleBeginFrame(rawTimeStamp);
}
......
......@@ -6,6 +6,7 @@ import 'dart:async';
import 'dart:convert' show json;
import 'dart:html' as html;
import 'package:macrobenchmarks/src/web/bench_text_layout.dart';
import 'package:macrobenchmarks/src/web/bench_text_out_of_picture_bounds.dart';
import 'src/web/bench_build_material_checkbox.dart';
......@@ -17,6 +18,8 @@ import 'src/web/recorder.dart';
typedef RecorderFactory = Recorder Function();
const bool isCanvasKit = bool.fromEnvironment('FLUTTER_WEB_USE_SKIA', defaultValue: false);
/// List of all benchmarks that run in the devicelab.
///
/// When adding a new benchmark, add it to this map. Make sure that the name
......@@ -27,6 +30,12 @@ final Map<String, RecorderFactory> benchmarks = <String, RecorderFactory>{
BenchTextOutOfPictureBounds.benchmarkName: () => BenchTextOutOfPictureBounds(),
BenchSimpleLazyTextScroll.benchmarkName: () => BenchSimpleLazyTextScroll(),
BenchBuildMaterialCheckbox.benchmarkName: () => BenchBuildMaterialCheckbox(),
// Benchmarks that we don't want to run using CanvasKit.
if (!isCanvasKit) ...<String, RecorderFactory>{
BenchTextDomLayout.benchmarkName: () => BenchTextDomLayout(),
BenchTextDomCachedLayout.benchmarkName: () => BenchTextDomCachedLayout(),
}
};
/// Whether we fell back to manual mode.
......
......@@ -138,10 +138,6 @@ Future<TaskResult> runWebBenchmark({ @required bool useCanvasKit }) async {
throw 'Score key is empty in benchmark "$benchmarkName". '
'Received [${scoreKeys.join(', ')}]';
}
if (scoreKey.contains('.')) {
throw 'Score key contain dots in benchmark "$benchmarkName". '
'Received [${scoreKeys.join(', ')}]';
}
benchmarkScoreKeys.add('$namespace.$scoreKey');
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.