Unverified commit 02f8e028 authored by Tianguang, committed by GitHub

Add Web Benchmarks for Flutter Gallery (Flutter Side) — 1/4 (#57576)

parent 5d61bff2
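The core of this change is a custom warm-up protocol threaded through `Profile`, `Timeseries`, and the frame-measurement helpers. A minimal sketch of the intended call sequence, using only the API added in the diffs below (the driving benchmark loop and the concrete values are assumed, not part of the PR):

```dart
// Sketch of the custom warm-up lifecycle added by this change.
final Profile profile = Profile(name: 'gallery_v2_studies_perf', useCustomWarmUp: true);

// While profile.isWarmingUp is true, data points are recorded but
// flagged internally as warm-up values.
profile.addDataPoint('frame_build', const Duration(milliseconds: 35), reported: true);

// The benchmark (e.g. the gallery automator) decides when warm-up ends.
profile.stopWarmingUp();

// From here on, data points are the ones analyzed by Timeseries.computeStats().
profile.addDataPoint('frame_build', const Duration(milliseconds: 16), reported: true);
```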
@@ -18,8 +18,10 @@ web_shard_template: &WEB_SHARD_TEMPLATE
linux_shard_template: &LINUX_SHARD_TEMPLATE
environment:
# Some of the host-only devicelab tests are pretty involved and need a lot of RAM.
- CPU: 2
- MEMORY: 8G
+ # In June 2020, the CPU and memory were increased so that
+ # web benchmarks (including gallery benchmarks) can be run successfully on Linux.
+ CPU: 4
+ MEMORY: 16G
script:
- dart --enable-asserts ./dev/bots/test.dart
......
// Copyright 2014 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'package:flutter/material.dart';
import 'package:gallery/benchmarks/gallery_automator.dart';
import 'package:macrobenchmarks/src/web/recorder.dart';
/// A recorder that measures frame building durations for the Gallery.
class GalleryRecorder extends WidgetRecorder {
GalleryRecorder({
@required this.benchmarkName,
this.shouldRunPredicate,
this.testScrollsOnly = false,
}) : assert(testScrollsOnly || shouldRunPredicate != null),
super(name: benchmarkName, useCustomWarmUp: true);
/// The name of the gallery benchmark to be run.
final String benchmarkName;
/// A function that accepts the name of a demo and returns whether we should
/// run this demo in this benchmark.
final bool Function(String) shouldRunPredicate;
/// Whether this benchmark only tests scrolling.
final bool testScrollsOnly;
/// Whether we should continue recording.
@override
bool shouldContinue() => !_finished || profile.shouldContinue();
GalleryAutomator _galleryAutomator;
bool get _finished => _galleryAutomator?.finished ?? false;
/// Creates the [GalleryAutomator] widget.
@override
Widget createWidget() {
_galleryAutomator = GalleryAutomator(
benchmarkName: benchmarkName,
shouldRunPredicate: shouldRunPredicate,
testScrollsOnly: testScrollsOnly,
stopWarmingUpCallback: profile.stopWarmingUp,
);
return _galleryAutomator.createWidget();
}
}
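`GalleryAutomator` itself is defined in `gallery_automator.dart`, whose diff is not shown here. Inferred from the call sites above, its surface looks roughly like the following hypothetical sketch; only the constructor parameters, `finished`, and `createWidget` are confirmed by this file, everything else is an assumption:

```dart
import 'package:flutter/material.dart';

// Hypothetical shape of GalleryAutomator, inferred from GalleryRecorder's
// call sites above; the real implementation is not shown in this diff.
class GalleryAutomator {
  GalleryAutomator({
    this.benchmarkName,
    this.shouldRunPredicate,
    this.testScrollsOnly = false,
    this.stopWarmingUpCallback, // called once warm-up demos have finished
  });

  final String benchmarkName;
  final bool Function(String) shouldRunPredicate;
  final bool testScrollsOnly;
  final void Function() stopWarmingUpCallback;

  /// Whether the automator has finished driving all selected demos.
  bool finished = false;

  /// Builds the Gallery app that the automator will drive.
  Widget createWidget() {
    // The collapsed file builds the Gallery app and schedules the demo runs.
    throw UnimplementedError('Not shown in this diff.');
  }
}
```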
@@ -235,7 +235,7 @@ abstract class SceneBuilderRecorder extends Recorder {
window.onBeginFrame = (_) {
try {
- startMeasureFrame();
+ startMeasureFrame(profile);
onBeginFrame();
} catch (error, stackTrace) {
profileCompleter.completeError(error, stackTrace);
@@ -333,7 +333,10 @@ abstract class SceneBuilderRecorder extends Recorder {
/// }
/// ```
abstract class WidgetRecorder extends Recorder implements FrameRecorder {
- WidgetRecorder({@required String name}) : super._(name, true);
+ WidgetRecorder({
+   @required String name,
+   this.useCustomWarmUp = false,
+ }) : super._(name, true);
/// Creates a widget to be benchmarked.
///
@@ -349,12 +352,15 @@ abstract class WidgetRecorder extends Recorder implements FrameRecorder {
Profile profile;
Completer<void> _runCompleter;
+ /// Whether to delimit warm-up frames in a custom way.
+ final bool useCustomWarmUp;
Stopwatch _drawFrameStopwatch;
@override
@mustCallSuper
void frameWillDraw() {
- startMeasureFrame();
+ startMeasureFrame(profile);
_drawFrameStopwatch = Stopwatch()..start();
}
@@ -380,7 +386,7 @@ abstract class WidgetRecorder extends Recorder implements FrameRecorder {
@override
Future<Profile> run() async {
_runCompleter = Completer<void>();
- final Profile localProfile = profile = Profile(name: name);
+ final Profile localProfile = profile = Profile(name: name, useCustomWarmUp: useCustomWarmUp);
final _RecordingWidgetsBinding binding =
_RecordingWidgetsBinding.ensureInitialized();
final Widget widget = createWidget();
@@ -460,7 +466,7 @@ abstract class WidgetBuildRecorder extends Recorder implements FrameRecorder {
@mustCallSuper
void frameWillDraw() {
if (showWidget) {
- startMeasureFrame();
+ startMeasureFrame(profile);
_drawFrameStopwatch = Stopwatch()..start();
}
}
@@ -538,7 +544,8 @@ class _WidgetBuildRecorderHostState extends State<_WidgetBuildRecorderHost> {
/// calculations will only apply to the latest [_kMeasuredSampleCount] data
/// points.
class Timeseries {
- Timeseries(this.name, this.isReported);
+ Timeseries(this.name, this.isReported, {this.useCustomWarmUp = false})
+     : _warmUpFrameCount = useCustomWarmUp ? 0 : null;
/// The label of this timeseries used for debugging and result inspection.
final String name;
@@ -553,6 +560,18 @@ class Timeseries {
/// but that are too fine-grained to be useful for tracking on the dashboard.
final bool isReported;
+ /// Whether to delimit warm-up frames in a custom way.
+ final bool useCustomWarmUp;
+ /// The number of frames ignored as warm-up frames, used only
+ /// when [useCustomWarmUp] is true.
+ int _warmUpFrameCount;
+ /// The number of frames ignored as warm-up frames.
+ int get warmUpFrameCount => useCustomWarmUp
+     ? _warmUpFrameCount
+     : count - _kMeasuredSampleCount;
/// List of all the values that have been recorded.
///
/// This list has no limit.
@@ -566,11 +585,15 @@ class Timeseries {
///
/// See [TimeseriesStats] for more details.
TimeseriesStats computeStats() {
+ final int finalWarmUpFrameCount = warmUpFrameCount;
+ assert(finalWarmUpFrameCount >= 0 && finalWarmUpFrameCount < count);
// The first few values we simply discard and never look at. They're from the warm-up phase.
- final List<double> warmUpValues = _allValues.sublist(0, _allValues.length - _kMeasuredSampleCount);
+ final List<double> warmUpValues = _allValues.sublist(0, finalWarmUpFrameCount);
// Values we analyze.
- final List<double> candidateValues = _allValues.sublist(_allValues.length - _kMeasuredSampleCount);
+ final List<double> candidateValues = _allValues.sublist(finalWarmUpFrameCount);
// The average that includes outliers.
final double dirtyAverage = _computeAverage(name, candidateValues);
@@ -628,13 +651,16 @@ class Timeseries {
}
/// Adds a value to this timeseries.
- void add(double value) {
+ void add(double value, {@required bool isWarmUpValue}) {
if (value < 0.0) {
throw StateError(
'Timeseries $name: negative metric values are not supported. Got: $value',
);
}
_allValues.add(value);
+ if (useCustomWarmUp && isWarmUpValue) {
+   _warmUpFrameCount += 1;
+ }
}
}
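To make the two delimiting modes concrete, here is a small usage sketch of the `Timeseries` API as changed above (the metric values are illustrative):

```dart
// With useCustomWarmUp, the warm-up/measurement boundary is explicit:
final Timeseries series = Timeseries('frame_build', true, useCustomWarmUp: true);
series.add(35.0, isWarmUpValue: true);  // counted into warmUpFrameCount
series.add(16.0, isWarmUpValue: false); // analyzed by computeStats()
// warmUpFrameCount == 1, so computeStats() discards only the first value.

// Without useCustomWarmUp, the boundary is implicit: everything except the
// last _kMeasuredSampleCount values is treated as warm-up.
```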
@@ -748,11 +774,36 @@ class AnnotatedSample {
/// Base class for a profile collected from running a benchmark.
class Profile {
- Profile({@required this.name}) : assert(name != null);
+ Profile({@required this.name, this.useCustomWarmUp = false})
+     : assert(name != null),
+       _isWarmingUp = useCustomWarmUp;
/// The name of the benchmark that produced this profile.
final String name;
+ /// Whether to delimit warm-up frames in a custom way.
+ final bool useCustomWarmUp;
+ /// Whether we are currently measuring warm-up frames.
+ bool get isWarmingUp => _isWarmingUp;
+ bool _isWarmingUp;
+ /// Stops the warm-up phase.
+ ///
+ /// Call this method only once per profile, and only while [useCustomWarmUp]
+ /// and [isWarmingUp] are both true.
+ void stopWarmingUp() {
+   if (!useCustomWarmUp) {
+     throw Exception('`stopWarmingUp` should be used only when `useCustomWarmUp` is true.');
+   } else if (!_isWarmingUp) {
+     throw Exception('Warm-up already stopped.');
+   } else {
+     _isWarmingUp = false;
+   }
+ }
/// This data will be used to display cards in the Flutter Dashboard.
final Map<String, Timeseries> scoreData = <String, Timeseries>{};
@@ -773,7 +824,10 @@ class Profile {
/// Set [reported] to `false` to store the data, but not show it on the
/// dashboard UI.
void addDataPoint(String key, Duration duration, { @required bool reported }) {
- scoreData.putIfAbsent(key, () => Timeseries(key, reported)).add(duration.inMicroseconds.toDouble());
+ scoreData.putIfAbsent(
+   key,
+   () => Timeseries(key, reported, useCustomWarmUp: useCustomWarmUp),
+ ).add(duration.inMicroseconds.toDouble(), isWarmUpValue: isWarmingUp);
}
/// Decides whether the data collected so far is sufficient to stop, or
@@ -1007,6 +1061,20 @@ class _RecordingWidgetsBinding extends BindingBase
int _currentFrameNumber = 1;
+ /// Whether [startMeasureFrame] has been called without its matching
+ /// [endMeasureFrame] yet.
+ ///
+ /// This flag ensures that [startMeasureFrame] and [endMeasureFrame] are
+ /// always called in pairs, with [startMeasureFrame] followed by
+ /// [endMeasureFrame].
+ bool _calledStartMeasureFrame = false;
+ /// Whether we are recording a measured frame.
+ ///
+ /// This flag is needed because warm-up frames are skipped: a call to
+ /// [startMeasureFrame] does not always start a measurement, so we track
+ /// whether one is in progress to guarantee it is stopped once started.
+ bool _isMeasuringFrame = false;
/// Adds a marker indicating the beginning of frame rendering.
///
/// This adds an event to the performance trace used to find measured frames in
@@ -1014,22 +1082,53 @@ int _currentFrameNumber = 1;
/// benchmarks are only interested in a subset of frames. For example,
/// [WidgetBuildRecorder] only measures frames that build widgets, and ignores
/// frames that clear the screen.
- void startMeasureFrame() {
-   html.window.performance.mark('measured_frame_start#$_currentFrameNumber');
+ ///
+ /// Warm-up frames are not measured. If [profile.isWarmingUp] is true,
+ /// this function does nothing.
+ void startMeasureFrame(Profile profile) {
+   if (_calledStartMeasureFrame) {
+     throw Exception('`startMeasureFrame` called twice in a row.');
+   }
+   _calledStartMeasureFrame = true;
+   if (!profile.isWarmingUp) {
+     // Tell the browser to mark the beginning of the frame.
+     html.window.performance.mark('measured_frame_start#$_currentFrameNumber');
+     _isMeasuringFrame = true;
+   }
}
/// Signals the end of a measured frame.
///
/// See [startMeasureFrame] for details on what this instrumentation is used
/// for.
+ ///
+ /// Warm-up frames are not measured. If [profile.isWarmingUp] was true
+ /// when the corresponding [startMeasureFrame] was called,
+ /// this function does nothing.
void endMeasureFrame() {
- html.window.performance.mark('measured_frame_end#$_currentFrameNumber');
- html.window.performance.measure(
-   'measured_frame',
-   'measured_frame_start#$_currentFrameNumber',
-   'measured_frame_end#$_currentFrameNumber',
- );
- _currentFrameNumber += 1;
+ if (!_calledStartMeasureFrame) {
+   throw Exception('`startMeasureFrame` has not been called before calling `endMeasureFrame`.');
+ }
+ _calledStartMeasureFrame = false;
+ if (_isMeasuringFrame) {
+   // Tell the browser to mark the end of the frame, and measure the duration.
+   html.window.performance.mark('measured_frame_end#$_currentFrameNumber');
+   html.window.performance.measure(
+     'measured_frame',
+     'measured_frame_start#$_currentFrameNumber',
+     'measured_frame_end#$_currentFrameNumber',
+   );
+   // Increment the current frame number.
+   _currentFrameNumber += 1;
+   _isMeasuringFrame = false;
+ }
}
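A sketch of how a recorder is expected to drive this pair on each frame; the surrounding frame scheduling is assumed, and the pairing and warm-up-skip behavior follow directly from the code above:

```dart
// Sketch of one frame tick inside a recorder (frame scheduling assumed):
void onFrame(Profile profile) {
  startMeasureFrame(profile); // places a mark only when !profile.isWarmingUp
  // ... build and draw the frame ...
  endMeasureFrame(); // measures only if a mark was actually started

  // Pairing is enforced: a second startMeasureFrame before endMeasureFrame
  // throws, as does endMeasureFrame without a preceding startMeasureFrame.
}
```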
/// A function that receives a benchmark value from the framework.
......
@@ -10,6 +10,8 @@ import 'dart:math' as math;
import 'package:macrobenchmarks/src/web/bench_text_layout.dart';
import 'package:macrobenchmarks/src/web/bench_text_out_of_picture_bounds.dart';
+ import 'package:gallery/benchmarks/gallery_automator.dart' show DemoType, typeOfDemo;
import 'src/web/bench_build_material_checkbox.dart';
import 'src/web/bench_card_infinite_scroll.dart';
import 'src/web/bench_child_layers.dart';
@@ -22,12 +24,15 @@ import 'src/web/bench_paths.dart';
import 'src/web/bench_picture_recording.dart';
import 'src/web/bench_simple_lazy_text_scroll.dart';
import 'src/web/bench_text_out_of_picture_bounds.dart';
+ import 'src/web/gallery/gallery_recorder.dart';
import 'src/web/recorder.dart';
typedef RecorderFactory = Recorder Function();
const bool isCanvasKit = bool.fromEnvironment('FLUTTER_WEB_USE_SKIA', defaultValue: false);
+ const String _galleryBenchmarkPrefix = 'gallery_v2';
/// List of all benchmarks that run in the devicelab.
///
/// When adding a new benchmark, add it to this map. Make sure that the name
@@ -57,7 +62,16 @@ final Map<String, RecorderFactory> benchmarks = <String, RecorderFactory>{
BenchTextCachedLayout.canvasBenchmarkName: () => BenchTextCachedLayout(useCanvas: true),
BenchBuildColorsGrid.domBenchmarkName: () => BenchBuildColorsGrid.dom(),
BenchBuildColorsGrid.canvasBenchmarkName: () => BenchBuildColorsGrid.canvas(),
- }
+ // The following benchmark is for the Flutter Gallery. It currently fails
+ // when run with CanvasKit, so it is skipped for now.
+ // TODO(yjbanov): https://github.com/flutter/flutter/issues/59082
+ '${_galleryBenchmarkPrefix}_studies_perf': () => GalleryRecorder(
+   benchmarkName: '${_galleryBenchmarkPrefix}_studies_perf',
+   shouldRunPredicate: (String demo) => typeOfDemo(demo) == DemoType.study,
+ ),
+ },
};
final LocalBenchmarkServerClient _client = LocalBenchmarkServerClient();
@@ -83,29 +97,48 @@ Future<void> _runBenchmark(String benchmarkName) async {
return;
}
- try {
-   final Recorder recorder = recorderFactory();
-   final Runner runner = recorder.isTracingEnabled && !_client.isInManualMode
-       ? Runner(
-           recorder: recorder,
-           setUpAllDidRun: () => _client.startPerformanceTracing(benchmarkName),
-           tearDownAllWillRun: _client.stopPerformanceTracing,
-         )
-       : Runner(recorder: recorder);
-   final Profile profile = await runner.run();
-   if (!_client.isInManualMode) {
-     await _client.sendProfileData(profile);
-   } else {
-     _printResultsToScreen(profile);
-     print(profile);
-   }
- } catch (error, stackTrace) {
-   if (_client.isInManualMode) {
-     rethrow;
-   }
-   await _client.reportError(error, stackTrace);
- }
+ await runZoned<Future<void>>(
+   () async {
+     final Recorder recorder = recorderFactory();
+     final Runner runner = recorder.isTracingEnabled && !_client.isInManualMode
+         ? Runner(
+             recorder: recorder,
+             setUpAllDidRun: () => _client.startPerformanceTracing(benchmarkName),
+             tearDownAllWillRun: _client.stopPerformanceTracing,
+           )
+         : Runner(recorder: recorder);
+     final Profile profile = await runner.run();
+     if (!_client.isInManualMode) {
+       await _client.sendProfileData(profile);
+     } else {
+       _printResultsToScreen(profile);
+       print(profile);
+     }
+   },
+   zoneSpecification: ZoneSpecification(
+     print: (Zone self, ZoneDelegate parent, Zone zone, String line) async {
+       if (_client.isInManualMode) {
+         parent.print(zone, '[$benchmarkName] $line');
+       } else {
+         await _client.printToConsole(line);
+       }
+     },
+     handleUncaughtError: (
+       Zone self,
+       ZoneDelegate parent,
+       Zone zone,
+       Object error,
+       StackTrace stackTrace,
+     ) async {
+       if (_client.isInManualMode) {
+         parent.print(zone, '[$benchmarkName] $error, $stackTrace');
+         parent.handleUncaughtError(zone, error, stackTrace);
+       } else {
+         await _client.reportError(error, stackTrace);
+       }
+     },
+   ),
+ );
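The refactor above swaps the try/catch for a Dart zone so that `print` calls and uncaught asynchronous errors inside a benchmark are intercepted and forwarded to the benchmark server. A minimal, self-contained sketch of the same zone technique (names and messages here are illustrative, not from the PR):

```dart
import 'dart:async';

void main() {
  runZoned<void>(
    () {
      print('measuring frame 1'); // routed through the print handler below
      // Asynchronous errors are routed to handleUncaughtError; a synchronous
      // throw would instead propagate straight out of runZoned.
      Timer.run(() => throw StateError('boom'));
    },
    zoneSpecification: ZoneSpecification(
      print: (Zone self, ZoneDelegate parent, Zone zone, String line) {
        parent.print(zone, '[benchmark] $line');
      },
      handleUncaughtError: (Zone self, ZoneDelegate parent, Zone zone,
          Object error, StackTrace stackTrace) {
        parent.print(zone, '[benchmark] error: $error');
      },
    ),
  );
}
```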
void _fallbackToManual(String error) {
@@ -354,6 +387,17 @@ class LocalBenchmarkServerClient {
);
}
+ /// Sends a message to the benchmark server, which prints it to the host
+ /// console.
+ Future<void> printToConsole(String report) async {
+   _checkNotManualMode();
+   await html.HttpRequest.request(
+     '/print-to-console',
+     method: 'POST',
+     mimeType: 'text/plain',
+     sendData: report,
+   );
+ }
/// This is the same as calling [html.HttpRequest.request] but it doesn't
/// crash on 404, which we use to detect `flutter run`.
Future<html.HttpRequest> _requestXhr(
......
@@ -99,6 +99,13 @@ Future<TaskResult> runWebBenchmark({ @required bool useCanvasKit }) async {
profileData.complete(collectedProfiles);
return Response.notFound('Finished running benchmarks.');
}
+ } else if (request.requestedUri.path.endsWith('/print-to-console')) {
+   // A passthrough used by
+   // `dev/benchmarks/macrobenchmarks/lib/web_benchmarks.dart`
+   // to print information.
+   final String message = await request.readAsString();
+   print('[Gallery] $message');
+   return Response.ok('Reported.');
} else {
return Response.notFound(
'This request is not handled by the profile-data handler.');
......
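Taken together, the last two files form a small print passthrough: the web client POSTs a line to `/print-to-console`, and the host-side server echoes it to the console. A condensed sketch of the server half, assuming `package:shelf` (which the `Response.ok`/`Response.notFound` calls above suggest; the real handler lives inside `runWebBenchmark`'s request handler):

```dart
import 'package:shelf/shelf.dart';

// Condensed sketch of the passthrough endpoint, assuming package:shelf.
Future<Response> handlePrintToConsole(Request request) async {
  if (request.requestedUri.path.endsWith('/print-to-console')) {
    final String message = await request.readAsString();
    print('[Gallery] $message'); // surface the client's line on the host
    return Response.ok('Reported.');
  }
  return Response.notFound(
      'This request is not handled by the profile-data handler.');
}
```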