Skip to content
Projects
Groups
Snippets
Help
Loading...
Help
Submit feedback
Sign in
Toggle navigation
F
Front-End
Project
Project
Details
Activity
Releases
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
abdullh.alsoleman
Front-End
Commits
423459da
Unverified
Commit
423459da
authored
Jan 30, 2020
by
Yegor
Committed by
GitHub
Jan 30, 2020
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
Initial web benchmark harness and 3 benchmarks (#49460)
Initial web benchmark harness and 3 benchmarks
parent
e7baef08
Changes
16
Show whitespace changes
Inline
Side-by-side
Showing
16 changed files
with
1235 additions
and
27 deletions
+1235
-27
bench_draw_rect.dart
...nchmarks/macrobenchmarks/lib/src/web/bench_draw_rect.dart
+62
-0
bench_simple_lazy_text_scroll.dart
...benchmarks/lib/src/web/bench_simple_lazy_text_scroll.dart
+111
-0
bench_text_out_of_picture_bounds.dart
...chmarks/lib/src/web/bench_text_out_of_picture_bounds.dart
+153
-0
recorder.dart
dev/benchmarks/macrobenchmarks/lib/src/web/recorder.dart
+471
-0
test_data.dart
dev/benchmarks/macrobenchmarks/lib/src/web/test_data.dart
+22
-0
web_benchmarks.dart
dev/benchmarks/macrobenchmarks/lib/web_benchmarks.dart
+157
-0
pubspec.yaml
dev/benchmarks/macrobenchmarks/pubspec.yaml
+1
-1
index.html
dev/benchmarks/macrobenchmarks/web/index.html
+13
-0
test.dart
dev/bots/test.dart
+11
-0
web_benchmarks_canvaskit.dart
dev/devicelab/bin/tasks/web_benchmarks_canvaskit.dart
+15
-0
web_benchmarks_html.dart
dev/devicelab/bin/tasks/web_benchmarks_html.dart
+15
-0
framework.dart
dev/devicelab/lib/framework/framework.dart
+4
-12
utils.dart
dev/devicelab/lib/framework/utils.dart
+14
-7
web_benchmarks.dart
dev/devicelab/lib/tasks/web_benchmarks.dart
+166
-0
manifest.yaml
dev/devicelab/manifest.yaml
+16
-0
pubspec.yaml
examples/flutter_gallery/pubspec.yaml
+4
-7
No files found.
dev/benchmarks/macrobenchmarks/lib/src/web/bench_draw_rect.dart
0 → 100644
View file @
423459da
// Copyright 2014 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import
'dart:ui'
;
import
'recorder.dart'
;
/// Repeatedly paints a grid of rectangles.
///
/// Measures the performance of the `drawRect` operation.
class BenchDrawRect extends RawRecorder {
  BenchDrawRect() : super(name: benchmarkName);

  static const String benchmarkName = 'draw_rect';

  /// Number of rows in the grid.
  static const int kRows = 25;

  /// Number of columns in the grid.
  static const int kColumns = 40;

  /// Counter used to offset the rendered rects to make them wobble.
  ///
  /// The wobbling is there so a human could visually verify that the benchmark
  /// is correctly pumping frames.
  double wobbleCounter = 0;

  @override
  void onDrawFrame(SceneBuilder sceneBuilder) {
    final PictureRecorder recorder = PictureRecorder();
    final Canvas canvas = Canvas(recorder);
    final Paint fill = Paint()..color = const Color.fromARGB(255, 255, 0, 0);

    // One grid cell per (row, column) pair; each rect fills 80% of its cell.
    final Size windowSize = window.physicalSize;
    final Size cellSize = Size(
      windowSize.width / kColumns,
      windowSize.height / kRows,
    );
    final Size rectSize = cellSize * 0.8;

    // Horizontal offset in [0, 5] that cycles as wobbleCounter runs 0..9,
    // producing the visible wobble. Constant within a single frame.
    final double wobble = (wobbleCounter - 5).abs();

    for (int row = 0; row < kRows; row += 1) {
      canvas.save();
      for (int col = 0; col < kColumns; col += 1) {
        canvas.drawRect(Offset(wobble, 0) & rectSize, fill);
        canvas.translate(cellSize.width, 0);
      }
      canvas.restore();
      canvas.translate(0, cellSize.height);
    }
    wobbleCounter = (wobbleCounter + 1) % 10;

    final Picture picture = recorder.endRecording();
    sceneBuilder.pushOffset(0.0, 0.0);
    sceneBuilder.addPicture(Offset.zero, picture);
    sceneBuilder.pop();
  }
}
dev/benchmarks/macrobenchmarks/lib/src/web/bench_simple_lazy_text_scroll.dart
0 → 100644
View file @
423459da
// Copyright 2014 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import
'dart:async'
;
import
'package:flutter/widgets.dart'
;
import
'recorder.dart'
;
import
'test_data.dart'
;
/// Creates several list views containing text items, then continuously scrolls
/// them up and down.
///
/// Measures our ability to lazily render virtually infinitely big content.
class BenchSimpleLazyTextScroll extends WidgetRecorder {
  BenchSimpleLazyTextScroll() : super(name: benchmarkName);

  static const String benchmarkName = 'bench_simple_lazy_text_scroll';

  @override
  Widget createWidget() {
    // Three side-by-side scrollables with different starting offsets, travel
    // distances, and periods, so the three animations stay out of phase.
    const List<Widget> columns = <Widget>[
      Flexible(
        flex: 1,
        child: _TestScrollingWidget(
          initialScrollOffset: 0,
          scrollDistance: 300,
          scrollDuration: Duration(seconds: 1),
        ),
      ),
      Flexible(
        flex: 1,
        child: _TestScrollingWidget(
          initialScrollOffset: 1000,
          scrollDistance: 500,
          scrollDuration: Duration(milliseconds: 1500),
        ),
      ),
      Flexible(
        flex: 1,
        child: _TestScrollingWidget(
          initialScrollOffset: 2000,
          scrollDistance: 700,
          scrollDuration: Duration(milliseconds: 2000),
        ),
      ),
    ];
    return Directionality(
      textDirection: TextDirection.ltr,
      child: Row(children: columns),
    );
  }
}
/// Configuration for one continuously-scrolling list view.
///
/// The list scrolls back and forth between [initialScrollOffset] and
/// [initialScrollOffset] + [scrollDistance], each swing taking
/// [scrollDuration].
class _TestScrollingWidget extends StatefulWidget {
  const _TestScrollingWidget({
    @required this.initialScrollOffset,
    @required this.scrollDistance,
    @required this.scrollDuration,
  });

  // Scroll position, in pixels, the list starts at.
  final double initialScrollOffset;

  // How far, in pixels, one swing of the animation travels.
  final double scrollDistance;

  // Duration of a single one-way swing.
  final Duration scrollDuration;

  @override
  State<StatefulWidget> createState() => _TestScrollingWidgetState();
}
/// Drives the back-and-forth scroll animation described by its widget.
class _TestScrollingWidgetState extends State<_TestScrollingWidget> {
  ScrollController scrollController;

  @override
  void initState() {
    super.initState();
    scrollController = ScrollController(
      initialScrollOffset: widget.initialScrollOffset,
    );
    // Without the timer the animation doesn't begin.
    Timer.run(() async {
      bool forward = true;
      // Fix: the original `while (true)` never stopped, so the loop kept
      // calling animateTo on a controller belonging to a defunct state after
      // disposal. Checking [mounted] ends the loop once the state is gone.
      while (mounted) {
        await scrollController.animateTo(
          forward
              ? widget.initialScrollOffset + widget.scrollDistance
              : widget.initialScrollOffset,
          curve: Curves.linear,
          duration: widget.scrollDuration,
        );
        forward = !forward;
      }
    });
  }

  @override
  void dispose() {
    // Fix: the controller was never disposed in the original, leaking its
    // position/listeners for the lifetime of the app.
    scrollController.dispose();
    super.dispose();
  }

  @override
  Widget build(BuildContext context) {
    return ListView.builder(
      controller: scrollController,
      itemCount: 10000,
      itemBuilder: (BuildContext context, int index) {
        // Cycle through the canned lipsum words from test_data.dart.
        return Text(lipsum[index % lipsum.length]);
      },
    );
  }
}
dev/benchmarks/macrobenchmarks/lib/src/web/bench_text_out_of_picture_bounds.dart
0 → 100644
View file @
423459da
// Copyright 2014 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import
'dart:math'
as
math
;
import
'dart:ui'
;
import
'recorder.dart'
;
import
'test_data.dart'
;
/// Draws 9 screens worth of text in a 3x3 grid with only the middle cell
/// appearing on the visible screen:
///
/// +-------------+-------------+-------------+
/// |             |             |             |
/// |  invisible  |  invisible  |  invisible  |
/// |             |             |             |
/// +-----------------------------------------+
/// |             |             |             |
/// |  invisible  |   visible   |  invisible  |
/// |             |             |             |
/// +-----------------------------------------+
/// |             |             |             |
/// |  invisible  |  invisible  |  invisible  |
/// |             |             |             |
/// +-------------+-------------+-------------+
///
/// This reproduces the bug where we render more than visible causing
/// performance issues: https://github.com/flutter/flutter/issues/48516
class BenchTextOutOfPictureBounds extends RawRecorder {
  BenchTextOutOfPictureBounds() : super(name: benchmarkName) {
    const Color red = Color.fromARGB(255, 255, 0, 0);
    const Color green = Color.fromARGB(255, 0, 255, 0);

    // We don't want paragraph generation and layout to pollute benchmark numbers.
    singleLineParagraphs = _generateParagraphs(
      paragraphCount: 500,
      minWordCountPerParagraph: 2,
      maxWordCountPerParagraph: 5,
      color: red,
    );
    multiLineParagraphs = _generateParagraphs(
      paragraphCount: 50,
      minWordCountPerParagraph: 30,
      maxWordCountPerParagraph: 50,
      color: green,
    );
  }

  // Use hard-coded seed to make sure the data is stable across benchmark runs.
  static final math.Random _random = math.Random(0);

  static const String benchmarkName = 'text_out_of_picture_bounds';

  // Short red paragraphs laid out so they fit on one line.
  List<Paragraph> singleLineParagraphs;

  // Longer green paragraphs that wrap onto multiple lines.
  List<Paragraph> multiLineParagraphs;

  @override
  void onDrawFrame(SceneBuilder sceneBuilder) {
    final PictureRecorder pictureRecorder = PictureRecorder();
    final Canvas canvas = Canvas(pictureRecorder);
    final Size screenSize = window.physicalSize;
    const double padding = 10.0;

    // Fills a single cell with random text stacked top-to-bottom until the
    // cell's full height is covered.
    void fillCellWithText(List<Paragraph> textSource) {
      canvas.save();
      double topOffset = 0;
      while (topOffset < screenSize.height) {
        final Paragraph paragraph =
            textSource[_random.nextInt(textSource.length)];
        // Give it enough space to make sure it ends up being a single-line paragraph.
        paragraph.layout(ParagraphConstraints(width: screenSize.width / 2));
        canvas.drawParagraph(paragraph, Offset.zero);
        final double step = paragraph.height + padding;
        canvas.translate(0, step);
        topOffset += step;
      }
      canvas.restore();
    }

    // Starting with the top-left cell, fill every cell with text.
    canvas.translate(-screenSize.width, -screenSize.height);
    for (int row = 0; row < 3; row += 1) {
      canvas.save();
      for (int col = 0; col < 3; col += 1) {
        // Outline the cell so the grid is visible to a human observer.
        final Paint cellBorder = Paint()
          ..style = PaintingStyle.stroke
          ..strokeWidth = 2.0;
        canvas.drawRect(Offset.zero & screenSize, cellBorder);

        // Left half of the cell: single-line text.
        fillCellWithText(singleLineParagraphs);

        // Right half of the cell: multi-line text.
        canvas.save();
        canvas.translate(screenSize.width / 2, 0);
        fillCellWithText(multiLineParagraphs);
        canvas.restore();

        // Shift to next column.
        canvas.translate(screenSize.width, 0);
      }
      // Undo horizontal shift.
      canvas.restore();
      // Shift to next row.
      canvas.translate(0, screenSize.height);
    }

    final Picture picture = pictureRecorder.endRecording();
    sceneBuilder.pushOffset(0.0, 0.0);
    sceneBuilder.addPicture(Offset.zero, picture);
    sceneBuilder.pop();
  }

  /// Generates strings and builds pre-laid out paragraphs to be used by the
  /// benchmark.
  List<Paragraph> _generateParagraphs({
    int paragraphCount,
    int minWordCountPerParagraph,
    int maxWordCountPerParagraph,
    Color color,
  }) {
    final List<Paragraph> strings = <Paragraph>[];
    int wordPointer = 0; // points to the next word in lipsum to extract
    for (int i = 0; i < paragraphCount; i += 1) {
      // NOTE(review): nextInt's upper bound is exclusive, so the word count is
      // in [min, max), never reaching maxWordCountPerParagraph itself.
      final int wordCount = minWordCountPerParagraph +
          _random.nextInt(maxWordCountPerParagraph - minWordCountPerParagraph);
      final List<String> words = <String>[];
      for (int j = 0; j < wordCount; j += 1) {
        words.add(lipsum[wordPointer]);
        wordPointer = (wordPointer + 1) % lipsum.length;
      }
      final ParagraphBuilder builder =
          ParagraphBuilder(ParagraphStyle(fontFamily: 'sans-serif'))
            ..pushStyle(TextStyle(color: color, fontSize: 18.0))
            ..addText(words.join(' '))
            ..pop();
      final Paragraph paragraph = builder.build();
      // Fill half the screen.
      paragraph.layout(
          ParagraphConstraints(width: window.physicalSize.width / 2));
      strings.add(paragraph);
    }
    return strings;
  }
}
dev/benchmarks/macrobenchmarks/lib/src/web/recorder.dart
0 → 100644
View file @
423459da
// Copyright 2014 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import
'dart:async'
;
import
'dart:math'
as
math
;
import
'dart:ui'
;
import
'package:meta/meta.dart'
;
import
'package:flutter/gestures.dart'
;
import
'package:flutter/foundation.dart'
;
import
'package:flutter/services.dart'
;
import
'package:flutter/scheduler.dart'
;
import
'package:flutter/rendering.dart'
;
import
'package:flutter/widgets.dart'
;
/// Minimum number of samples collected by a benchmark irrespective of noise
/// levels.
const int _kMinSampleCount = 50;

/// Maximum number of samples collected by a benchmark irrespective of noise
/// levels.
///
/// If the noise doesn't settle down before we reach the max we'll report noisy
/// results assuming the benchmarks is simply always noisy.
const int _kMaxSampleCount = 10 * _kMinSampleCount;

/// The number of samples used to extract metrics, such as noise, means,
/// max/min values.
///
/// Only the trailing [_kMeasuredSampleCount] frames of a run contribute to
/// the reported profile; earlier frames act as warm-up.
const int _kMeasuredSampleCount = 10;

/// Maximum tolerated noise level.
///
/// A benchmark continues running until a noise level below this threshold is
/// reached.
const double _kNoiseThreshold = 0.05; // 5%
/// Measures the amount of time [action] takes.
///
/// Runs [action] synchronously and returns the elapsed wall-clock time.
Duration timeAction(VoidCallback action) {
  final Stopwatch stopwatch = Stopwatch()..start();
  action();
  return (stopwatch..stop()).elapsed;
}
/// A recorder for benchmarking interactions with the engine without the
/// framework by directly exercising [SceneBuilder].
///
/// To implement a benchmark, extend this class and implement [onDrawFrame].
///
/// Example:
///
/// ```
/// class BenchDrawCircle extends RawRecorder {
///   BenchDrawCircle() : super(name: benchmarkName);
///
///   static const String benchmarkName = 'draw_circle';
///
///   @override
///   void onDrawFrame(SceneBuilder sceneBuilder) {
///     final PictureRecorder pictureRecorder = PictureRecorder();
///     final Canvas canvas = Canvas(pictureRecorder);
///     final Paint paint = Paint()..color = const Color.fromARGB(255, 255, 0, 0);
///     final Size windowSize = window.physicalSize;
///     canvas.drawCircle(windowSize.center(Offset.zero), 50.0, paint);
///     final Picture picture = pictureRecorder.endRecording();
///     sceneBuilder.addPicture(Offset.zero, picture);
///   }
/// }
/// ```
abstract class RawRecorder extends Recorder {
  RawRecorder({@required String name}) : super._(name);

  /// Called from [Window.onBeginFrame].
  @mustCallSuper
  void onBeginFrame() {}

  /// Called on every frame.
  ///
  /// An implementation should exercise the [sceneBuilder] to build a frame.
  /// However, it must not call [SceneBuilder.build] or [Window.render].
  /// Instead the benchmark harness will call them and time them appropriately.
  void onDrawFrame(SceneBuilder sceneBuilder);

  @override
  Future<Profile> run() {
    final Completer<Profile> profileCompleter = Completer<Profile>();

    // Take over the window's frame callbacks for the duration of the run.
    window.onBeginFrame = (_) {
      onBeginFrame();
    };
    window.onDrawFrame = () {
      Duration sceneBuildDuration;
      Duration windowRenderDuration;
      // The timers are deliberately nested: drawFrameDuration covers
      // everything below; sceneBuildDuration covers build + render;
      // windowRenderDuration covers only render.
      // NOTE(review): because [window.render] runs inside the build timing
      // closure, sceneBuildDuration includes windowRenderDuration — confirm
      // this nesting is intentional before comparing the two metrics.
      final Duration drawFrameDuration = timeAction(() {
        final SceneBuilder sceneBuilder = SceneBuilder();
        onDrawFrame(sceneBuilder);
        sceneBuildDuration = timeAction(() {
          final Scene scene = sceneBuilder.build();
          windowRenderDuration = timeAction(() {
            window.render(scene);
          });
        });
      });
      _frames.add(FrameMetrics._(
        drawFrameDuration: drawFrameDuration,
        sceneBuildDuration: sceneBuildDuration,
        windowRenderDuration: windowRenderDuration,
      ));
      // Keep pumping frames until the harness decides enough data has been
      // collected, then complete with the generated profile.
      if (_shouldContinue()) {
        window.scheduleFrame();
      } else {
        final Profile profile = _generateProfile();
        profileCompleter.complete(profile);
      }
    };
    window.scheduleFrame();
    return profileCompleter.future;
  }
}
/// A recorder for benchmarking interactions with the framework by creating
/// widgets.
///
/// To implement a benchmark, extend this class and implement [createWidget].
/// The widget returned from [createWidget] must drive its own animation (for
/// example, via a [Timer]-started scroll loop); the harness times each frame
/// pumped by the framework and stops automatically.
abstract class WidgetRecorder extends Recorder {
  WidgetRecorder({@required String name}) : super._(name);

  /// Creates a widget to be benchmarked.
  ///
  /// The widget must create its own animation to drive the benchmark. The
  /// animation should continue indefinitely. The benchmark harness will stop
  /// pumping frames automatically as soon as the noise levels are sufficiently
  /// low.
  Widget createWidget();

  final Completer<Profile> _profileCompleter = Completer<Profile>();

  // Times a single handleDrawFrame call; restarted by [_frameWillDraw].
  Stopwatch _drawFrameStopwatch;

  // Called by the binding just before the framework draws a frame.
  void _frameWillDraw() {
    _drawFrameStopwatch = Stopwatch()..start();
  }

  // Called by the binding right after the framework drew a frame.
  void _frameDidDraw() {
    _frames.add(FrameMetrics._(
      drawFrameDuration: _drawFrameStopwatch.elapsed,
      // Scene build/render are not observable at this level; only the raw
      // recorder reports them.
      sceneBuildDuration: null,
      windowRenderDuration: null,
    ));

    if (!_shouldContinue()) {
      _profileCompleter.complete(_generateProfile());
      return;
    }
    window.scheduleFrame();
  }

  @override
  Future<Profile> run() {
    final _RecordingWidgetsBinding binding =
        _RecordingWidgetsBinding.ensureInitialized();
    binding._beginRecording(this, createWidget());
    return _profileCompleter.future;
  }
}
/// Pumps frames and records frame metrics.
abstract class Recorder {
  Recorder._(this.name);

  /// The name of the benchmark being recorded.
  final String name;

  /// Frame metrics recorded during a single benchmark run.
  final List<FrameMetrics> _frames = <FrameMetrics>[];

  /// Runs the benchmark and records a profile containing frame metrics.
  Future<Profile> run();

  /// Decides whether the data collected so far is sufficient to stop, or
  /// whether the benchmark should continue collecting more data.
  ///
  /// The signals used are sample size, noise, and duration.
  bool _shouldContinue() {
    // Run through a minimum number of frames.
    if (_frames.length < _kMinSampleCount) {
      return true;
    }

    final Profile profile = _generateProfile();

    // Converged: noise is acceptably low, stop collecting.
    if (profile.drawFrameDurationNoise <= _kNoiseThreshold) {
      print(
        'SUCCESS: Benchmark converged below ${_kNoiseThreshold * 100}%. '
        'Noise level is ${profile.drawFrameDurationNoise * 100}%.',
      );
      return false;
    }

    // Still too noisy. If the benchmark has run long enough, stop it, even if
    // it's noisy, under the assumption that this benchmark is always noisy
    // and there's nothing we can do about it.
    if (_frames.length > _kMaxSampleCount) {
      print(
        'WARNING: Benchmark noise did not converge below ${_kNoiseThreshold * 100}%. '
        'Stopping because it reached the maximum number of samples $_kMaxSampleCount. '
        'Noise level is ${profile.drawFrameDurationNoise * 100}%.',
      );
      return false;
    }

    // Keep running.
    return true;
  }

  // Builds a [Profile] from the trailing [_kMeasuredSampleCount] frames.
  Profile _generateProfile() {
    final List<FrameMetrics> measuredFrames =
        _frames.sublist(_frames.length - _kMeasuredSampleCount);
    final Iterable<double> drawFrameTimes = measuredFrames.map<double>(
        (FrameMetrics metric) =>
            metric.drawFrameDuration.inMicroseconds.toDouble());
    final double averageMicros = _computeMean(drawFrameTimes);
    // Noise is the population standard deviation expressed as a fraction of
    // the mean draw-frame duration.
    final double noise =
        _computeStandardDeviationForPopulation(drawFrameTimes) / averageMicros;
    return Profile._(
      name: name,
      averageDrawFrameDuration: Duration(microseconds: averageMicros.toInt()),
      drawFrameDurationNoise: noise,
      frames: measuredFrames,
    );
  }
}
/// Contains metrics for a series of rendered frames.
@immutable
class Profile {
  Profile._({
    @required this.name,
    @required this.drawFrameDurationNoise,
    @required this.averageDrawFrameDuration,
    @required List<FrameMetrics> frames,
  }) : frames = List<FrameMetrics>.unmodifiable(frames);

  /// The name of the benchmark that produced this profile.
  final String name;

  /// Average amount of time [Window.onDrawFrame] took.
  final Duration averageDrawFrameDuration;

  /// The noise, as a fraction of [averageDrawFrameDuration], measured from the [frames].
  final double drawFrameDurationNoise;

  /// Frame metrics recorded during a single benchmark run.
  final List<FrameMetrics> frames;

  /// Serializes this profile for reporting to the benchmark server.
  Map<String, dynamic> toJson() {
    return <String, dynamic>{
      'name': name,
      'averageDrawFrameDuration': averageDrawFrameDuration.inMicroseconds,
      'drawFrameDurationNoise': drawFrameDurationNoise,
      'frames': <Map<String, dynamic>>[
        for (final FrameMetrics frameMetrics in frames) frameMetrics.toJson(),
      ],
    };
  }

  @override
  String toString() {
    final List<String> lines = <String>[
      'benchmark: $name',
      'averageDrawFrameDuration: ${averageDrawFrameDuration.inMicroseconds}μs',
      'drawFrameDurationNoise: ${drawFrameDurationNoise * 100}%',
      'frames:',
    ];
    // Render each frame as a bulleted, one-line-per-metric entry.
    for (final FrameMetrics frame in frames) {
      for (final String line in '$frame\n'.split('\n')) {
        lines.add('- $line\n');
      }
    }
    return _formatToStringLines(lines);
  }
}
/// Contains metrics for a single frame.
class FrameMetrics {
  FrameMetrics._({
    @required this.drawFrameDuration,
    @required this.sceneBuildDuration,
    @required this.windowRenderDuration,
  });

  /// Total amount of time taken by [Window.onDrawFrame].
  final Duration drawFrameDuration;

  /// The amount of time [SceneBuilder.build] took; null when not measured
  /// (widget benchmarks pass null here).
  final Duration sceneBuildDuration;

  /// The amount of time [Window.render] took; null when not measured.
  final Duration windowRenderDuration;

  /// Serializes this frame's metrics, omitting unmeasured values.
  Map<String, dynamic> toJson() {
    final Map<String, dynamic> json = <String, dynamic>{
      'drawFrameDuration': drawFrameDuration.inMicroseconds,
    };
    if (sceneBuildDuration != null) {
      json['sceneBuildDuration'] = sceneBuildDuration.inMicroseconds;
    }
    if (windowRenderDuration != null) {
      json['windowRenderDuration'] = windowRenderDuration.inMicroseconds;
    }
    return json;
  }

  @override
  String toString() {
    return _formatToStringLines(<String>[
      'drawFrameDuration: ${drawFrameDuration.inMicroseconds}μs',
      if (sceneBuildDuration != null)
        'sceneBuildDuration: ${sceneBuildDuration.inMicroseconds}μs',
      if (windowRenderDuration != null)
        'windowRenderDuration: ${windowRenderDuration.inMicroseconds}μs',
    ]);
  }
}
/// Joins [lines] into a single newline-separated string, trimming each line
/// and dropping lines that are empty after trimming.
String _formatToStringLines(List<String> lines) {
  final List<String> cleaned = <String>[];
  for (final String line in lines) {
    final String trimmed = line.trim();
    if (trimmed.isNotEmpty) {
      cleaned.add(trimmed);
    }
  }
  return cleaned.join('\n');
}
/// Computes the arithmetic mean (or average) of given [values].
///
/// Throws an [ArgumentError] if [values] is empty (the original threw an
/// opaque [StateError] from [Iterable.reduce]). Iterates [values] exactly
/// once, so lazy iterables are not recomputed by a separate `.length` pass.
double _computeMean(Iterable<double> values) {
  double sum = 0.0;
  int count = 0;
  for (final double value in values) {
    sum += value;
    count += 1;
  }
  if (count == 0) {
    throw ArgumentError('Cannot compute the mean of zero values.');
  }
  return sum / count;
}
/// Computes population standard deviation.
///
/// Unlike sample standard deviation, which divides by N - 1, this divides by N.
///
/// See also:
///
/// * https://en.wikipedia.org/wiki/Standard_deviation
double _computeStandardDeviationForPopulation(Iterable<double> population) {
  // Materialize once: the original walked the iterable three times (mean,
  // fold, length), recomputing lazy iterables on each pass.
  final List<double> values = List<double>.of(population, growable: false);
  final double mean = _computeMean(values);
  // Accumulate squared deltas with plain multiplication. The original used
  // `previous += math.pow(value - mean, 2)` inside a fold, which both mutated
  // the fold parameter and relied on an implicit num-to-double downcast from
  // [math.pow].
  double sumOfSquaredDeltas = 0.0;
  for (final double value in values) {
    final double delta = value - mean;
    sumOfSquaredDeltas += delta * delta;
  }
  return math.sqrt(sumOfSquaredDeltas / values.length);
}
/// A variant of [WidgetsBinding] that collaborates with a [Recorder] to decide
/// when to stop pumping frames.
///
/// A normal [WidgetsBinding] typically always pumps frames whenever a widget
/// instructs it to do so by calling [scheduleFrame] (transitively via
/// `setState`). This binding will stop pumping new frames as soon as benchmark
/// parameters are satisfactory (e.g. when the metric noise levels become low
/// enough).
class _RecordingWidgetsBinding extends BindingBase
    with
        GestureBinding,
        ServicesBinding,
        SchedulerBinding,
        PaintingBinding,
        SemanticsBinding,
        RendererBinding,
        WidgetsBinding {
  /// Makes an instance of [_RecordingWidgetsBinding] the current binding.
  ///
  /// Constructing the binding installs it as [WidgetsBinding.instance] as a
  /// side effect, which is why the constructor call below is not assigned.
  static _RecordingWidgetsBinding ensureInitialized() {
    if (WidgetsBinding.instance == null) {
      _RecordingWidgetsBinding();
    }
    return WidgetsBinding.instance as _RecordingWidgetsBinding;
  }

  // The recorder currently driving this binding; set by [_beginRecording].
  // NOTE(review): [handleBeginFrame] dereferences this without a null check,
  // so a frame pumped before [_beginRecording] would throw — confirm no frame
  // can be scheduled before recording begins.
  WidgetRecorder _recorder;

  // Remembers [recorder] and starts the benchmark widget via [runApp].
  void _beginRecording(WidgetRecorder recorder, Widget widget) {
    _recorder = recorder;
    runApp(widget);
  }

  /// To avoid calling [Recorder._shouldContinue] every time [scheduleFrame] is
  /// called, we cache this value at the beginning of the frame.
  bool _benchmarkStopped = false;

  @override
  void handleBeginFrame(Duration rawTimeStamp) {
    // Latch the stop decision once per frame; [scheduleFrame] consults it.
    _benchmarkStopped = !_recorder._shouldContinue();
    super.handleBeginFrame(rawTimeStamp);
  }

  @override
  void scheduleFrame() {
    // Swallow frame requests once the benchmark has collected enough data,
    // which is what ends the run even though widgets keep animating.
    if (!_benchmarkStopped) {
      super.scheduleFrame();
    }
  }

  @override
  void handleDrawFrame() {
    // Bracket the framework's draw-frame work so [WidgetRecorder] can time it.
    _recorder._frameWillDraw();
    super.handleDrawFrame();
    _recorder._frameDidDraw();
  }
}
dev/benchmarks/macrobenchmarks/lib/src/web/test_data.dart
0 → 100644
View file @
423459da
// Copyright 2014 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/// Random words used by benchmarks that contain text.
///
/// Produced by splitting the lipsum text on single spaces, so every element
/// is one word (possibly with trailing punctuation).
final List<String> lipsum =
    'Lorem ipsum dolor sit amet, consectetur adipiscing '
    'elit. Vivamus ut ligula a neque mattis posuere. Sed suscipit lobortis '
    'sodales. Morbi sed neque molestie, hendrerit odio ac, aliquam velit. '
    'Curabitur non quam sit amet nibh sollicitudin ultrices. Fusce '
    'ullamcorper bibendum commodo. In et feugiat nisl. Aenean vulputate in '
    'odio vestibulum ultricies. Nunc dolor libero, hendrerit eu urna sit '
    'amet, pretium iaculis nulla. Ut porttitor nisl et leo iaculis, vel '
    'fringilla odio pulvinar. Ut eget ligula id odio auctor egestas nec a '
    // Fix: the trailing space after "mattis." was missing, so the adjacent
    // string literals fused "mattis." and "Suspendisse" into a single
    // "mattis.Suspendisse" token after split(' ').
    'nisl. Aliquam luctus dolor et magna posuere mattis. '
    'Suspendisse fringilla nisl et massa congue, eget '
    'imperdiet lectus porta. Vestibulum sed dui sed dui porta imperdiet ut in risus. '
    'Fusce diam purus, faucibus id accumsan sit amet, semper a sem. Sed aliquam '
    'lacus eget libero ultricies, quis hendrerit tortor posuere. Pellentesque '
    'sagittis eu est in maximus. Proin auctor fringilla dolor in hendrerit. Nam '
    'pulvinar rhoncus tellus. Nullam vel mauris semper, volutpat tellus at, sagittis '
    'lectus. Donec vitae nibh mauris. Morbi posuere sem id eros tristique tempus. '
    'Vivamus lacinia sapien neque, eu semper purus gravida ut.'
        .split(' ');
dev/benchmarks/macrobenchmarks/lib/web_benchmarks.dart
0 → 100644
View file @
423459da
// Copyright 2014 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import
'dart:async'
;
import
'dart:convert'
show
json
;
import
'dart:html'
as
html
;
import
'package:macrobenchmarks/src/web/bench_text_out_of_picture_bounds.dart'
;
import
'src/web/bench_draw_rect.dart'
;
import
'src/web/bench_simple_lazy_text_scroll.dart'
;
import
'src/web/bench_text_out_of_picture_bounds.dart'
;
import
'src/web/recorder.dart'
;
/// Signature of the lazy factories stored in [benchmarks]; a benchmark is
/// only instantiated when it is about to run.
typedef RecorderFactory = Recorder Function();

/// All available benchmarks, keyed by benchmark name.
///
/// The keys are what the devicelab server sends back from `/next-benchmark`
/// and what the manual-mode buttons are labeled with.
final Map<String, RecorderFactory> benchmarks = <String, RecorderFactory>{
  BenchDrawRect.benchmarkName: () => BenchDrawRect(),
  BenchTextOutOfPictureBounds.benchmarkName: () => BenchTextOutOfPictureBounds(),
  BenchSimpleLazyTextScroll.benchmarkName: () => BenchSimpleLazyTextScroll(),
};

/// Whether we fell back to manual mode.
///
/// This happens when you run benchmarks using plain `flutter run` rather than
/// devicelab test harness. The test harness spins up a special server that
/// provides API for automatically picking the next benchmark to run.
bool isInManualMode = false;
/// Entry point: asks the benchmark server which benchmark to run, runs it,
/// then reloads the page so the next benchmark starts from a clean slate.
Future<void> main() async {
  // Check if the benchmark server wants us to run a specific benchmark,
  // advertising the set of benchmarks we know about.
  final html.HttpRequest response = await requestXhr(
    '/next-benchmark',
    method: 'POST',
    mimeType: 'application/json',
    sendData: json.encode(benchmarks.keys.toList()),
  );

  // 404 is expected in the following cases:
  // - The benchmark is ran using plain `flutter run`, which does not provide "next-benchmark" handler.
  // - We ran all benchmarks and the benchmark is telling us there are no more benchmarks to run.
  if (response.status == 404) {
    _fallbackToManual('The server did not tell us which benchmark to run next.');
  } else {
    await _runBenchmark(response.responseText);
    html.window.location.reload();
  }
}
/// Runs the benchmark registered under [benchmarkName] and reports its
/// profile: to the devicelab server when harnessed, or to the console when in
/// manual mode. Falls back to manual mode when the name is unknown.
Future<void> _runBenchmark(String benchmarkName) async {
  final RecorderFactory recorderFactory = benchmarks[benchmarkName];
  if (recorderFactory == null) {
    _fallbackToManual('Benchmark $benchmarkName not found.');
    return;
  }

  final Profile profile = await recorderFactory().run();

  if (isInManualMode) {
    // No server is listening; a human will read the console output.
    print(profile);
    return;
  }

  // Harnessed run: POST the profile back to the devicelab server.
  final html.HttpRequest request = await html.HttpRequest.request(
    '/profile-data',
    method: 'POST',
    mimeType: 'application/json',
    sendData: json.encode(profile.toJson()),
  );
  if (request.status != 200) {
    throw Exception(
      'Failed to report profile data to benchmark server. '
      'The server responded with status code ${request.status}.',
    );
  }
}
/// Switches the app into manual mode and renders one button per benchmark so
/// a human can pick which benchmark to run.
///
/// Called when the devicelab server is absent (plain `flutter run`) or could
/// not name a benchmark; [error] explains why and is shown in the panel.
void _fallbackToManual(String error) {
  isInManualMode = true;
  html.document.body.appendHtml('''
    <div id="manual-panel">
      <h3>$error</h3>

      <p>Choose one of the following benchmarks:</p>

      <!-- Absolutely position it so it receives the clicks and not the glasspane -->
      <ul style="position: absolute">
        ${benchmarks.keys
          .map((String name) => '<li><button id="$name">$name</button></li>')
          .join('\n')}
      </ul>
    </div>
  ''',
      // appendHtml sanitizes markup by default; explicitly allow the HTML5
      // elements and the inline style attribute used above.
      validator: html.NodeValidatorBuilder()
        ..allowHtml5()
        ..allowInlineStyles());

  for (final String benchmarkName in benchmarks.keys) {
    // Each button's element id is the benchmark name it launches.
    final html.Element button = html.document.querySelector('#$benchmarkName');
    button.addEventListener('click', (_) {
      // Remove the panel first so it does not interfere with the benchmark.
      final html.Element manualPanel =
          html.document.querySelector('#manual-panel');
      manualPanel?.remove();
      _runBenchmark(benchmarkName);
    });
  }
}
/// Sends an XMLHttpRequest and completes with the raw [html.HttpRequest] once
/// the request loads, regardless of HTTP status code.
///
/// Unlike [html.HttpRequest.request], this lets callers inspect non-2xx
/// responses (e.g. the expected 404 from `/next-benchmark`) instead of
/// receiving an error. The future completes with an error only on a network
/// failure. [method] defaults to `'GET'` when omitted.
Future<html.HttpRequest> requestXhr(
  String url, {
  String method,
  bool withCredentials,
  String responseType,
  String mimeType,
  Map<String, String> requestHeaders,
  dynamic sendData,
}) {
  final Completer<html.HttpRequest> completer = Completer<html.HttpRequest>();
  final html.HttpRequest xhr = html.HttpRequest();

  xhr.open(method ?? 'GET', url, async: true);

  // Apply only the options the caller supplied.
  if (withCredentials != null) {
    xhr.withCredentials = withCredentials;
  }
  if (responseType != null) {
    xhr.responseType = responseType;
  }
  if (mimeType != null) {
    xhr.overrideMimeType(mimeType);
  }
  requestHeaders?.forEach(xhr.setRequestHeader);

  // Complete on load no matter the status; error only on network failure.
  xhr.onLoad.listen((html.ProgressEvent e) {
    completer.complete(xhr);
  });
  xhr.onError.listen(completer.completeError);

  if (sendData != null) {
    xhr.send(sendData);
  } else {
    xhr.send();
  }
  return completer.future;
}
dev/benchmarks/macrobenchmarks/pubspec.yaml
View file @
423459da
...
...
@@ -3,7 +3,7 @@ description: Performance benchmarks using flutter drive.
environment
:
# The pub client defaults to an <2.0.0 sdk constraint which we need to explicitly overwrite.
sdk
:
"
>=2.
0.0-dev.68.0
<3.0.0"
sdk
:
"
>=2.
2.2
<3.0.0"
dependencies
:
flutter
:
...
...
dev/benchmarks/macrobenchmarks/web/index.html
0 → 100644
View file @
423459da
<!DOCTYPE HTML>
<!-- Copyright 2014 The Flutter Authors. All rights reserved.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file. -->
<html>
<head>
  <meta charset="UTF-8">
  <title>Web Benchmarks</title>
</head>
<body>
  <!-- The compiled entry point of the web benchmark harness, produced by
       `flutter build web -t lib/web_benchmarks.dart`. -->
  <script src="main.dart.js" type="application/javascript"></script>
</body>
</html>
dev/bots/test.dart
View file @
423459da
...
...
@@ -785,6 +785,15 @@ Future<void> _runHostOnlyDeviceLabTests() async {
// TODO(ianh): Move the tests that are not running on devicelab any more out
// of the device lab directory.
// Environment variables applied to the devicelab tests that launch Chrome
// (the web benchmark tasks below).
const Map<String, String> kChromeVariables = <String, String>{
  // This is required to be able to run Chrome on Cirrus and LUCI.
  'CHROME_NO_SANDBOX': 'true',
  // Causes Chrome to run in headless mode in environments without displays,
  // such as Cirrus and LUCI. Do not use this variable when recording actual
  // benchmark numbers.
  'UNCALIBRATED_SMOKE_TEST': 'true',
};
// List the tests to run.
// We split these into subshards. The tests are randomly distributed into
// those subshards so as to get a uniform distribution of costs, but the
...
...
@@ -812,6 +821,8 @@ Future<void> _runHostOnlyDeviceLabTests() async {
if
(
Platform
.
isMacOS
)
()
=>
_runDevicelabTest
(
'build_ios_framework_module_test'
),
if
(
Platform
.
isMacOS
)
()
=>
_runDevicelabTest
(
'plugin_lint_mac'
),
()
=>
_runDevicelabTest
(
'plugin_test'
,
environment:
gradleEnvironment
),
if
(
Platform
.
isLinux
)
()
=>
_runDevicelabTest
(
'web_benchmarks_html'
,
environment:
kChromeVariables
),
if
(
Platform
.
isLinux
)
()
=>
_runDevicelabTest
(
'web_benchmarks_canvaskit'
,
environment:
kChromeVariables
),
]..
shuffle
(
math
.
Random
(
0
));
final
int
testsPerShard
=
tests
.
length
~/
kDeviceLabShardCount
;
...
...
dev/devicelab/bin/tasks/web_benchmarks_canvaskit.dart
0 → 100644
View file @
423459da
// Copyright 2014 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import
'dart:async'
;
import
'package:flutter_devicelab/framework/framework.dart'
;
import
'package:flutter_devicelab/tasks/web_benchmarks.dart'
;
/// Runs all Web benchmarks using the CanvasKit rendering backend.
Future<void> main() async {
  // `task` awaits the TaskFunction's future, so the closure can simply
  // return the benchmark run's future directly.
  await task(() => runWebBenchmark(useCanvasKit: true));
}
dev/devicelab/bin/tasks/web_benchmarks_html.dart
0 → 100644
View file @
423459da
// Copyright 2014 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import
'dart:async'
;
import
'package:flutter_devicelab/framework/framework.dart'
;
import
'package:flutter_devicelab/tasks/web_benchmarks.dart'
;
/// Runs all Web benchmarks using the HTML rendering backend.
Future<void> main() async {
  // `task` awaits the TaskFunction's future, so the closure can simply
  // return the benchmark run's future directly.
  await task(() => runWebBenchmark(useCanvasKit: false));
}
dev/devicelab/lib/framework/framework.dart
View file @
423459da
...
...
@@ -181,10 +181,10 @@ class TaskResult {
if
(
benchmarkScoreKeys
!=
null
)
{
for
(
final
String
key
in
benchmarkScoreKeys
)
{
if
(!
data
.
containsKey
(
key
))
{
throw
'Invalid
Golem
score key "
$key
". It does not exist in task '
throw
'Invalid
benchmark
score key "
$key
". It does not exist in task '
'result data
${prettyJson.convert(data)}
'
;
}
else
if
(
data
[
key
]
is
!
num
)
{
throw
'Invalid
Golem
score for key "
$key
". It is expected to be a num '
throw
'Invalid
benchmark
score for key "
$key
". It is expected to be a num '
'but was
${data[key].runtimeType}
:
${prettyJson.convert(data[key])}
'
;
}
}
...
...
@@ -212,17 +212,9 @@ class TaskResult {
/// Task-specific JSON data
final
Map
<
String
,
dynamic
>
data
;
/// Keys in [data] that store scores that will be submitted to
Golem
.
/// Keys in [data] that store scores that will be submitted to
Cocoon
.
///
/// Each key is also part of a benchmark's name tracked by Golem.
/// A benchmark name is computed by combining [Task.name] with a key
/// separated by a dot. For example, if a task's name is
/// `"complex_layout__start_up"` and score key is
/// `"engineEnterTimestampMicros"`, the score will be submitted to Golem under
/// `"complex_layout__start_up.engineEnterTimestampMicros"`.
///
/// This convention reduces the amount of configuration that needs to be done
/// to submit benchmark scores to Golem.
/// Each key is also part of a benchmark's name tracked by Cocoon.
final
List
<
String
>
benchmarkScoreKeys
;
/// Whether the task failed.
...
...
dev/devicelab/lib/framework/utils.dart
View file @
423459da
...
...
@@ -304,7 +304,20 @@ Future<int> exec(
String
workingDirectory
,
})
async
{
final
Process
process
=
await
startProcess
(
executable
,
arguments
,
environment:
environment
,
workingDirectory:
workingDirectory
);
await
forwardStandardStreams
(
process
);
final
int
exitCode
=
await
process
.
exitCode
;
if
(
exitCode
!=
0
&&
!
canFail
)
fail
(
'Executable "
$executable
" failed with exit code
$exitCode
.'
);
return
exitCode
;
}
/// Forwards standard out and standard error from [process] to this process'
/// respective outputs.
///
/// Returns a future that completes when both out and error streams a closed.
Future
<
void
>
forwardStandardStreams
(
Process
process
)
{
final
Completer
<
void
>
stdoutDone
=
Completer
<
void
>();
final
Completer
<
void
>
stderrDone
=
Completer
<
void
>();
process
.
stdout
...
...
@@ -320,13 +333,7 @@ Future<int> exec(
print
(
'stderr:
$line
'
);
},
onDone:
()
{
stderrDone
.
complete
();
});
await
Future
.
wait
<
void
>(<
Future
<
void
>>[
stdoutDone
.
future
,
stderrDone
.
future
]);
final
int
exitCode
=
await
process
.
exitCode
;
if
(
exitCode
!=
0
&&
!
canFail
)
fail
(
'Executable "
$executable
" failed with exit code
$exitCode
.'
);
return
exitCode
;
return
Future
.
wait
<
void
>(<
Future
<
void
>>[
stdoutDone
.
future
,
stderrDone
.
future
]);
}
/// Executes a command and returns its standard output as a String.
...
...
dev/devicelab/lib/tasks/web_benchmarks.dart
0 → 100644
View file @
423459da
// Copyright 2014 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import
'dart:async'
;
import
'dart:convert'
show
json
;
import
'dart:io'
as
io
;
import
'package:meta/meta.dart'
;
import
'package:path/path.dart'
as
path
;
import
'package:shelf/shelf.dart'
;
import
'package:shelf/shelf_io.dart'
as
shelf_io
;
import
'package:shelf_static/shelf_static.dart'
;
import
'package:flutter_devicelab/framework/framework.dart'
;
import
'package:flutter_devicelab/framework/utils.dart'
;
/// The port number used by the local benchmark server.
///
/// The next port up (`benchmarkServerPort + 1`) is reserved for Chrome's
/// remote-debugging port when running headless (see [runWebBenchmark]).
const int benchmarkServerPort = 9999;
/// Builds the macrobenchmarks app for the web and runs every benchmark it
/// reports, collecting profile data via a local HTTP server.
///
/// When [useCanvasKit] is true the app is built with the CanvasKit
/// rendering backend (`FLUTTER_WEB_USE_SKIA=true`); otherwise the default
/// HTML backend is used.
///
/// The server handles three kinds of requests from the browser:
///  * `/next-benchmark` — hands the browser the next benchmark name to run;
///  * `/profile-data` — receives one benchmark's JSON profile;
///  * anything else — served statically from the app's `build/web` output.
///
/// Returns a [TaskResult] whose scores are keyed
/// `<benchmark>.<backend>.averageDrawFrameDuration` (microseconds).
Future<TaskResult> runWebBenchmark({ @required bool useCanvasKit }) async {
  final String macrobenchmarksDirectory = path.join('${flutterDirectory.path}', 'dev', 'benchmarks', 'macrobenchmarks');
  return await inDirectory(macrobenchmarksDirectory, () async {
    await evalFlutter('build', options: <String>[
      'web',
      if (useCanvasKit)
        '--dart-define=FLUTTER_WEB_USE_SKIA=true',
      '--profile',
      '-t',
      'lib/web_benchmarks.dart',
    ], environment: <String, String>{
      'FLUTTER_WEB': 'true',
    });
    // Completes with all collected profiles once the browser reports it has
    // no more benchmarks to run (or with an error on any failure).
    final Completer<List<Map<String, dynamic>>> profileData = Completer<List<Map<String, dynamic>>>();
    final List<Map<String, dynamic>> collectedProfiles = <Map<String, dynamic>>[];
    // Lazily initialized from the browser's first /next-benchmark request,
    // which carries the list of available benchmarks.
    List<String> benchmarks;
    Iterator<String> benchmarkIterator;
    io.HttpServer server;
    Cascade cascade = Cascade();
    cascade = cascade.add((Request request) async {
      if (request.requestedUri.path.endsWith('/profile-data')) {
        final Map<String, dynamic> profile = json.decode(await request.readAsString()) as Map<String, dynamic>;
        final String benchmarkName = profile['name'] as String;
        if (benchmarkName != benchmarkIterator.current) {
          // FIX: corrected "bechmark" typo in the error message.
          profileData.completeError(Exception(
            'Browser returned benchmark results from a wrong benchmark.\n'
            'Requested to run benchmark ${benchmarkIterator.current}, but '
            'got results for $benchmarkName.',
          ));
          server.close();
        }
        collectedProfiles.add(profile);
        return Response.ok('Profile received');
      } else if (request.requestedUri.path.endsWith('/next-benchmark')) {
        if (benchmarks == null) {
          benchmarks = (json.decode(await request.readAsString()) as List<dynamic>).cast<String>();
          benchmarkIterator = benchmarks.iterator;
        }
        if (benchmarkIterator.moveNext()) {
          final String nextBenchmark = benchmarkIterator.current;
          print('Launching benchmark "$nextBenchmark"');
          return Response.ok(nextBenchmark);
        } else {
          profileData.complete(collectedProfiles);
          return Response.notFound('Finished running benchmarks.');
        }
      } else {
        return Response.notFound('This request is not handled by the profile-data handler.');
      }
    }).add(createStaticHandler(
      path.join('$macrobenchmarksDirectory', 'build', 'web'),
    ));

    server = await io.HttpServer.bind('localhost', benchmarkServerPort);
    io.Process chromeProcess;
    try {
      shelf_io.serveRequests(server, cascade.handler);
      final bool isChromeNoSandbox = io.Platform.environment['CHROME_NO_SANDBOX'] == 'true';
      final String dartToolDirectory = path.join('$macrobenchmarksDirectory/.dart_tool');
      // Use a throwaway user-data dir so a local Chrome profile cannot
      // affect benchmark numbers.
      final String userDataDir = io.Directory(dartToolDirectory).createTempSync('chrome_user_data_').path;
      final List<String> args = <String>[
        '--user-data-dir=$userDataDir',
        'http://localhost:$benchmarkServerPort/index.html',
        if (isChromeNoSandbox)
          '--no-sandbox',
        '--window-size=1024,1024',
        '--disable-extensions',
        '--disable-popup-blocking',
        // Indicates that the browser is in "browse without sign-in" (Guest session) mode.
        '--bwsi',
        '--no-first-run',
        '--no-default-browser-check',
        '--disable-default-apps',
        '--disable-translate',
      ];
      final bool isUncalibratedSmokeTest = io.Platform.environment['UNCALIBRATED_SMOKE_TEST'] == 'true';
      if (isUncalibratedSmokeTest) {
        print('Running in headless mode because running on uncalibrated hardware.');
        args.add('--headless');
        // When running in headless mode Chrome exits immediately unless
        // a debug port is specified.
        args.add('--remote-debugging-port=${benchmarkServerPort + 1}');
      }
      chromeProcess = await startProcess(
        _findSystemChromeExecutable(),
        args,
        workingDirectory: cwd,
      );
      bool receivedProfileData = false;
      // If Chrome dies before any profile data arrives, surface that as the
      // benchmark failure instead of hanging forever on profileData.future.
      chromeProcess.exitCode.then((int exitCode) {
        if (!receivedProfileData) {
          // FIX: corrected "existed" -> "exited" in the error message.
          profileData.completeError(Exception(
            'Chrome process exited prematurely with exit code $exitCode',
          ));
        }
      });
      forwardStandardStreams(chromeProcess);
      print('Waiting for the benchmark to report benchmark profile.');
      final String backend = useCanvasKit ? 'canvaskit' : 'html';
      final Map<String, dynamic> taskResult = <String, dynamic>{};
      final List<String> benchmarkScoreKeys = <String>[];
      final List<Map<String, dynamic>> profiles = await profileData.future;
      print('Received profile data');
      receivedProfileData = true;
      for (final Map<String, dynamic> profile in profiles) {
        final String benchmarkName = profile['name'] as String;
        final String benchmarkScoreKey = '$benchmarkName.$backend.averageDrawFrameDuration';
        taskResult[benchmarkScoreKey] = profile['averageDrawFrameDuration'].toDouble(); // micros
        taskResult['$benchmarkName.$backend.drawFrameDurationNoise'] = profile['drawFrameDurationNoise'].toDouble(); // micros
        benchmarkScoreKeys.add(benchmarkScoreKey);
      }
      return TaskResult.success(taskResult, benchmarkScoreKeys: benchmarkScoreKeys);
    } finally {
      server.close();
      chromeProcess?.kill();
    }
  });
}
/// Returns the path to the system-installed Chrome executable.
///
/// On macOS the standard application-bundle binary path is returned; on
/// Linux the binary is located with `which google-chrome`. Any other
/// operating system throws, as does a failed Linux lookup.
String _findSystemChromeExecutable() {
  if (io.Platform.isMacOS) {
    return '/Applications/Google Chrome.app/Contents/MacOS/Google Chrome';
  }
  if (!io.Platform.isLinux) {
    throw Exception('Web benchmarks cannot run on ${io.Platform.operatingSystem} yet.');
  }
  final io.ProcessResult lookup = io.Process.runSync('which', <String>['google-chrome']);
  if (lookup.exitCode != 0) {
    throw Exception('Failed to locate system Chrome installation.');
  }
  return (lookup.stdout as String).trim();
}
dev/devicelab/manifest.yaml
View file @
423459da
...
...
@@ -703,6 +703,22 @@ tasks:
stage
:
devicelab
required_agent_capabilities
:
[
"
linux/android"
]
web_benchmarks_html
:
description
:
>
Runs Web benchmarks on Chrome on a Linux machine using the HTML rendering backend.
stage
:
devicelab
required_agent_capabilities
:
[
"
linux-vm"
]
# TODO(yjbanov): This is a new test. Marking temporarily as flaky while debugging on devicelab.
flaky
:
true
web_benchmarks_canvaskit
:
description
:
>
Runs Web benchmarks on Chrome on a Linux machine using the CanvasKit rendering backend.
stage
:
devicelab
required_agent_capabilities
:
[
"
linux-vm"
]
# TODO(yjbanov): This is a new test. Marking temporarily as flaky while debugging on devicelab.
flaky
:
true
# run_without_leak_linux:
# description: >
# Checks that `flutter run` does not leak dart on Linux.
...
...
examples/flutter_gallery/pubspec.yaml
View file @
423459da
...
...
@@ -10,11 +10,11 @@ dependencies:
collection
:
1.14.11
device_info
:
0.4.1+4
intl
:
0.16.1
connectivity
:
0.4.6+
2
connectivity
:
0.4.6+
1
string_scanner
:
1.0.5
url_launcher
:
5.
4.1
url_launcher
:
5.
2.7
cupertino_icons
:
0.1.3
video_player
:
0.10.
5+3
video_player
:
0.10.
4+2
scoped_model
:
1.0.1
shrine_images
:
1.1.2
...
...
@@ -29,12 +29,9 @@ dependencies:
source_span
:
1.5.5
# THIS LINE IS AUTOGENERATED - TO UPDATE USE "flutter update-packages --force-upgrade"
term_glyph
:
1.1.0
# THIS LINE IS AUTOGENERATED - TO UPDATE USE "flutter update-packages --force-upgrade"
typed_data
:
1.1.6
# THIS LINE IS AUTOGENERATED - TO UPDATE USE "flutter update-packages --force-upgrade"
url_launcher_macos
:
0.0.1+2
# THIS LINE IS AUTOGENERATED - TO UPDATE USE "flutter update-packages --force-upgrade"
url_launcher_platform_interface
:
1.0.5
# THIS LINE IS AUTOGENERATED - TO UPDATE USE "flutter update-packages --force-upgrade"
url_launcher_web
:
0.1.0+2
# THIS LINE IS AUTOGENERATED - TO UPDATE USE "flutter update-packages --force-upgrade"
vector_math
:
2.0.8
# THIS LINE IS AUTOGENERATED - TO UPDATE USE "flutter update-packages --force-upgrade"
video_player_platform_interface
:
1.0.4
# THIS LINE IS AUTOGENERATED - TO UPDATE USE "flutter update-packages --force-upgrade"
video_player_web
:
0.1.2
# THIS LINE IS AUTOGENERATED - TO UPDATE USE "flutter update-packages --force-upgrade"
dev_dependencies
:
flutter_test
:
...
...
@@ -271,4 +268,4 @@ flutter:
-
asset
:
packages/flutter_gallery_assets/fonts/merriweather/Merriweather-Regular.ttf
-
asset
:
packages/flutter_gallery_assets/fonts/merriweather/Merriweather-Light.ttf
# PUBSPEC CHECKSUM:
27c
8
# PUBSPEC CHECKSUM:
ed6
8
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment