Commit fde985b3 authored by Yegor, committed by GitHub

resurrect analyzer benchmarks (#10668)

* resurrect analyzer benchmarks

* move analyzer_benchmark to the more stable linux/android

* report average rather than best result
parent 8bf17192
@@ -6,15 +6,7 @@ import 'dart:async';
import 'package:flutter_devicelab/tasks/analysis.dart';
import 'package:flutter_devicelab/framework/framework.dart';
import 'package:flutter_devicelab/framework/utils.dart';

Future<Null> main() async {
  final String revision = await getCurrentFlutterRepoCommit();
  final DateTime revisionTimestamp = await getFlutterRepoCommitTimestamp(revision);
  final String dartSdkVersion = await getDartVersion();
  await task(createAnalyzerCliTest(
    sdk: dartSdkVersion,
    commit: revision,
    timestamp: revisionTimestamp,
  ));
  await task(analyzerBenchmarkTask);
}
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

import 'dart:async';

import 'package:flutter_devicelab/tasks/analysis.dart';
import 'package:flutter_devicelab/framework/framework.dart';
import 'package:flutter_devicelab/framework/utils.dart';

Future<Null> main() async {
  final String revision = await getCurrentFlutterRepoCommit();
  final DateTime revisionTimestamp = await getFlutterRepoCommitTimestamp(revision);
  final String dartSdkVersion = await getDartVersion();
  await task(createAnalyzerServerTest(
    sdk: dartSdkVersion,
    commit: revision,
    timestamp: revisionTimestamp,
  ));
}
// Copyright (c) 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

import 'dart:async';

import 'framework.dart';

/// A benchmark harness used to run a benchmark multiple times and report the
/// best result.
abstract class Benchmark {
  Benchmark(this.name);

  final String name;

  TaskResult bestResult;

  Future<Null> init() => new Future<Null>.value();

  Future<num> run();

  TaskResult get lastResult;

  @override
  String toString() => name;
}

/// Runs a [benchmark] [iterations] times and reports the best result.
///
/// Use [warmUpBenchmark] to discard cold performance results.
Future<num> runBenchmark(Benchmark benchmark, {
  int iterations: 1,
  bool warmUpBenchmark: false
}) async {
  await benchmark.init();
  final List<num> allRuns = <num>[];
  num minValue;
  if (warmUpBenchmark)
    await benchmark.run();
  while (iterations > 0) {
    iterations--;
    print('');
    try {
      final num result = await benchmark.run();
      allRuns.add(result);
      if (minValue == null || result < minValue) {
        benchmark.bestResult = benchmark.lastResult;
        minValue = result;
      }
    } catch (error) {
      print('benchmark failed with error: $error');
    }
  }
  return minValue;
}
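For orientation, here is a minimal sketch of how this harness is consumed (an editor's aside, not part of this commit; `_NoopBenchmark`, `demo`, and the recorded value are hypothetical): a subclass overrides `run()` to return a lower-is-better number, and `runBenchmark` keeps the `TaskResult` of the fastest iteration in `bestResult`.

// Editor's sketch, not part of this commit: a hypothetical Benchmark subclass.
class _NoopBenchmark extends Benchmark {
  _NoopBenchmark() : super('noop');

  TaskResult _last;

  @override
  TaskResult get lastResult => _last;

  @override
  Future<num> run() async {
    final Stopwatch stopwatch = new Stopwatch()..start();
    // A real subclass would run `flutter analyze` (or similar) here.
    stopwatch.stop();
    _last = new TaskResult.success(<String, dynamic>{'elapsedMs': stopwatch.elapsedMilliseconds});
    return stopwatch.elapsedMilliseconds;
  }
}

Future<Null> demo() async {
  final Benchmark benchmark = new _NoopBenchmark();
  // One discarded warm-up run, then three timed iterations; the TaskResult of
  // the fastest iteration ends up in benchmark.bestResult.
  await runBenchmark(benchmark, iterations: 3, warmUpBenchmark: true);
}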
@@ -380,35 +380,6 @@ void checkNotNull(Object o1,
    throw 'o10 is null';
}

/// Add benchmark values to a JSON results file.
///
/// If the file contains information about how long the benchmark took to run
/// (a `time` field), then return that info.
// TODO(yjbanov): move this data to __metadata__
num addBuildInfo(File jsonFile,
    {num expected, String sdk, String commit, DateTime timestamp}) {
  Map<String, dynamic> json;

  if (jsonFile.existsSync())
    json = JSON.decode(jsonFile.readAsStringSync());
  else
    json = <String, dynamic>{};

  if (expected != null)
    json['expected'] = expected;
  if (sdk != null)
    json['sdk'] = sdk;
  if (commit != null)
    json['commit'] = commit;
  if (timestamp != null)
    json['timestamp'] = timestamp.millisecondsSinceEpoch;

  jsonFile.writeAsStringSync(jsonEncode(json));

  // Return the elapsed time of the benchmark (if any).
  return json['time'];
}
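As a usage sketch (editor's aside, not part of this commit; the SDK version, commit hash, and wrapper function are placeholders), the old analyzer tasks called this helper roughly like so to stamp build metadata into `analysis_benchmark.json` and read back the analyzer's self-reported `time`:

// Editor's sketch, not part of this commit; values below are hypothetical.
num recordBuildInfoExample() {
  final File results =
      file(path.join(flutterDirectory.path, 'analysis_benchmark.json'));
  // Writes expected/sdk/commit/timestamp into the JSON and returns its
  // `time` field, if the analyzer wrote one.
  return addBuildInfo(
    results,
    expected: 25.0,                // baseline used by the old CLI task
    sdk: '1.24.0-dev.0.0',         // hypothetical Dart SDK version string
    commit: 'abcdef0123',          // hypothetical Flutter commit hash
    timestamp: new DateTime.now(),
  );
}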
/// Splits [from] into lines and selects those that contain [pattern].
Iterable<String> grep(Pattern pattern, {@required String from}) {
  return from.split('\n').where((String line) {
......
@@ -5,112 +5,98 @@
import 'dart:async';
import 'dart:io';

import 'package:meta/meta.dart';
import 'package:path/path.dart' as path;

import '../framework/benchmarks.dart';
import '../framework/framework.dart';
import '../framework/utils.dart';

TaskFunction createAnalyzerCliTest({
  @required String sdk,
  @required String commit,
  @required DateTime timestamp,
}) {
  return new AnalyzerCliTask(sdk, commit, timestamp);
}

/// Run each benchmark this many times and compute average.
const int _kRunsPerBenchmark = 3;

TaskFunction createAnalyzerServerTest({
  @required String sdk,
  @required String commit,
  @required DateTime timestamp,
}) {
  return new AnalyzerServerTask(sdk, commit, timestamp);
}

/// Runs a benchmark once and reports the result as a lower-is-better numeric
/// value.
typedef Future<double> _Benchmark();

abstract class AnalyzerTask {
  Benchmark benchmark;

/// Path to the generated "mega gallery" app.
Directory get _megaGalleryDirectory => dir(path.join(Directory.systemTemp.path, 'mega_gallery'));

  Future<TaskResult> call() async {
    section(benchmark.name);
    await runBenchmark(benchmark, iterations: 3, warmUpBenchmark: true);
    return benchmark.bestResult;
  }
}

Future<TaskResult> analyzerBenchmarkTask() async {
  await inDirectory(flutterDirectory, () async {
    rmTree(_megaGalleryDirectory);
    mkdirs(_megaGalleryDirectory);
    await dart(<String>['dev/tools/mega_gallery.dart', '--out=${_megaGalleryDirectory.path}']);
  });

class AnalyzerCliTask extends AnalyzerTask {
  AnalyzerCliTask(String sdk, String commit, DateTime timestamp) {
    benchmark = new FlutterAnalyzeBenchmark(sdk, commit, timestamp);
  }
}

  final Map<String, dynamic> data = <String, dynamic>{
    'flutter_repo_batch': await _run(new _FlutterRepoBenchmark()),
    'flutter_repo_watch': await _run(new _FlutterRepoBenchmark(watch: true)),
    'mega_gallery_batch': await _run(new _MegaGalleryBenchmark()),
    'mega_gallery_watch': await _run(new _MegaGalleryBenchmark(watch: true)),
  };

class AnalyzerServerTask extends AnalyzerTask {
  AnalyzerServerTask(String sdk, String commit, DateTime timestamp) {
    benchmark = new FlutterAnalyzeAppBenchmark(sdk, commit, timestamp);
  }

  return new TaskResult.success(data, benchmarkScoreKeys: data.keys.toList());
}
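For reference, a sketch of the payload the new task reports (an editor's aside, not part of this commit): the four keys above become the `benchmarkScoreKeys`, and each value is the average wall-clock time in seconds for that variant; the numbers below are made up.

// Editor's sketch, not part of this commit; numeric values are hypothetical.
final Map<String, dynamic> exampleData = <String, dynamic>{
  'flutter_repo_batch': 95.3,
  'flutter_repo_watch': 101.7,
  'mega_gallery_batch': 9.2,
  'mega_gallery_watch': 10.8,
};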
class FlutterAnalyzeBenchmark extends Benchmark {
  FlutterAnalyzeBenchmark(this.sdk, this.commit, this.timestamp)
      : super('flutter analyze --flutter-repo');

  final String sdk;
  final String commit;
  final DateTime timestamp;

/// Times how long it takes to analyze the Flutter repository.
class _FlutterRepoBenchmark {
  _FlutterRepoBenchmark({ this.watch = false });

  File get benchmarkFile =>
      file(path.join(flutterDirectory.path, 'analysis_benchmark.json'));

  final bool watch;

  @override
  TaskResult get lastResult => new TaskResult.successFromFile(benchmarkFile);

  @override
  Future<num> run() async {
    rm(benchmarkFile);

  Future<double> call() async {
    section('Analyze Flutter repo ${watch ? 'with watcher' : ''}');
    final Stopwatch stopwatch = new Stopwatch();
    await inDirectory(flutterDirectory, () async {
      await flutter('analyze', options: <String>[
      final List<String> options = <String>[
        '--flutter-repo',
        '--benchmark',
      ]);
      ];

      if (watch)
        options.add('--watch');

      stopwatch.start();
      await flutter('analyze', options: options);
      stopwatch.stop();
    });
    return addBuildInfo(benchmarkFile,
        timestamp: timestamp, expected: 25.0, sdk: sdk, commit: commit);
    return stopwatch.elapsedMilliseconds / 1000;
  }
}

class FlutterAnalyzeAppBenchmark extends Benchmark {
  FlutterAnalyzeAppBenchmark(this.sdk, this.commit, this.timestamp)
      : super('analysis server mega_gallery');

/// Times how long it takes to analyze the generated "mega_gallery" app.
class _MegaGalleryBenchmark {
  _MegaGalleryBenchmark({ this.watch = false });

  final String sdk;
  final String commit;
  final DateTime timestamp;

  final bool watch;

  @override
  TaskResult get lastResult => new TaskResult.successFromFile(benchmarkFile);

  Future<double> call() async {
    section('Analyze mega gallery ${watch ? 'with watcher' : ''}');
    final Stopwatch stopwatch = new Stopwatch();
    await inDirectory(_megaGalleryDirectory, () async {
      final List<String> options = <String>[
        '--benchmark',
      ];

  Directory get megaDir => dir(
      path.join(flutterDirectory.path, 'dev/benchmarks/mega_gallery'));
  File get benchmarkFile =>
      file(path.join(megaDir.path, 'analysis_benchmark.json'));

      if (watch)
        options.add('--watch');

  @override
  Future<Null> init() {
    return inDirectory(flutterDirectory, () async {
      await dart(<String>['dev/tools/mega_gallery.dart']);

      stopwatch.start();
      await flutter('analyze', options: options);
      stopwatch.stop();
    });
    return stopwatch.elapsedMilliseconds / 1000;
  }
}

  @override
  Future<num> run() async {
    rm(benchmarkFile);
    await inDirectory(megaDir, () async {
      await flutter('analyze', options: <String>[
        '--watch',
        '--benchmark',
      ]);
    });
    return addBuildInfo(benchmarkFile,
        timestamp: timestamp, expected: 10.0, sdk: sdk, commit: commit);

/// Runs a [benchmark] several times and reports the average result.
Future<double> _run(_Benchmark benchmark) async {
  double total = 0.0;
  for (int i = 0; i < _kRunsPerBenchmark; i++) {
    // Delete cached analysis results.
    rmTree(dir('${Platform.environment['HOME']}/.dartServer'));
    total += await benchmark();
  }
  final double average = total / _kRunsPerBenchmark;
  return average;
}
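To make the averaging concrete (editor's note, not part of this commit; the timings are made up):

// Editor's sketch, not part of this commit: worked example of the averaging.
// With _kRunsPerBenchmark = 3 and timed runs of 9.0 s, 9.5 s, and 10.0 s:
//   total   = 9.0 + 9.5 + 10.0 = 28.5
//   average = 28.5 / 3         = 9.5   // reported under that variant's key
// The ~/.dartServer cache is wiped before every run so each timing starts cold.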
@@ -50,18 +50,6 @@ tasks:
    stage: devicelab
    required_agent_capabilities: ["has-android-device"]

  analyzer_cli__analysis_time:
    description: >
      Measures the speed of analyzing Flutter itself in batch mode.
    stage: devicelab
    required_agent_capabilities: ["has-android-device"]

  analyzer_server__analysis_time:
    description: >
      Measures the speed of analyzing Flutter itself in server mode.
    stage: devicelab
    required_agent_capabilities: ["has-android-device"]

  # Android on-device tests

  complex_layout_scroll_perf__timeline_summary:
@@ -297,3 +285,9 @@ tasks:
      Measures memory usage after Android app suspend and resume.
    stage: devicelab
    required_agent_capabilities: ["linux/android"]

  analyzer_benchmark:
    description: >
      Measures the speed of the Dart analyzer.
    stage: devicelab
    required_agent_capabilities: ["linux/android"]