Commit c2cccc7b authored by Yegor

[tools] teach flutter run to get startup trace info (#3589)

parent bb238eba
...@@ -6,15 +6,10 @@ import 'dart:async'; ...@@ -6,15 +6,10 @@ import 'dart:async';
import 'dart:convert'; import 'dart:convert';
import 'dart:io'; import 'dart:io';
import 'package:json_rpc_2/json_rpc_2.dart' as rpc;
import 'package:path/path.dart' as path;
import 'package:web_socket_channel/io.dart';
import '../android/android_sdk.dart'; import '../android/android_sdk.dart';
import '../application_package.dart'; import '../application_package.dart';
import '../base/os.dart'; import '../base/os.dart';
import '../base/process.dart'; import '../base/process.dart';
import '../base/utils.dart';
import '../build_configuration.dart'; import '../build_configuration.dart';
import '../device.dart'; import '../device.dart';
import '../flx.dart' as flx; import '../flx.dart' as flx;
...@@ -357,59 +352,6 @@ class AndroidDevice extends Device { ...@@ -357,59 +352,6 @@ class AndroidDevice extends Device {
return timeMatch?.group(0); return timeMatch?.group(0);
} }
/// Opens a JSON-RPC connection to the observatory's WebSocket endpoint.
///
/// The observatory is assumed to be reachable on the local loopback
/// interface at [observatoryPort] (e.g. via adb port forwarding).
Future<rpc.Client> _connectToObservatory(int observatoryPort) async {
  final Uri wsUri = new Uri(scheme: 'ws', host: '127.0.0.1', port: observatoryPort, path: 'ws');
  final WebSocket socket = await WebSocket.connect(wsUri.toString());
  final rpc.Client rpcClient = new rpc.Client(new IOWebSocketChannel(socket));
  rpcClient.listen();
  return rpcClient;
}
/// Begins recording timeline events on the running application.
///
/// Connects to the observatory at [observatoryPort], enables the
/// 'Compiler', 'Dart', 'Embedder' and 'GC' timeline streams, then clears
/// any previously buffered events so the recording starts fresh.
/// [apk] is not read here; it is kept for interface compatibility.
/// Prints an error and returns silently if the observatory is unreachable.
Future<Null> startTracing(AndroidApk apk, int observatoryPort) async {
  rpc.Client client;
  try {
    client = await _connectToObservatory(observatoryPort);
  } catch (e) {
    printError('Error connecting to observatory: $e');
    return;
  }

  // Turn on the streams of interest, then discard anything recorded so far.
  await client.sendRequest('_setVMTimelineFlags',
    {'recordedStreams': ['Compiler', 'Dart', 'Embedder', 'GC']}
  );
  await client.sendRequest('_clearVMTimeline');
}
/// Stops timeline recording and saves the captured trace as JSON.
///
/// Writes to [outPath] when given, otherwise generates a unique
/// 'trace.json'-style file in the current directory. Returns the basename
/// of the written file, or null if the observatory could not be reached.
/// [apk] is not read here; it is kept for interface compatibility.
Future<String> stopTracing(AndroidApk apk, int observatoryPort, String outPath) async {
  rpc.Client client;
  try {
    client = await _connectToObservatory(observatoryPort);
  } catch (e) {
    printError('Error connecting to observatory: $e');
    return null;
  }

  // Disable all recorded streams. This must be an actual (empty) list to
  // mirror the list sent by startTracing; the previous code passed the
  // two-character string '[]', which is the wrong parameter type.
  await client.sendRequest('_setVMTimelineFlags', {'recordedStreams': <String>[]});

  File localFile;

  if (outPath != null) {
    localFile = new File(outPath);
  } else {
    localFile = getUniqueFile(Directory.current, 'trace', 'json');
  }

  Map<String, dynamic> response = await client.sendRequest('_getVMTimeline');
  List<dynamic> traceEvents = response['traceEvents'];

  // Stream-encode the events straight into the file to avoid building the
  // whole JSON string in memory.
  IOSink sink = localFile.openWrite();
  Stream<Object> streamIn = new Stream<Object>.fromIterable(<Object>[traceEvents]);
  Stream<List<int>> streamOut = new JsonUtf8Encoder().bind(streamIn);
  await sink.addStream(streamOut);
  await sink.close();

  return path.basename(localFile.path);
}
@override @override
bool isSupported() => true; bool isSupported() => true;
......
...@@ -5,3 +5,8 @@ ...@@ -5,3 +5,8 @@
const int defaultObservatoryPort = 8100; const int defaultObservatoryPort = 8100;
const int defaultDiagnosticPort = 8101; const int defaultDiagnosticPort = 8101;
const int defaultDrivePort = 8183; const int defaultDrivePort = 8183;
// Names of some of the Timeline events we care about
const String flutterEngineMainEnterEventName = 'FlutterEngineMainEnter';
const String frameworkInitEventName = 'Framework initialization';
const String firstUsefulFrameEventName = 'Widgets completed first useful frame';
...@@ -3,6 +3,7 @@ ...@@ -3,6 +3,7 @@
// found in the LICENSE file. // found in the LICENSE file.
import 'dart:async'; import 'dart:async';
import 'dart:convert';
import 'dart:io'; import 'dart:io';
import 'package:path/path.dart' as path; import 'package:path/path.dart' as path;
...@@ -317,6 +318,9 @@ Future<int> startAppStayResident( ...@@ -317,6 +318,9 @@ Future<int> startAppStayResident(
return 2; return 2;
} }
if (traceStartup)
await _downloadStartupTrace(debuggingOptions.observatoryPort, device);
Completer<int> exitCompleter = new Completer<int>(); Completer<int> exitCompleter = new Completer<int>();
void complete(int exitCode) { void complete(int exitCode) {
...@@ -372,6 +376,59 @@ String findMainDartFile([String target]) { ...@@ -372,6 +376,59 @@ String findMainDartFile([String target]) {
return targetPath; return targetPath;
} }
/// Downloads the startup timeline from the app, computes startup timing
/// metrics, and writes them to `build/start_up_info.json`.
///
/// Waits for the first-useful-frame event before downloading. Prints an
/// error and returns without writing if the timeline could not be fetched
/// or the required events are missing from it.
Future<Null> _downloadStartupTrace(int observatoryPort, Device device) async {
  Map<String, dynamic> timeline = await device.stopTracingAndDownloadTimeline(
    observatoryPort,
    waitForFirstFrame: true
  );

  // stopTracingAndDownloadTimeline returns null when it fails to connect to
  // the observatory; guard against dereferencing it below.
  if (timeline == null) {
    printError('Failed to download the timeline. Cannot compute startup time.');
    return null;
  }

  // Finds the timestamp (microseconds) of the first timeline event with the
  // given name, or null if no such event was recorded.
  int extractInstantEventTimestamp(String eventName) {
    List<Map<String, dynamic>> events = timeline['traceEvents'];
    Map<String, dynamic> event = events
      .firstWhere((Map<String, dynamic> event) => event['name'] == eventName, orElse: () => null);
    if (event == null)
      return null;
    return event['ts'];
  }

  int engineEnterTimestampMicros = extractInstantEventTimestamp(flutterEngineMainEnterEventName);
  int frameworkInitTimestampMicros = extractInstantEventTimestamp(frameworkInitEventName);
  int firstFrameTimestampMicros = extractInstantEventTimestamp(firstUsefulFrameEventName);

  if (engineEnterTimestampMicros == null) {
    printError('Engine start event is missing in the timeline. Cannot compute startup time.');
    return null;
  }

  if (firstFrameTimestampMicros == null) {
    printError('First frame event is missing in the timeline. Cannot compute startup time.');
    return null;
  }

  File traceInfoFile = new File('build/start_up_info.json');
  // Ensure the build directory exists; writeAsString does not create parents.
  await traceInfoFile.parent.create(recursive: true);

  int timeToFirstFrameMicros = firstFrameTimestampMicros - engineEnterTimestampMicros;
  Map<String, dynamic> traceInfo = <String, dynamic>{
    'engineEnterTimestampMicros': engineEnterTimestampMicros,
    'timeToFirstFrameMicros': timeToFirstFrameMicros,
  };

  // The framework-init event is optional; include its metric only when seen.
  if (frameworkInitTimestampMicros != null) {
    traceInfo['timeToFrameworkInitMicros'] = frameworkInitTimestampMicros - engineEnterTimestampMicros;
  }

  await traceInfoFile.writeAsString(JSON.encode(traceInfo));

  // Report seconds with two decimals above one second, else whole milliseconds.
  String timeToFirstFrameMessage;
  if (timeToFirstFrameMicros > 1000000) {
    timeToFirstFrameMessage = '${(timeToFirstFrameMicros / 1000000).toStringAsFixed(2)} seconds';
  } else {
    timeToFirstFrameMessage = '${timeToFirstFrameMicros ~/ 1000} milliseconds';
  }

  printStatus('Time to first frame $timeToFirstFrameMessage');
  printStatus('Saved startup trace info in ${traceInfoFile.path}');
}
/// Delay until the Observatory / service protocol is available. /// Delay until the Observatory / service protocol is available.
/// ///
/// This does not fail if we're unable to connect, and times out after the given /// This does not fail if we're unable to connect, and times out after the given
......
...@@ -3,10 +3,12 @@ ...@@ -3,10 +3,12 @@
// found in the LICENSE file. // found in the LICENSE file.
import 'dart:async'; import 'dart:async';
import 'dart:io';
import 'dart:convert';
import '../android/android_device.dart';
import '../application_package.dart';
import '../base/common.dart'; import '../base/common.dart';
import '../base/utils.dart';
import '../device.dart';
import '../globals.dart'; import '../globals.dart';
import '../runner/flutter_command.dart'; import '../runner/flutter_command.dart';
...@@ -34,41 +36,43 @@ class TraceCommand extends FlutterCommand { ...@@ -34,41 +36,43 @@ class TraceCommand extends FlutterCommand {
'time (controlled by --duration), and stop tracing. To explicitly control tracing, call trace\n' 'time (controlled by --duration), and stop tracing. To explicitly control tracing, call trace\n'
'with --start and later with --stop.'; 'with --start and later with --stop.';
@override
bool get androidOnly => true;
@override @override
bool get requiresDevice => true; bool get requiresDevice => true;
@override @override
Future<int> runInProject() async { Future<int> runInProject() async {
ApplicationPackage androidApp = applicationPackages.android; Device device = deviceForCommand;
AndroidDevice device = deviceForCommand;
int observatoryPort = int.parse(argResults['debug-port']); int observatoryPort = int.parse(argResults['debug-port']);
if ((!argResults['start'] && !argResults['stop']) || if ((!argResults['start'] && !argResults['stop']) ||
(argResults['start'] && argResults['stop'])) { (argResults['start'] && argResults['stop'])) {
// Setting neither flags or both flags means do both commands and wait // Setting neither flags or both flags means do both commands and wait
// duration seconds in between. // duration seconds in between.
await device.startTracing(androidApp, observatoryPort); await device.startTracing(observatoryPort);
await new Future<Null>.delayed( await new Future<Null>.delayed(
new Duration(seconds: int.parse(argResults['duration'])), new Duration(seconds: int.parse(argResults['duration'])),
() => _stopTracing(device, androidApp, observatoryPort) () => _stopTracing(device, observatoryPort)
); );
} else if (argResults['stop']) { } else if (argResults['stop']) {
await _stopTracing(device, androidApp, observatoryPort); await _stopTracing(device, observatoryPort);
} else { } else {
await device.startTracing(androidApp, observatoryPort); await device.startTracing(observatoryPort);
} }
return 0; return 0;
} }
Future<Null> _stopTracing(AndroidDevice android, AndroidApk androidApp, int observatoryPort) async { Future<Null> _stopTracing(Device device, int observatoryPort) async {
String tracePath = await android.stopTracing(androidApp, observatoryPort, argResults['out']); Map<String, dynamic> timeline = await device.stopTracingAndDownloadTimeline(observatoryPort);
if (tracePath == null) {
printError('No trace file saved.'); String outPath = argResults['out'];
File localFile;
if (outPath != null) {
localFile = new File(outPath);
} else { } else {
printStatus('Trace file saved to $tracePath'); localFile = getUniqueFile(Directory.current, 'trace', 'json');
} }
await localFile.writeAsString(JSON.encode(timeline));
printStatus('Trace file saved to ${localFile.path}');
} }
} }
...@@ -6,6 +6,9 @@ import 'dart:async'; ...@@ -6,6 +6,9 @@ import 'dart:async';
import 'dart:io'; import 'dart:io';
import 'dart:math' as math; import 'dart:math' as math;
import 'package:json_rpc_2/json_rpc_2.dart' as rpc;
import 'package:web_socket_channel/io.dart';
import 'android/android_device.dart'; import 'android/android_device.dart';
import 'application_package.dart'; import 'application_package.dart';
import 'base/common.dart'; import 'base/common.dart';
...@@ -224,6 +227,82 @@ abstract class Device { ...@@ -224,6 +227,82 @@ abstract class Device {
'${getNameForTargetPlatform(device.platform)}$supportIndicator'); '${getNameForTargetPlatform(device.platform)}$supportIndicator');
} }
} }
/// Opens a bidirectional JSON-RPC peer connection to the observatory's
/// WebSocket endpoint on the local loopback interface.
///
/// A [rpc.Peer] (rather than a plain client) is used so that we can both
/// send requests and receive stream notifications from the VM service.
Future<rpc.Peer> _connectToObservatory(int observatoryPort) async {
  final Uri wsUri = new Uri(scheme: 'ws', host: '127.0.0.1', port: observatoryPort, path: 'ws');
  final WebSocket socket = await WebSocket.connect(wsUri.toString());
  final rpc.Peer rpcPeer = new rpc.Peer(new IOWebSocketChannel(socket));
  rpcPeer.listen();
  return rpcPeer;
}
/// Begins recording timeline events on the running application.
///
/// Connects to the observatory at [observatoryPort], enables the
/// 'Compiler', 'Dart', 'Embedder' and 'GC' timeline streams, then clears
/// any previously buffered events so the recording starts fresh.
/// Prints an error and returns silently if the observatory is unreachable.
Future<Null> startTracing(int observatoryPort) async {
  // _connectToObservatory returns an rpc.Peer; declare the local as such for
  // consistency with stopTracingAndDownloadTimeline (the previous rpc.Client
  // declaration only compiled because Peer implements Client).
  rpc.Peer peer;
  try {
    peer = await _connectToObservatory(observatoryPort);
  } catch (e) {
    printError('Error connecting to observatory: $e');
    return;
  }

  await peer.sendRequest('_setVMTimelineFlags',
    {'recordedStreams': ['Compiler', 'Dart', 'Embedder', 'GC']}
  );
  await peer.sendRequest('_clearVMTimeline');
}
/// Stops tracing and downloads the recorded timeline.
///
/// When [waitForFirstFrame] is true, recording continues until the
/// first-useful-frame timeline event is observed (or a 10 second timeout
/// elapses) before the timeline is fetched; otherwise recording is stopped
/// immediately. Returns the timeline as a JSON-like map, or null if the
/// observatory could not be reached.
Future<Map<String, dynamic>> stopTracingAndDownloadTimeline(int observatoryPort, {bool waitForFirstFrame: false}) async {
  rpc.Peer peer;
  try {
    peer = await _connectToObservatory(observatoryPort);
  } catch (e) {
    printError('Error connecting to observatory: $e');
    return null;
  }

  Future<Map<String, dynamic>> fetchTimeline() async {
    return await peer.sendRequest('_getVMTimeline');
  }

  Map<String, dynamic> timeline;
  if (!waitForFirstFrame) {
    // Stop tracing immediately and get the timeline. The empty list (not
    // the string '[]' the previous code sent) is the correct parameter type
    // to disable all streams, mirroring the list sent by startTracing.
    await peer.sendRequest('_setVMTimelineFlags', {'recordedStreams': <String>[]});
    timeline = await fetchTimeline();
  } else {
    Completer<Null> whenFirstFrameRendered = new Completer<Null>();

    peer.registerMethod('streamNotify', (rpc.Parameters params) {
      Map<String, dynamic> data = params.asMap;
      if (data['streamId'] == 'Timeline') {
        List<Map<String, dynamic>> events = data['event']['timelineEvents'];
        for (Map<String, dynamic> event in events) {
          // Guard against completing twice if the event appears in more
          // than one notification (Completer.complete throws a StateError
          // on the second call).
          if (event['name'] == firstUsefulFrameEventName &&
              !whenFirstFrameRendered.isCompleted) {
            whenFirstFrameRendered.complete();
          }
        }
      }
    });
    await peer.sendRequest('streamListen', {'streamId': 'Timeline'});

    await whenFirstFrameRendered.future.timeout(
      const Duration(seconds: 10),
      onTimeout: () {
        printError(
          'Timed out waiting for the first frame event. Either the '
          'application failed to start, or the event was missed because '
          '"flutter run" took too long to subscribe to timeline events.'
        );
        return null;
      }
    );

    // Fetch the timeline before disabling the streams so the first-frame
    // event is included in the download.
    timeline = await fetchTimeline();
    await peer.sendRequest('_setVMTimelineFlags', {'recordedStreams': <String>[]});
  }

  return timeline;
}
} }
class DebuggingOptions { class DebuggingOptions {
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment