Commit be99a04d authored by Michael Goderbauer, committed by GitHub

Make bot tests portable to also run on Windows (#7954)

parent d89f4386
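The gist of the change: the main test runner moves from bash (dev/bots/test.sh) to Dart (dev/bots/test.dart), which Windows can run, while the Travis-specific setup/upload steps stay in dedicated shell scripts. The portable piece is selecting platform-specific executable names at runtime; a minimal sketch of that pattern (illustrative only, the real script is in the diff below):

import 'dart:io' show Platform;
import 'package:path/path.dart' as p;

void main() {
  // Platform.script points at this script (dev/bots/test.dart in the real
  // layout); three dirname() calls walk up to the repository root.
  String root = p.dirname(p.dirname(p.dirname(p.fromUri(Platform.script))));
  // Windows ships the flutter launcher as a batch file and the Dart VM
  // binary with an .exe suffix; POSIX systems use the bare names.
  String flutter =
      p.join(root, 'bin', Platform.isWindows ? 'flutter.bat' : 'flutter');
  print('Using flutter launcher: $flutter');
}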
.travis.yml:

@@ -11,9 +11,10 @@ install:
 - gem install coveralls-lcov
 - npm install -g firebase-tools@">=3.0.4 <3.1.0"
 before_script:
-- ./dev/bots/setup.sh
+- ./dev/bots/travis_setup.sh
 script:
-- ./dev/bots/test.sh
+- ./bin/cache/dart-sdk/bin/dart ./dev/bots/test.dart
+- ./dev/bots/travis_upload.sh
 after_success:
 - (cd packages/flutter && coveralls-lcov coverage/lcov.info)
 cache:
dev/bots/pubspec.yaml (new file):

name: tests_on_bots
description: Script to run all tests on bots.
dependencies:
  path: ^1.4.0
dev/bots/setup.sh:

 #!/bin/bash
-set -e
-echo $KEY_FILE | base64 --decode > ../gcloud_key_file.json
+# TODO(goderbauer): delete this when all callsites are updated to travis_setup.sh.
-set -x
-if [ -n "$TRAVIS" ] && [ "$TRAVIS_PULL_REQUEST" == "false" ]; then
-  export CLOUDSDK_CORE_DISABLE_PROMPTS=1
-  curl https://sdk.cloud.google.com | bash
-fi
-# disable analytics on the bots and download Flutter dependencies
-./bin/flutter config --no-analytics
-# run pub get in all the repo packages
-./bin/flutter update-packages
+./dev/bots/travis_setup.sh
dev/bots/test.dart (new file):

import 'dart:async';
import 'dart:io';
import 'package:path/path.dart' as p;
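// Locate the Flutter repo root from this script's own location, then pick
// the platform-appropriate launchers: on Windows the flutter tool is a
// batch file (flutter.bat) and the Dart VM binary carries an .exe suffix.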
String flutterRoot = p.dirname(p.dirname(p.dirname(p.fromUri(Platform.script))));
String flutter = p.join(flutterRoot, 'bin', Platform.isWindows ? 'flutter.bat' : 'flutter');
String dart = p.join(flutterRoot, 'bin', 'cache', 'dart-sdk', 'bin', Platform.isWindows ? 'dart.exe' : 'dart');
String flutterTestArgs = Platform.environment['FLUTTER_TEST_ARGS'];
/// When you call this, you can set FLUTTER_TEST_ARGS to pass custom
/// arguments to flutter test. For example, you might want to call this
/// script using FLUTTER_TEST_ARGS=--local-engine=host_debug_unopt to
/// use your own build of the engine.
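///
/// For example, .travis.yml runs this script from the repo root as:
///
///   ./bin/cache/dart-sdk/bin/dart ./dev/bots/test.dart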
Future<Null> main() async {
// Analyze all the Dart code in the repo.
await _runFlutterAnalyze(flutterRoot,
options: <String>['--flutter-repo'],
);
// Verify that the tests actually return failure on failure and success on success.
String automatedTests = p.join(flutterRoot, 'dev', 'automated_tests');
await _runFlutterTest(automatedTests,
script: p.join('test_smoke_test', 'fail_test.dart'),
expectFailure: true,
printOutput: false,
);
await _runFlutterTest(automatedTests,
script: p.join('test_smoke_test', 'pass_test.dart'),
printOutput: false,
);
await _runFlutterTest(automatedTests,
script: p.join('test_smoke_test', 'crash1_test.dart'),
expectFailure: true,
printOutput: false,
);
await _runFlutterTest(automatedTests,
script: p.join('test_smoke_test', 'crash2_test.dart'),
expectFailure: true,
printOutput: false,
);
await _runFlutterTest(automatedTests,
script: p.join('test_smoke_test', 'syntax_error_test.broken_dart'),
expectFailure: true,
printOutput: false,
);
await _runFlutterTest(automatedTests,
script: p.join('test_smoke_test', 'missing_import_test.broken_dart'),
expectFailure: true,
printOutput: false,
);
await _runCmd(flutter, <String>['drive', '--use-existing-app', '-t', p.join('test_driver', 'failure.dart')],
workingDirectory: p.join(flutterRoot, 'packages', 'flutter_driver'),
expectFailure: true,
printOutput: false,
);
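// Collect coverage only on Travis builds of master (not on pull requests),
// mirroring the COVERAGE_FLAG logic of the old test.sh.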
List<String> coverageFlags = <String>[];
if (Platform.environment['TRAVIS'] != null && Platform.environment['TRAVIS_PULL_REQUEST'] == 'false')
coverageFlags.add('--coverage');
// Run tests.
await _runFlutterTest(p.join(flutterRoot, 'packages', 'flutter'),
options: coverageFlags,
);
await _runAllDartTests(p.join(flutterRoot, 'packages', 'flutter_driver'));
await _runFlutterTest(p.join(flutterRoot, 'packages', 'flutter_test'));
await _runAllDartTests(p.join(flutterRoot, 'packages', 'flutter_tools'),
environment: <String, String>{ 'FLUTTER_ROOT': flutterRoot },
);
await _runAllDartTests(p.join(flutterRoot, 'dev', 'devicelab'));
await _runFlutterTest(p.join(flutterRoot, 'dev', 'manual_tests'));
await _runFlutterTest(p.join(flutterRoot, 'examples', 'hello_world'));
await _runFlutterTest(p.join(flutterRoot, 'examples', 'layers'));
await _runFlutterTest(p.join(flutterRoot, 'examples', 'stocks'));
await _runFlutterTest(p.join(flutterRoot, 'examples', 'flutter_gallery'));
await _runCmd(dart, <String>[p.join(flutterRoot, 'dev', 'tools', 'mega_gallery.dart')],
workingDirectory: flutterRoot,
);
await _runFlutterAnalyze(p.join(flutterRoot, 'dev', 'benchmarks', 'mega_gallery'),
options: <String>['--watch', '--benchmark'],
);
print('\x1B[32mDONE: All tests successful.\x1B[0m');
}
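/// Starts [executable] with [arguments], optionally in [workingDirectory]
/// with extra [environment] variables, and terminates the whole bot run
/// when the exit code does not match [expectFailure].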
Future<Null> _runCmd(String executable, List<String> arguments, {
String workingDirectory,
Map<String, String> environment,
bool expectFailure: false,
bool printOutput: true,
}) async {
String cmd = '${p.relative(executable)} ${arguments.join(' ')}';
print('>>> RUNNING in \x1B[34m${p.relative(workingDirectory)}\x1B[0m: \x1B[33m$cmd\x1B[0m');
Process process = await Process.start(executable, arguments,
workingDirectory: workingDirectory,
environment: environment,
);
if (printOutput) {
stdout.addStream(process.stdout);
stderr.addStream(process.stderr);
}
int exitCode = await process.exitCode;
if ((exitCode == 0) == expectFailure) {
print(
'\x1B[31m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\x1B[0m\n'
'\x1B[1mERROR:\x1B[31m Last command exited with $exitCode (expected: ${expectFailure ? 'non-zero' : 'zero'}).\x1B[0m\n'
'\x1B[31m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\x1B[0m'
);
exit(1);
}
}
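/// Runs `flutter test` in [workingDirectory], forwarding FLUTTER_TEST_ARGS
/// and any extra [options], optionally on a single test [script].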
Future<Null> _runFlutterTest(String workingDirectory, {
String script,
bool expectFailure: false,
bool printOutput: true,
List<String> options: const <String>[],
}) {
List<String> args = <String>['test']..addAll(options);
if (flutterTestArgs != null)
args.add(flutterTestArgs);
if (script != null)
args.add(script);
return _runCmd(flutter, args,
workingDirectory: workingDirectory,
expectFailure: expectFailure,
printOutput: printOutput,
);
}
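/// Runs a package's aggregate `test/all.dart` suite on the Dart VM in
/// checked mode.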
Future<Null> _runAllDartTests(String workingDirectory, {
Map<String, String> environment,
}) {
List<String> args = <String>['--checked', p.join('test', 'all.dart')];
return _runCmd(dart, args,
workingDirectory: workingDirectory,
environment: environment,
);
}
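/// Runs `flutter analyze` with the given [options] in [workingDirectory].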
Future<Null> _runFlutterAnalyze(String workingDirectory, {
List<String> options: const <String>[]
}) {
return _runCmd(flutter, <String>['analyze']..addAll(options),
workingDirectory: workingDirectory,
);
}
dev/bots/test.sh:

 #!/bin/bash
-# When you call this, you can set FLUTTER_TEST_ARGS to pass custom
-# arguments to flutter test. For example, you might want to call this
-# script using FLUTTER_TEST_ARGS=--local-engine=host_debug_unopt to
-# use your own build of the engine.
-#
-# On Travis, this script additionally collects coverage and uploads
-# the coverage and the generated documentation to the cloud.
+# TODO(goderbauer): delete this when all callsites are updated to test.dart.
-export PATH="$PWD/bin:$PWD/bin/cache/dart-sdk/bin:$PATH"
-trap detect_error_on_exit EXIT HUP INT QUIT TERM
-detect_error_on_exit() {
-  exit_code=$?
-  { set +x; } 2>/dev/null
-  if [[ $exit_code -ne 0 ]]; then
-    echo -e "\x1B[31m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\x1B[0m"
-    echo -e "\x1B[1mError:\x1B[31m script exited early due to error ($exit_code)\x1B[0m"
-    echo -e "\x1B[31m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\x1B[0m"
-  fi
-}
-set -ex
-# analyze all the Dart code in the repo
-flutter analyze --flutter-repo
-# verify that the tests actually return failure on failure and success on success
-(cd dev/automated_tests; ! flutter test $FLUTTER_TEST_ARGS test_smoke_test/fail_test.dart > /dev/null)
-(cd dev/automated_tests; flutter test $FLUTTER_TEST_ARGS test_smoke_test/pass_test.dart > /dev/null)
-(cd dev/automated_tests; ! flutter test $FLUTTER_TEST_ARGS test_smoke_test/crash1_test.dart > /dev/null)
-(cd dev/automated_tests; ! flutter test $FLUTTER_TEST_ARGS test_smoke_test/crash2_test.dart > /dev/null)
-(cd dev/automated_tests; ! flutter test $FLUTTER_TEST_ARGS test_smoke_test/syntax_error_test.broken_dart > /dev/null)
-(cd dev/automated_tests; ! flutter test $FLUTTER_TEST_ARGS test_smoke_test/missing_import_test.broken_dart > /dev/null)
-(cd packages/flutter_driver; ! flutter drive --use-existing-app -t test_driver/failure.dart >/dev/null 2>&1)
-COVERAGE_FLAG=
-if [ -n "$TRAVIS" ] && [ "$TRAVIS_PULL_REQUEST" == "false" ]; then
-  COVERAGE_FLAG=--coverage
-fi
-SRC_ROOT=$PWD
-# run tests
-(cd packages/flutter; flutter test $FLUTTER_TEST_ARGS $COVERAGE_FLAG)
-(cd packages/flutter_driver; dart -c test/all.dart)
-(cd packages/flutter_test; flutter test)
-(cd packages/flutter_tools; FLUTTER_ROOT=$SRC_ROOT dart -c test/all.dart)
-(cd dev/devicelab; dart -c test/all.dart)
-(cd dev/manual_tests; flutter test)
-(cd examples/hello_world; flutter test)
-(cd examples/layers; flutter test)
-(cd examples/stocks; flutter test)
-(cd examples/flutter_gallery; flutter test)
-# generate and analyze our large sample app
-dart dev/tools/mega_gallery.dart
-(cd dev/benchmarks/mega_gallery; flutter analyze --watch --benchmark)
-if [ -n "$COVERAGE_FLAG" ]; then
-  GSUTIL=$HOME/google-cloud-sdk/bin/gsutil
-  GCLOUD=$HOME/google-cloud-sdk/bin/gcloud
-  $GCLOUD auth activate-service-account --key-file ../gcloud_key_file.json
-  STORAGE_URL=gs://flutter_infra/flutter/coverage/lcov.info
-  $GSUTIL cp packages/flutter/coverage/lcov.info $STORAGE_URL
-fi
-# generate the API docs, upload them
-dev/bots/docs.sh
+./bin/cache/dart-sdk/bin/dart ./dev/bots/test.dart
dev/bots/travis_setup.sh (new file):

#!/bin/bash
set -e
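# Decode the service-account key from the KEY_FILE environment variable;
# travis_upload.sh later uses it to authenticate gcloud for the upload.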
echo $KEY_FILE | base64 --decode > ../gcloud_key_file.json
set -x
if [ -n "$TRAVIS" ] && [ "$TRAVIS_PULL_REQUEST" == "false" ]; then
  export CLOUDSDK_CORE_DISABLE_PROMPTS=1
  curl https://sdk.cloud.google.com | bash
fi
# disable analytics on the bots and download Flutter dependencies
./bin/flutter config --no-analytics
# run pub get in all the repo packages
./bin/flutter update-packages
dev/bots/travis_upload.sh (new file):

#!/bin/bash
set -ex
export PATH="$PWD/bin:$PWD/bin/cache/dart-sdk/bin:$PATH"
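# Upload coverage only on Travis builds of master (not pull requests), and
# only if the test run actually produced an lcov file.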
LCOV_FILE=./packages/flutter/coverage/lcov.info
if [ -n "$TRAVIS" ] && [ "$TRAVIS_PULL_REQUEST" == "false" ] && [ -a "$LCOV_FILE" ]; then
  GSUTIL=$HOME/google-cloud-sdk/bin/gsutil
  GCLOUD=$HOME/google-cloud-sdk/bin/gcloud
  $GCLOUD auth activate-service-account --key-file ../gcloud_key_file.json
  STORAGE_URL=gs://flutter_infra/flutter/coverage/lcov.info
  $GSUTIL cp $LCOV_FILE $STORAGE_URL
fi
# generate the API docs, upload them
./dev/bots/docs.sh