Unverified commit f877c97b authored by Jonah Williams, committed by GitHub

Use persisted build information to automatically clean old outputs in assemble (#39654)

parent 03e81003
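The core of this change: each target's build-graph node now loads the inputs and outputs recorded in its stamp file from the previous run, and after the target rebuilds, any previously recorded output that the current build no longer produces is deleted. A minimal sketch of that cleanup step, using plain dart:io and a hypothetical deleteStaleOutputs helper in place of the tool's fs abstraction and the _BuildInstance bookkeeping:

import 'dart:io';

/// Deletes files that the previous run produced but the current run no
/// longer declares as outputs.
void deleteStaleOutputs(Set<String> previousOutputs, Set<String> currentOutputs) {
  for (final String previousOutput in previousOutputs) {
    if (currentOutputs.contains(previousOutput)) {
      continue; // Still produced by this build; keep it.
    }
    final File staleFile = File(previousOutput);
    if (staleFile.existsSync()) {
      staleFile.deleteSync();
    }
  }
}

void main() {
  // Hypothetical paths: bar.out was written by the last build but is no
  // longer listed as an output, so it would be removed.
  deleteStaleOutputs(
    <String>{'/app/build/foo.out', '/app/build/bar.out'},
    <String>{'/app/build/foo.out'},
  );
}

With this in place, targets such as UnpackMacOS and MacOSBundleFlutterAssets no longer need to blow away their whole output directories before each run, which is why the deleteSync(recursive: true) blocks are removed further down in the diff.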
@@ -115,96 +115,19 @@ abstract class Target {
/// The action which performs this build step.
Future<void> build(Environment environment);
/// Collect hashes for all inputs to determine if any have changed.
///
/// Returns whether this target can be skipped.
Future<bool> computeChanges(
List<File> inputs,
Environment environment,
FileHashStore fileHashStore,
) async {
final File stamp = _findStampFile(environment);
final Set<String> previousInputs = <String>{};
final List<String> previousOutputs = <String>[];
bool canSkip = true;
// If the stamp file doesn't exist, we haven't run this step before and
// all inputs were added.
if (stamp.existsSync()) {
final String content = stamp.readAsStringSync();
// Something went wrong writing the stamp file.
if (content == null || content.isEmpty) {
stamp.deleteSync();
// Malformed stamp file, not safe to skip.
canSkip = false;
} else {
final Map<String, Object> values = json.decode(content);
final List<Object> inputs = values['inputs'];
final List<Object> outputs = values['outputs'];
inputs.cast<String>().forEach(previousInputs.add);
outputs.cast<String>().forEach(previousOutputs.add);
}
} else {
// No stamp file, not safe to skip.
canSkip = false;
}
// For each input, first determine if we've already computed the hash
// for it. Then collect it to be sent off for hashing as a group.
final List<File> sourcesToHash = <File>[];
final List<File> missingInputs = <File>[];
for (File file in inputs) {
if (!file.existsSync()) {
missingInputs.add(file);
continue;
}
final String absolutePath = file.resolveSymbolicLinksSync();
final String previousHash = fileHashStore.previousHashes[absolutePath];
if (fileHashStore.currentHashes.containsKey(absolutePath)) {
final String currentHash = fileHashStore.currentHashes[absolutePath];
if (currentHash != previousHash) {
canSkip = false;
}
} else {
sourcesToHash.add(file);
}
}
// For each output, first determine if we've already computed the hash
// for it. Then collect it to be sent off for hashing as a group.
for (String previousOutput in previousOutputs) {
final File file = fs.file(previousOutput);
if (!file.existsSync()) {
canSkip = false;
continue;
}
final String absolutePath = file.resolveSymbolicLinksSync();
final String previousHash = fileHashStore.previousHashes[absolutePath];
if (fileHashStore.currentHashes.containsKey(absolutePath)) {
final String currentHash = fileHashStore.currentHashes[absolutePath];
if (currentHash != previousHash) {
canSkip = false;
}
} else {
sourcesToHash.add(file);
}
}
// If we depend on a file that doesn't exist on disk, kill the build.
if (missingInputs.isNotEmpty) {
throw MissingInputException(missingInputs, name);
}
// If we have files to hash, compute them asynchronously and then
// update the result.
if (sourcesToHash.isNotEmpty) {
final List<File> dirty = await fileHashStore.hashFiles(sourcesToHash);
if (dirty.isNotEmpty) {
canSkip = false;
}
}
return canSkip;
/// Create a [Node] with resolved inputs and outputs.
Node _toNode(Environment environment) {
final List<File> inputs = resolveInputs(environment);
final List<File> outputs = resolveOutputs(environment);
return Node(
this,
inputs,
outputs,
<Node>[
for (Target target in dependencies) target._toNode(environment)
],
environment
);
}
/// Invoke to remove the stamp file if the [buildAction] threw an exception;
@@ -253,12 +176,8 @@ abstract class Target {
/// which uses functions, behaviors, or patterns.
List<File> resolveOutputs(
Environment environment,
{ bool implicit = true, }
) {
final List<File> outputEntities = _resolveConfiguration(outputs, environment, implicit: implicit, inputs: false);
if (implicit) {
verifyOutputDirectories(outputEntities, environment, this);
}
final List<File> outputEntities = _resolveConfiguration(outputs, environment, inputs: false);
return outputEntities;
}
@@ -279,9 +198,9 @@ abstract class Target {
'name': name,
'dependencies': dependencies.map((Target target) => target.name).toList(),
'inputs': resolveInputs(environment)
.map((File file) => file.resolveSymbolicLinksSync())
.map((File file) => file.path)
.toList(),
'outputs': resolveOutputs(environment, implicit: false)
'outputs': resolveOutputs(environment)
.map((File file) => file.path)
.toList(),
'stamp': _findStampFile(environment).absolute.path,
@@ -484,10 +403,11 @@ class BuildSystem {
// Perform sanity checks on build.
checkCycles(target);
final Node node = target._toNode(environment);
final _BuildInstance buildInstance = _BuildInstance(environment, fileCache, buildSystemConfig);
bool passed = true;
try {
passed = await buildInstance.invokeTarget(target);
passed = await buildInstance.invokeTarget(node);
} finally {
// Always persist the file cache to disk.
fileCache.persist();
@@ -543,24 +463,23 @@ class _BuildInstance {
// Exceptions caught during the build process.
final Map<String, ExceptionMeasurement> exceptionMeasurements = <String, ExceptionMeasurement>{};
Future<bool> invokeTarget(Target target) async {
final List<bool> results = await Future.wait(target.dependencies.map(invokeTarget));
Future<bool> invokeTarget(Node node) async {
final List<bool> results = await Future.wait(node.dependencies.map(invokeTarget));
if (results.any((bool result) => !result)) {
return false;
}
final AsyncMemoizer<bool> memoizer = pending[target.name] ??= AsyncMemoizer<bool>();
return memoizer.runOnce(() => _invokeInternal(target));
final AsyncMemoizer<bool> memoizer = pending[node.target.name] ??= AsyncMemoizer<bool>();
return memoizer.runOnce(() => _invokeInternal(node));
}
Future<bool> _invokeInternal(Target target) async {
Future<bool> _invokeInternal(Node node) async {
final PoolResource resource = await resourcePool.request();
final Stopwatch stopwatch = Stopwatch()..start();
bool passed = true;
bool skipped = false;
try {
final List<File> inputs = target.resolveInputs(environment);
final bool canSkip = await target.computeChanges(inputs, environment, fileCache);
for (File input in inputs) {
final bool canSkip = await node.computeChanges(environment, fileCache);
for (File input in node.inputs) {
// The build system should produce a list of aggregate input and output
// files for the overall build. The goal is to provide this to a hosting
// build system, such as Xcode, to configure logic for when to skip the
@@ -578,35 +497,39 @@ class _BuildInstance {
}
if (canSkip) {
skipped = true;
printStatus('Skipping target: ${target.name}');
final List<File> outputs = target.resolveOutputs(environment, implicit: true);
for (File output in outputs) {
outputFiles[output.resolveSymbolicLinksSync()] = output;
printStatus('Skipping target: ${node.target.name}');
for (File output in node.outputs) {
outputFiles[output.path] = output;
}
} else {
printStatus('${target.name}: Starting');
await target.build(environment);
printStatus('${target.name}: Complete');
printStatus('${node.target.name}: Starting');
await node.target.build(environment);
printStatus('${node.target.name}: Complete');
final List<File> outputs = target.resolveOutputs(environment, implicit: true);
// Update hashes for output files.
await fileCache.hashFiles(outputs);
target._writeStamp(inputs, outputs, environment);
for (File output in outputs) {
outputFiles[output.resolveSymbolicLinksSync()] = output;
await fileCache.hashFiles(node.outputs);
node.target._writeStamp(node.inputs, node.outputs, environment);
for (File output in node.outputs) {
outputFiles[output.path] = output;
}
// Delete outputs from previous stages that are no longer a part of the build.
for (String previousOutput in node.previousOutputs) {
if (!outputFiles.containsKey(previousOutput)) {
fs.file(previousOutput).deleteSync();
}
}
}
} catch (exception, stackTrace) {
target.clearStamp(environment);
node.target.clearStamp(environment);
passed = false;
skipped = false;
exceptionMeasurements[target.name] = ExceptionMeasurement(
target.name, exception, stackTrace);
exceptionMeasurements[node.target.name] = ExceptionMeasurement(
node.target.name, exception, stackTrace);
} finally {
resource.release();
stopwatch.stop();
stepTimings[target.name] = PerformanceMeasurement(
target.name, stopwatch.elapsedMilliseconds, skipped, passed);
stepTimings[node.target.name] = PerformanceMeasurement(
node.target.name, stopwatch.elapsedMilliseconds, skipped, passed);
}
return passed;
}
@@ -661,7 +584,7 @@ void verifyOutputDirectories(List<File> outputs, Environment environment, Target
missingOutputs.add(sourceFile);
continue;
}
final String path = sourceFile.resolveSymbolicLinksSync();
final String path = sourceFile.path;
if (!path.startsWith(buildDirectory) && !path.startsWith(projectDirectory)) {
throw MisplacedOutputException(path, target.name);
}
@@ -670,3 +593,144 @@ void verifyOutputDirectories(List<File> outputs, Environment environment, Target
throw MissingOutputException(missingOutputs, target.name);
}
}
/// A node in the build graph.
class Node {
Node(this.target, this.inputs, this.outputs, this.dependencies,
Environment environment) {
final File stamp = target._findStampFile(environment);
// If the stamp file doesn't exist, we haven't run this step before and
// all inputs were added.
if (!stamp.existsSync()) {
// No stamp file, not safe to skip.
_dirty = true;
return;
}
final String content = stamp.readAsStringSync();
// Something went wrong writing the stamp file.
if (content == null || content.isEmpty) {
stamp.deleteSync();
// Malformed stamp file, not safe to skip.
_dirty = true;
return;
}
Map<String, Object> values;
try {
values = json.decode(content);
} on FormatException {
// The json is malformed in some way.
_dirty = true;
return;
}
final Object inputs = values['inputs'];
final Object outputs = values['outputs'];
if (inputs is List<Object> && outputs is List<Object>) {
inputs?.cast<String>()?.forEach(previousInputs.add);
outputs?.cast<String>()?.forEach(previousOutputs.add);
} else {
// The json is malformed in some way.
_dirty = true;
}
}
/// The resolved input files.
///
/// These files may not yet exist if they are produced by previous steps.
final List<File> inputs;
/// The resolved output files.
///
/// These files may not yet exist if the target hasn't run yet.
final List<File> outputs;
/// The target definition which contains the build action to invoke.
final Target target;
/// All of the nodes that this one depends on.
final List<Node> dependencies;
/// Output file paths from the previous invocation of this build node.
final Set<String> previousOutputs = <String>{};
/// Input file paths from the previous invocation of this build node.
final Set<String> previousInputs = <String>{};
/// Whether this node needs an action performed.
bool get dirty => _dirty;
bool _dirty = false;
/// Collect hashes for all inputs to determine if any have changed.
///
/// Returns whether this target can be skipped.
Future<bool> computeChanges(
Environment environment,
FileHashStore fileHashStore,
) async {
final Set<String> currentOutputPaths = <String>{
for (File file in outputs) file.path
};
// For each input, first determine if we've already computed the hash
// for it. Then collect it to be sent off for hashing as a group.
final List<File> sourcesToHash = <File>[];
final List<File> missingInputs = <File>[];
for (File file in inputs) {
if (!file.existsSync()) {
missingInputs.add(file);
continue;
}
final String absolutePath = file.path;
final String previousHash = fileHashStore.previousHashes[absolutePath];
if (fileHashStore.currentHashes.containsKey(absolutePath)) {
final String currentHash = fileHashStore.currentHashes[absolutePath];
if (currentHash != previousHash) {
_dirty = true;
}
} else {
sourcesToHash.add(file);
}
}
// For each output, first determine if we've already computed the hash
// for it. Then collect it to be sent off for hashing as a group.
for (String previousOutput in previousOutputs) {
// The output paths have changed since the previous run.
if (!currentOutputPaths.contains(previousOutput)) {
_dirty = true;
// If this isn't a current output file, there is no reason to compute its hash.
continue;
}
final File file = fs.file(previousOutput);
if (!file.existsSync()) {
_dirty = true;
continue;
}
final String absolutePath = file.path;
final String previousHash = fileHashStore.previousHashes[absolutePath];
if (fileHashStore.currentHashes.containsKey(absolutePath)) {
final String currentHash = fileHashStore.currentHashes[absolutePath];
if (currentHash != previousHash) {
_dirty = true;
}
} else {
sourcesToHash.add(file);
}
}
// If we depend on a file that doesn't exist on disk, kill the build.
if (missingInputs.isNotEmpty) {
throw MissingInputException(missingInputs, target.name);
}
// If we have files to hash, compute them asynchronously and then
// update the result.
if (sourcesToHash.isNotEmpty) {
final List<File> dirty = await fileHashStore.hashFiles(sourcesToHash);
if (dirty.isNotEmpty) {
_dirty = true;
}
}
return !_dirty;
}
}
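For reference, the stamp that Node parses back is a small JSON object holding the previous run's input and output paths as two string lists; anything else (a missing file, empty content, malformed JSON, or wrong value types) marks the node dirty. A minimal sketch of producing a stamp in that shape, with hypothetical paths and only dart:convert and dart:io:

import 'dart:convert';
import 'dart:io';

void writeStamp(File stamp, List<String> inputs, List<String> outputs) {
  // Two string lists keyed by 'inputs' and 'outputs' is the shape Node
  // expects; see the type checks in the constructor above.
  stamp.writeAsStringSync(json.encode(<String, Object>{
    'inputs': inputs,
    'outputs': outputs,
  }));
}

void main() {
  final File stamp = File('example.stamp'); // hypothetical stamp location
  writeStamp(
    stamp,
    <String>['/app/foo.dart'],      // hypothetical input path
    <String>['/app/build/foo.out'], // hypothetical output path
  );
  print(stamp.readAsStringSync());
}

The 'reruns build if stamp is corrupted' test below exercises exactly these failure modes: truncated JSON, an empty file, and a stamp whose 'inputs' and 'outputs' values are not lists.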
@@ -29,7 +29,6 @@ class UnpackLinux extends Target {
Source.pattern('{PROJECT_DIR}/linux/flutter/flutter_plugin_registrar.h'),
Source.pattern('{PROJECT_DIR}/linux/flutter/flutter_glfw.h'),
Source.pattern('{PROJECT_DIR}/linux/flutter/icudtl.dat'),
Source.pattern('{PROJECT_DIR}/linux/flutter/cpp_client_wrapper_glfw/*'),
];
@override
......
@@ -122,9 +122,6 @@ abstract class UnpackMacOS extends Target {
final Directory targetDirectory = environment
.outputDir
.childDirectory('FlutterMacOS.framework');
if (targetDirectory.existsSync()) {
targetDirectory.deleteSync(recursive: true);
}
final ProcessResult result = await processManager
.run(<String>['cp', '-R', basePath, targetDirectory.path]);
@@ -325,11 +322,6 @@ abstract class MacOSBundleFlutterAssets extends Target {
final Directory assetDirectory = outputDirectory
.childDirectory('Resources')
.childDirectory('flutter_assets');
// We're not smart enough to only remove assets that are removed. If
// anything changes blow away the whole directory.
if (assetDirectory.existsSync()) {
assetDirectory.deleteSync(recursive: true);
}
assetDirectory.createSync(recursive: true);
final AssetBundle assetBundle = AssetBundleFactory.instance.createBundle();
final int result = await assetBundle.build(
......
@@ -32,7 +32,6 @@ class UnpackWindows extends Target {
Source.pattern('{PROJECT_DIR}/windows/flutter/flutter_plugin_registrar.h'),
Source.pattern('{PROJECT_DIR}/windows/flutter/flutter_windows.h'),
Source.pattern('{PROJECT_DIR}/windows/flutter/icudtl.dat'),
Source.pattern('{PROJECT_DIR}/windows/flutter/cpp_client_wrapper/*'),
];
@override
......
@@ -126,7 +126,7 @@ void main() {
final BuildResult result = await buildSystem.build(badTarget, environment);
expect(result.hasException, true);
expect(result.exceptions.values.single.exception, isInstanceOf<MissingOutputException>());
expect(result.exceptions.values.single.exception, isInstanceOf<FileSystemException>());
}));
test('Saves a stamp file with inputs and outputs', () => testbed.run(() async {
@@ -211,6 +211,52 @@ void main() {
expect(shared, 1);
}));
test('Automatically cleans old outputs when dag changes', () => testbed.run(() async {
final TestTarget testTarget = TestTarget((Environment environment) async {
environment.buildDir.childFile('foo.out').createSync();
})
..inputs = const <Source>[Source.pattern('{PROJECT_DIR}/foo.dart')]
..outputs = const <Source>[Source.pattern('{BUILD_DIR}/foo.out')];
fs.file('foo.dart').createSync();
await buildSystem.build(testTarget, environment);
expect(environment.buildDir.childFile('foo.out').existsSync(), true);
final TestTarget testTarget2 = TestTarget((Environment environment) async {
environment.buildDir.childFile('bar.out').createSync();
})
..inputs = const <Source>[Source.pattern('{PROJECT_DIR}/foo.dart')]
..outputs = const <Source>[Source.pattern('{BUILD_DIR}/bar.out')];
await buildSystem.build(testTarget2, environment);
expect(environment.buildDir.childFile('bar.out').existsSync(), true);
expect(environment.buildDir.childFile('foo.out').existsSync(), false);
}));
test('reruns build if stamp is corrupted', () => testbed.run(() async {
final TestTarget testTarget = TestTarget((Environment environment) async {
environment.buildDir.childFile('foo.out').createSync();
})
..inputs = const <Source>[Source.pattern('{PROJECT_DIR}/foo.dart')]
..outputs = const <Source>[Source.pattern('{BUILD_DIR}/foo.out')];
fs.file('foo.dart').createSync();
await buildSystem.build(testTarget, environment);
// invalid JSON
environment.buildDir.childFile('test.stamp').writeAsStringSync('{X');
await buildSystem.build(testTarget, environment);
// empty file
environment.buildDir.childFile('test.stamp').writeAsStringSync('');
await buildSystem.build(testTarget, environment);
// invalid format
environment.buildDir.childFile('test.stamp').writeAsStringSync('{"inputs": 2, "outputs": 3}');
await buildSystem.build(testTarget, environment);
}));
test('handles a throwing build action', () => testbed.run(() async {
final BuildResult result = await buildSystem.build(fizzTarget, environment);
......