// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

import 'dart:async';

import 'package:async/async.dart';
import 'package:convert/convert.dart';
import 'package:crypto/crypto.dart';
import 'package:meta/meta.dart';
import 'package:pool/pool.dart';

import '../base/context.dart';
import '../base/file_system.dart';
import '../base/platform.dart';
import '../cache.dart';
import '../convert.dart';
import '../globals.dart';
import 'exceptions.dart';
import 'file_hash_store.dart';
import 'source.dart';

export 'source.dart';

/// The current [BuildSystem] instance, resolved through the dependency
/// injection [context].
BuildSystem get buildSystem {
  return context.get<BuildSystem>();
}

/// A reasonable amount of files to open at the same time.
///
/// This number is somewhat arbitrary - it is difficult to detect whether
/// or not we'll run out of file descriptors when using async dart:io
/// APIs.
const int kMaxOpenFiles = 64;

/// Configuration for the build system itself.
class BuildSystemConfig {
  /// Create a new [BuildSystemConfig].
  const BuildSystemConfig({this.resourcePoolSize});

  /// The maximum number of concurrent tasks the build system will run.
  ///
  /// If not provided, defaults to [platform.numberOfProcessors].
  final int resourcePoolSize;
}

/// A Target describes a single step during a flutter build.
///
/// The target inputs are required to be files discoverable via a combination
/// of at least one of the environment values and zero or more local values.
///
/// To determine if the action for a target needs to be executed, the
/// [BuildSystem] performs a hash of the file contents for both inputs and
/// outputs. This is tracked separately in the [FileHashStore].
///
/// A Target has both implicit and explicit inputs and outputs. Only the
/// latter are safe to evaluate before invoking the [buildAction]. For example,
/// a wildcard output pattern requires the outputs to exist before it can
/// glob files correctly.
///
/// - All listed inputs are considered explicit inputs.
/// - Outputs which are provided as [Source.pattern]
///   without wildcards are considered explicit.
/// - The remaining outputs are considered implicit.
///
/// For each target, executing its action creates a corresponding stamp file
/// which records both the input and output files. This file is read by
/// subsequent builds to determine which file hashes need to be checked. If the
/// stamp file is missing, the target's action is always rerun.
///
///  file: `example_target.stamp`
///
/// {
///   "inputs": [
///      "absolute/path/foo",
///      "absolute/path/bar",
///      ...
///    ],
///    "outputs": [
///      "absolute/path/fizz"
///    ]
/// }
///
/// ## Code review
///
/// ### Targets should only depend on files that are provided as inputs
///
/// Example: gen_snapshot must be provided as an input to the aot_elf
/// build steps, even though it isn't a source file. This ensures that changes
/// to the gen_snapshot binary (during a local engine build) correctly
/// trigger a corresponding build update.
///
/// Example: aot_elf has a dependency on the dill and packages file
/// produced by the kernel_snapshot step.
///
/// ### Targets should declare all outputs produced
///
/// If a target produces an output it should be listed, even if it is not
/// intended to be consumed by another target.
///
/// ## Unit testing
///
/// Most targets will invoke an external binary which makes unit testing
/// trickier. It is recommended that for unit testing a Fake is used and
/// provided via the dependency injection system. A [Testbed] may be used to
/// set up the environment before the test is run. Unit tests should fully
/// exercise the rule, ensuring that the existing input and output verification
/// logic can run, as well as verifying it correctly handles provided defines
/// and meets any additional contracts present in the target.
abstract class Target {
  const Target();

  /// The user-readable name of the target.
  ///
  /// This information is surfaced in the assemble commands and used as an
  /// argument to build a particular target.
  String get name;

  /// The dependencies of this target.
  List<Target> get dependencies;

  /// The input [Source]s which are diffed to determine if a target should run.
  List<Source> get inputs;

  /// The output [Source]s which we attempt to verify are correctly produced.
  List<Source> get outputs;

  /// The action which performs this build step.
  Future<void> build(Environment environment);

  /// Create a [Node] with resolved inputs and outputs.
  Node _toNode(Environment environment) {
    final ResolvedFiles resolvedInputs = resolveInputs(environment);
    final ResolvedFiles resolvedOutputs = resolveOutputs(environment);
    return Node(
      this,
      resolvedInputs.sources,
      resolvedOutputs.sources,
      <Node>[
        for (Target dependency in dependencies) dependency._toNode(environment),
      ],
      environment,
      resolvedInputs.containsNewDepfile,
    );
  }

  /// Invoke to remove the stamp file if the [buildAction] threw an exception.
  void clearStamp(Environment environment) {
    final File stamp = _findStampFile(environment);
    if (stamp.existsSync()) {
      stamp.deleteSync();
    }
  }

  /// Records the resolved [inputs] and [outputs] of this target into its
  /// stamp file, so the next build can diff file hashes against them.
  void _writeStamp(
    List<File> inputs,
    List<File> outputs,
    Environment environment,
  ) {
    final Map<String, Object> stampContents = <String, Object>{
      'inputs': <String>[
        for (File input in inputs) input.path,
      ],
      'outputs': <String>[
        for (File output in outputs) output.path,
      ],
    };
    final File stamp = _findStampFile(environment);
    if (!stamp.existsSync()) {
      stamp.createSync();
    }
    stamp.writeAsStringSync(json.encode(stampContents));
  }

  /// Resolve the set of input patterns and functions into a concrete list of
  /// files.
  ResolvedFiles resolveInputs(Environment environment) {
    return _resolveConfiguration(inputs, environment, implicit: true, inputs: true);
  }

  /// Find the current set of declared outputs, including wildcard directories.
  ///
  /// The [implicit] flag controls whether it is safe to evaluate [Source]s
  /// which uses functions, behaviors, or patterns.
  ResolvedFiles resolveOutputs(Environment environment) {
    return _resolveConfiguration(outputs, environment, inputs: false);
  }

  /// Performs a fold across this target and its dependencies.
  T fold<T>(T initialValue, T combine(T previousValue, Target target)) {
    final T dependencyResult = dependencies.fold(
        initialValue, (T prev, Target t) => t.fold(prev, combine));
    return combine(dependencyResult, this);
  }

  /// Convert the target to a JSON structure appropriate for consumption by
  /// external systems.
  ///
  /// This requires constants from the [Environment] to resolve the paths of
  /// inputs and the output stamp.
  Map<String, Object> toJson(Environment environment) {
    return <String, Object>{
      'name': name,
      'dependencies': <String>[
        for (Target target in dependencies) target.name,
      ],
      'inputs': <String>[
        for (File file in resolveInputs(environment).sources) file.path,
      ],
      'outputs': <String>[
        for (File file in resolveOutputs(environment).sources) file.path,
      ],
      'stamp': _findStampFile(environment).absolute.path,
    };
  }

  /// Locate the stamp file for a particular target name and environment.
  File _findStampFile(Environment environment) {
    return environment.buildDir.childFile('$name.stamp');
  }

  /// Visits each [Source] in [config] with a [SourceVisitor], producing the
  /// concrete file set for either the inputs or the outputs of a target.
  static ResolvedFiles _resolveConfiguration(List<Source> config, Environment environment, {
    bool implicit = true, bool inputs = true,
  }) {
    final SourceVisitor collector = SourceVisitor(environment, inputs);
    for (Source source in config) {
      source.accept(collector);
    }
    return collector;
  }
}

/// The [Environment] defines several constants for use during the build.
///
/// The environment contains configuration and file paths that are safe to
/// depend on and reference during the build.
///
/// Example (Good):
///
/// Use the environment to determine where to write an output file.
///
///    environment.buildDir.childFile('output')
///      ..createSync()
///      ..writeAsStringSync('output data');
///
/// Example (Bad):
///
/// Use a hard-coded path or directory relative to the current working
/// directory to write an output file.
///
///   fs.file('build/linux/out')
///     ..createSync()
///     ..writeAsStringSync('output data');
///
/// Example (Good):
///
/// Using the build mode to produce different output. Note that the action
/// is still responsible for outputting a different file, as defined by the
/// corresponding output [Source].
///
///    final BuildMode buildMode = getBuildModeFromDefines(environment.defines);
///    if (buildMode == BuildMode.debug) {
///      environment.buildDir.childFile('debug.output')
///        ..createSync()
///        ..writeAsStringSync('debug');
///    } else {
///      environment.buildDir.childFile('non_debug.output')
///        ..createSync()
///        ..writeAsStringSync('non_debug');
///    }
class Environment {
  /// Create a new [Environment] object.
  ///
  /// Only [projectDir] is required. The remaining environment locations have
  /// defaults based on it.
  factory Environment({
    @required Directory projectDir,
    @required Directory outputDir,
    Directory buildDir,
    Map<String, String> defines = const <String, String>{},
  }) {
    // Compute a unique hash of this build's particular environment.
    // Sort the keys by key so that the result is stable. We always
    // include the engine and dart versions.
    final StringBuffer buffer = StringBuffer();
    for (String key in defines.keys.toList()..sort()) {
      buffer
        ..write(key)
        ..write(defines[key]);
    }
    buffer.write(outputDir.path);
    final Digest digest = md5.convert(utf8.encode(buffer.toString()));
    final String buildPrefix = hex.encode(digest.bytes);

    final Directory rootBuildDir = buildDir ?? projectDir.childDirectory('build');
    return Environment._(
      outputDir: outputDir,
      projectDir: projectDir,
      // Each distinct set of defines gets its own sub-directory so that
      // configuration changes cannot leak between builds.
      buildDir: rootBuildDir.childDirectory(buildPrefix),
      rootBuildDir: rootBuildDir,
      cacheDir: Cache.instance.getRoot(),
      defines: defines,
      flutterRootDir: fs.directory(Cache.flutterRoot),
    );
  }

  Environment._({
    @required this.outputDir,
    @required this.projectDir,
    @required this.buildDir,
    @required this.rootBuildDir,
    @required this.cacheDir,
    @required this.defines,
    @required this.flutterRootDir,
  });

  /// The [Source] value which is substituted with the path to [projectDir].
  static const String kProjectDirectory = '{PROJECT_DIR}';

  /// The [Source] value which is substituted with the path to [buildDir].
  static const String kBuildDirectory = '{BUILD_DIR}';

  /// The [Source] value which is substituted with the path to [cacheDir].
  static const String kCacheDirectory = '{CACHE_DIR}';

  /// The [Source] value which is substituted with a path to the flutter root.
  static const String kFlutterRootDirectory = '{FLUTTER_ROOT}';

  /// The [Source] value which is substituted with a path to [outputDir].
  static const String kOutputDirectory = '{OUTPUT_DIR}';

  /// The `PROJECT_DIR` environment variable.
  ///
  /// This should be root of the flutter project where a pubspec and dart files
  /// can be located.
  final Directory projectDir;

  /// The `BUILD_DIR` environment variable.
  ///
  /// Defaults to `{PROJECT_ROOT}/build`. The root of the output directory where
  /// build step intermediates and outputs are written.
  final Directory buildDir;

  /// The `CACHE_DIR` environment variable.
  ///
  /// Defaults to `{FLUTTER_ROOT}/bin/cache`. The root of the artifact cache for
  /// the flutter tool.
  final Directory cacheDir;

  /// The `FLUTTER_ROOT` environment variable.
  ///
  /// Defaults to the value of [Cache.flutterRoot].
  final Directory flutterRootDir;

  /// The `OUTPUT_DIR` environment variable.
  ///
  /// Must be provided to configure the output location for the final artifacts.
  final Directory outputDir;

  /// Additional configuration passed to the build targets.
  ///
  /// Setting values here forces a unique build directory to be chosen
  /// which prevents the config from leaking into different builds.
  final Map<String, String> defines;

  /// The root build directory shared by all builds.
  final Directory rootBuildDir;
}

/// The result information from the build system.
class BuildResult {
  BuildResult({
    @required this.success,
    this.exceptions = const <String, ExceptionMeasurement>{},
    this.performance = const <String, PerformanceMeasurement>{},
    this.inputFiles = const <File>[],
    this.outputFiles = const <File>[],
  });

  /// Whether every invoked target completed successfully.
  final bool success;

  /// Exceptions thrown while building, keyed by target name.
  final Map<String, ExceptionMeasurement> exceptions;

  /// Timing measurements for each invoked target, keyed by target name.
  final Map<String, PerformanceMeasurement> performance;

  /// The aggregate input files collected across the build.
  final List<File> inputFiles;

  /// The aggregate output files collected across the build.
  final List<File> outputFiles;

  /// Whether any target threw an exception during the build.
  bool get hasException => exceptions.isNotEmpty;
}

/// The build system is responsible for invoking and ordering [Target]s.
class BuildSystem {
  const BuildSystem();

  /// Build `target` and all of its dependencies.
  ///
  /// Creates the build and output directories if necessary, loads the
  /// [FileHashStore] from the previous build, checks the target graph for
  /// cycles, and then invokes the target graph. The file cache is always
  /// persisted, even when the build throws.
  Future<BuildResult> build(
    Target target,
    Environment environment, {
    BuildSystemConfig buildSystemConfig = const BuildSystemConfig(),
  }) async {
    environment.buildDir.createSync(recursive: true);
    environment.outputDir.createSync(recursive: true);

    // Load file hash store from previous builds.
    final FileHashStore fileCache = FileHashStore(environment)
      ..initialize();

    // Perform sanity checks on build.
    checkCycles(target);

    final Node node = target._toNode(environment);
    final _BuildInstance buildInstance = _BuildInstance(environment, fileCache, buildSystemConfig);
    bool passed = true;
    try {
      passed = await buildInstance.invokeTarget(node);
    } finally {
      // Always persist the file cache to disk.
      fileCache.persist();
    }

    // TODO(jonahwilliams): this is a bit of a hack, due to various parts of
    // the flutter tool writing these files unconditionally. Since Xcode uses
    // timestamps to track files, this leads to unnecessary rebuilds if they
    // are included. Once all the places that write these files have been
    // tracked down and moved into assemble, these checks should be removable.
    // We also remove files under .dart_tool, since these are intermediaries
    // and don't need to be tracked by external systems.
    //
    // The same exclusion applies to both inputs and outputs, so a single
    // predicate is shared rather than duplicating the path checks.
    bool excludedFromTracking(String path, File file) {
      return path.contains('.flutter-plugins') ||
             path.contains('xcconfig') ||
             path.contains('.dart_tool');
    }
    buildInstance.inputFiles.removeWhere(excludedFromTracking);
    buildInstance.outputFiles.removeWhere(excludedFromTracking);

    return BuildResult(
      success: passed,
      exceptions: buildInstance.exceptionMeasurements,
      performance: buildInstance.stepTimings,
      inputFiles: buildInstance.inputFiles.values.toList()
          ..sort((File a, File b) => a.path.compareTo(b.path)),
      outputFiles: buildInstance.outputFiles.values.toList()
          ..sort((File a, File b) => a.path.compareTo(b.path)),
    );
  }
}

/// An active instance of a build.
class _BuildInstance {
  _BuildInstance(this.environment, this.fileCache, this.buildSystemConfig)
    : resourcePool = Pool(buildSystemConfig.resourcePoolSize ?? platform?.numberOfProcessors ?? 1);

  final BuildSystemConfig buildSystemConfig;

  /// Bounds how many targets may execute concurrently.
  final Pool resourcePool;

  /// Memoizers ensuring each target executes at most once, even when it
  /// appears multiple times in the dependency graph.
  ///
  /// Typed `AsyncMemoizer<bool>` to match the result of [_invokeInternal];
  /// the previous `AsyncMemoizer<void>` declaration relied on an implicit
  /// downcast when reading entries back in [invokeTarget].
  final Map<String, AsyncMemoizer<bool>> pending = <String, AsyncMemoizer<bool>>{};
  final Environment environment;
  final FileHashStore fileCache;

  /// Aggregate input files of all executed targets, keyed by path.
  final Map<String, File> inputFiles = <String, File>{};

  /// Aggregate output files of all executed targets, keyed by path.
  final Map<String, File> outputFiles = <String, File>{};

  // Timings collected during target invocation.
  final Map<String, PerformanceMeasurement> stepTimings = <String, PerformanceMeasurement>{};

  // Exceptions caught during the build process.
  final Map<String, ExceptionMeasurement> exceptionMeasurements = <String, ExceptionMeasurement>{};

  /// Invokes [node]'s dependencies and then the node itself.
  ///
  /// Returns false if any dependency failed; the node itself is only
  /// executed (at most once, via its memoizer) when all dependencies passed.
  Future<bool> invokeTarget(Node node) async {
    final List<bool> results = await Future.wait(node.dependencies.map(invokeTarget));
    if (results.any((bool result) => !result)) {
      return false;
    }
    final AsyncMemoizer<bool> memoizer = pending[node.target.name] ??= AsyncMemoizer<bool>();
    return memoizer.runOnce(() => _invokeInternal(node));
  }

  Future<bool> _invokeInternal(Node node) async {
    final PoolResource resource = await resourcePool.request();
    final Stopwatch stopwatch = Stopwatch()..start();
    bool passed = true;
    bool skipped = false;

    // The build system produces a list of aggregate input and output
    // files for the overall build. This list is provided to a hosting build
    // system, such as Xcode, to configure logic for when to skip the
    // rule/phase which contains the flutter build.
    //
    // When looking at the inputs and outputs for the individual rules, we need
    // to be careful to remove inputs that were actually output from previous
    // build steps. This indicates that the file is an intermediary. If
    // these files are included as both inputs and outputs then it isn't
    // possible to construct a DAG describing the build.
    void updateGraph() {
      for (File output in node.outputs) {
        outputFiles[output.path] = output;
      }
      for (File input in node.inputs) {
        // NOTE(review): inputs are keyed by their symlink-resolved path while
        // outputs above are keyed by File.path; presumably intermediates are
        // never reached through symlinks — confirm before relying on this.
        final String resolvedPath = input.resolveSymbolicLinksSync();
        if (outputFiles.containsKey(resolvedPath)) {
          continue;
        }
        inputFiles[resolvedPath] = input;
      }
    }

    try {
      // If we're missing a depfile, wait until after evaluating the target to
      // compute changes.
      final bool canSkip = !node.missingDepfile &&
        await node.computeChanges(environment, fileCache);

      if (canSkip) {
        skipped = true;
        printTrace('Skipping target: ${node.target.name}');
        updateGraph();
        return passed;
      }
      printTrace('${node.target.name}: Starting due to ${node.invalidatedReasons}');
      await node.target.build(environment);
      printTrace('${node.target.name}: Complete');

      // If we were missing the depfile, resolve files after executing the
      // target so that all file hashes are up to date on the next run.
      if (node.missingDepfile) {
        node.inputs.clear();
        node.outputs.clear();
        node.inputs.addAll(node.target.resolveInputs(environment).sources);
        node.outputs.addAll(node.target.resolveOutputs(environment).sources);
        await fileCache.hashFiles(node.inputs);
      }

      // Update hashes for output files.
      await fileCache.hashFiles(node.outputs);
      node.target._writeStamp(node.inputs, node.outputs, environment);
      updateGraph();

      // Delete outputs from previous stages that are no longer a part of the
      // build.
      for (String previousOutput in node.previousOutputs) {
        if (outputFiles.containsKey(previousOutput)) {
          continue;
        }
        final File previousFile = fs.file(previousOutput);
        if (previousFile.existsSync()) {
          previousFile.deleteSync();
        }
      }
    } catch (exception, stackTrace) {
      // Remove the stamp so the target is unconditionally rerun next build.
      node.target.clearStamp(environment);
      passed = false;
      skipped = false;
      exceptionMeasurements[node.target.name] = ExceptionMeasurement(
          node.target.name, exception, stackTrace);
    } finally {
      resource.release();
      stopwatch.stop();
      stepTimings[node.target.name] = PerformanceMeasurement(
          node.target.name, stopwatch.elapsedMilliseconds, skipped, passed);
    }
    return passed;
  }
}

/// Helper class to collect exceptions.
class ExceptionMeasurement {
  ExceptionMeasurement(this.target, this.exception, this.stackTrace);

  /// The name of the target that threw.
  final String target;

  /// The thrown object; `dynamic` because anything can be thrown in Dart.
  final dynamic exception;

  /// The stack trace captured where the exception was thrown.
  final StackTrace stackTrace;
}

/// Helper class to collect measurement data.
class PerformanceMeasurement {
  PerformanceMeasurement(this.target, this.elapsedMilliseconds, this.skipped, this.passed);

  /// The name of the measured target.
  final String target;

  /// Wall-clock time spent on the target, including skip checks.
  final int elapsedMilliseconds;

  /// Whether the target's action was skipped because it was up to date.
  final bool skipped;

  /// Whether the target completed without throwing.
  final bool passed;
}

/// Check if there are any dependency cycles in the target.
///
/// Throws a [CycleException] if one is encountered.
void checkCycles(Target initial) {
  // Depth-first traversal: `stack` holds the current path (a repeat means a
  // cycle), `visited` prunes already-verified subgraphs.
  void visit(Target target, Set<Target> visited, Set<Target> stack) {
    if (stack.contains(target)) {
      throw CycleException(stack..add(target));
    }
    if (!visited.add(target)) {
      return;
    }
    stack.add(target);
    for (Target dependency in target.dependencies) {
      visit(dependency, visited, stack);
    }
    stack.remove(target);
  }
  visit(initial, <Target>{}, <Target>{});
}

/// Verifies that all files exist and are in a subdirectory of [Environment.buildDir].
///
/// Throws [MisplacedOutputException] for the first output located outside
/// both the build and project directories, and [MissingOutputException] if
/// any outputs do not exist on disk.
void verifyOutputDirectories(List<File> outputs, Environment environment, Target target) {
  final String buildDirectory = environment.buildDir.resolveSymbolicLinksSync();
  final String projectDirectory = environment.projectDir.resolveSymbolicLinksSync();
  final List<File> missingOutputs = <File>[];
  for (File sourceFile in outputs) {
    if (!sourceFile.existsSync()) {
      missingOutputs.add(sourceFile);
      continue;
    }
    // NOTE(review): the output's own path is compared unresolved against the
    // symlink-resolved directories — presumably outputs are always declared
    // with canonical paths; confirm against callers.
    final String path = sourceFile.path;
    final bool inKnownDirectory =
        path.startsWith(buildDirectory) || path.startsWith(projectDirectory);
    if (!inKnownDirectory) {
      throw MisplacedOutputException(path, target.name);
    }
  }
  if (missingOutputs.isNotEmpty) {
    throw MissingOutputException(missingOutputs, target.name);
  }
}

/// A node in the build graph.
class Node {
  Node(
    this.target,
    this.inputs,
    this.outputs,
    this.dependencies,
    Environment environment,
    this.missingDepfile,
  ) {
    final File stamp = target._findStampFile(environment);

    // If the stamp file doesn't exist, we haven't run this step before and
    // all inputs were added.
    if (!stamp.existsSync()) {
      // No stamp file, not safe to skip.
      _dirty = true;
      return;
    }
    final String content = stamp.readAsStringSync();
    // Something went wrong writing the stamp file.
    if (content == null || content.isEmpty) {
      stamp.deleteSync();
      // Malformed stamp file, not safe to skip.
      _dirty = true;
      return;
    }
    Map<String, Object> values;
    try {
      values = json.decode(content);
    } on FormatException {
      // The json is malformed in some way.
      _dirty = true;
      return;
    }
    // Renamed from `inputs`/`outputs` to avoid shadowing the fields above.
    final Object stampInputs = values['inputs'];
    final Object stampOutputs = values['outputs'];
    if (stampInputs is List<Object> && stampOutputs is List<Object>) {
      // The `is` checks above guarantee non-null, so no null-aware access
      // is needed here.
      stampInputs.cast<String>().forEach(previousInputs.add);
      stampOutputs.cast<String>().forEach(previousOutputs.add);
    } else {
      // The json is malformed in some way.
      _dirty = true;
    }
  }

  /// The resolved input files.
  ///
  /// These files may not yet exist if they are produced by previous steps.
  final List<File> inputs;

  /// The resolved output files.
  ///
  /// These files may not yet exist if the target hasn't run yet.
  final List<File> outputs;

  /// Whether this node is missing a depfile.
  ///
  /// This requires an additional pass of source resolution after the target
  /// has been executed.
  final bool missingDepfile;

  /// The target definition which contains the build action to invoke.
  final Target target;

  /// All of the nodes that this one depends on.
  final List<Node> dependencies;

  /// Output file paths from the previous invocation of this build node.
  final Set<String> previousOutputs = <String>{};

  /// Input file paths from the previous invocation of this build node.
  final Set<String> previousInputs = <String>{};

  /// One or more reasons why a task was invalidated.
  ///
  /// May be empty if the task was skipped.
  final Set<InvalidedReason> invalidatedReasons = <InvalidedReason>{};

  /// Whether this node needs an action performed.
  bool get dirty => _dirty;
  bool _dirty = false;

  /// Collect hashes for all inputs to determine if any have changed.
  ///
  /// Returns whether this target can be skipped.
  Future<bool> computeChanges(
    Environment environment,
    FileHashStore fileHashStore,
  ) async {
    final Set<String> currentOutputPaths = <String>{
      for (File file in outputs) file.path,
    };
    // For each input, first determine if we've already computed the hash
    // for it. Then collect it to be sent off for hashing as a group.
    final List<File> sourcesToHash = <File>[];
    // Paths within [sourcesToHash] that are previous outputs rather than
    // inputs, so a changed hash can be attributed to the correct reason.
    final Set<String> deferredOutputPaths = <String>{};
    final List<File> missingInputs = <File>[];
    for (File file in inputs) {
      if (!file.existsSync()) {
        missingInputs.add(file);
        continue;
      }

      final String absolutePath = file.path;
      final String previousHash = fileHashStore.previousHashes[absolutePath];
      if (fileHashStore.currentHashes.containsKey(absolutePath)) {
        final String currentHash = fileHashStore.currentHashes[absolutePath];
        if (currentHash != previousHash) {
          invalidatedReasons.add(InvalidedReason.inputChanged);
          _dirty = true;
        }
      } else {
        sourcesToHash.add(file);
      }
    }

    // For each output, first determine if we've already computed the hash
    // for it. Then collect it to be sent off for hashing as a group.
    for (String previousOutput in previousOutputs) {
      // output paths changed.
      if (!currentOutputPaths.contains(previousOutput)) {
        _dirty = true;
        invalidatedReasons.add(InvalidedReason.outputSetChanged);
        // if this isn't a current output file there is no reason to compute the hash.
        continue;
      }
      final File file = fs.file(previousOutput);
      if (!file.existsSync()) {
        invalidatedReasons.add(InvalidedReason.outputMissing);
        _dirty = true;
        continue;
      }
      final String absolutePath = file.path;
      final String previousHash = fileHashStore.previousHashes[absolutePath];
      if (fileHashStore.currentHashes.containsKey(absolutePath)) {
        final String currentHash = fileHashStore.currentHashes[absolutePath];
        if (currentHash != previousHash) {
          invalidatedReasons.add(InvalidedReason.outputChanged);
          _dirty = true;
        }
      } else {
        sourcesToHash.add(file);
        deferredOutputPaths.add(absolutePath);
      }
    }

    // If we depend on a file that doesn't exist on disk, kill the build.
    if (missingInputs.isNotEmpty) {
      throw MissingInputException(missingInputs, target.name);
    }

    // If we have files to hash, compute them asynchronously and then
    // update the result. Previously every dirty file here was reported as
    // [InvalidedReason.inputChanged] even when it was a deferred output.
    if (sourcesToHash.isNotEmpty) {
      final List<File> dirty = await fileHashStore.hashFiles(sourcesToHash);
      for (File file in dirty) {
        invalidatedReasons.add(deferredOutputPaths.contains(file.path)
            ? InvalidedReason.outputChanged
            : InvalidedReason.inputChanged);
        _dirty = true;
      }
    }
    return !_dirty;
  }
}

/// A description of why a task was rerun.
// NOTE(review): "Invalided" appears to be a misspelling of "Invalidated";
// the name is kept as-is since other libraries may reference this enum.
enum InvalidedReason {
  /// An input file has an updated hash.
  inputChanged,

  /// An output file has an updated hash.
  outputChanged,

  /// An output file that is expected is missing.
  outputMissing,

  /// The set of expected output files changed.
  outputSetChanged,
}