Unverified Commit ec93c51e authored by Jonah Williams's avatar Jonah Williams Committed by GitHub

[flutter_tools] add support for faster incremental build (#56067)

Introduce BuildSystem.buildIncremental, which keeps an in-memory cache of timestamps instead of file hashes. This will be used to implement localization generation through the same build system API.
parent b039fc7c
...@@ -19,7 +19,7 @@ import '../base/utils.dart'; ...@@ -19,7 +19,7 @@ import '../base/utils.dart';
import '../cache.dart'; import '../cache.dart';
import '../convert.dart'; import '../convert.dart';
import 'exceptions.dart'; import 'exceptions.dart';
import 'file_hash_store.dart'; import 'file_store.dart';
import 'source.dart'; import 'source.dart';
export 'source.dart'; export 'source.dart';
...@@ -48,8 +48,9 @@ class BuildSystemConfig { ...@@ -48,8 +48,9 @@ class BuildSystemConfig {
/// of at least one of the environment values and zero or more local values. /// of at least one of the environment values and zero or more local values.
/// ///
/// To determine if the action for a target needs to be executed, the /// To determine if the action for a target needs to be executed, the
/// [BuildSystem] performs a hash of the file contents for both inputs and /// [BuildSystem] computes a key of the file contents for both inputs and
/// outputs. This is tracked separately in the [FileHashStore]. /// outputs. This is tracked separately in the [FileStore]. The key may
/// be either an md5 hash of the file contents or a timestamp.
/// ///
/// A Target has both implicit and explicit inputs and outputs. Only the /// A Target has both implicit and explicit inputs and outputs. Only the
/// later are safe to evaluate before invoking the [buildAction]. For example, /// later are safe to evaluate before invoking the [buildAction]. For example,
...@@ -507,10 +508,10 @@ class BuildSystem { ...@@ -507,10 +508,10 @@ class BuildSystem {
environment.buildDir.createSync(recursive: true); environment.buildDir.createSync(recursive: true);
environment.outputDir.createSync(recursive: true); environment.outputDir.createSync(recursive: true);
// Load file hash store from previous builds. // Load file store from previous builds.
final FileHashStore fileCache = FileHashStore( final File cacheFile = environment.buildDir.childFile(FileStore.kFileCache);
environment: environment, final FileStore fileCache = FileStore(
fileSystem: _fileSystem, cacheFile: cacheFile,
logger: _logger, logger: _logger,
)..initialize(); )..initialize();
...@@ -569,6 +570,55 @@ class BuildSystem { ...@@ -569,6 +570,55 @@ class BuildSystem {
); );
} }
static final Expando<FileStore> _incrementalFileStore = Expando<FileStore>();
/// Performs an incremental build of `target` and all of its dependencies.
///
/// Incremental builds use a timestamp-based [FileStore], which is cheaper
/// to compute than content hashes. When [previousBuild] is null, or carries
/// no associated file store, a fresh store is initialized from the on-disk
/// cache; otherwise the store attached to the previous build result is
/// reused so invalidation state is carried across builds in memory.
Future<BuildResult> buildIncremental(
  Target target,
  Environment environment,
  BuildResult previousBuild,
) async {
  environment.buildDir.createSync(recursive: true);
  environment.outputDir.createSync(recursive: true);

  // Prefer the file store carried over from the previous build, if any.
  FileStore fileStore =
      previousBuild == null ? null : _incrementalFileStore[previousBuild];
  if (fileStore == null) {
    fileStore = FileStore(
      cacheFile: environment.buildDir.childFile(FileStore.kFileCache),
      logger: _logger,
      strategy: FileStoreStrategy.timestamp,
    )..initialize();
  }

  final Node targetNode = target._toNode(environment);
  final _BuildInstance instance = _BuildInstance(
    environment: environment,
    fileCache: fileStore,
    buildSystemConfig: const BuildSystemConfig(),
    logger: _logger,
    fileSystem: _fileSystem,
    platform: _platform,
  );

  bool succeeded = true;
  try {
    succeeded = await instance.invokeTarget(targetNode);
  } finally {
    // Roll the current keys over to "previous" even when the build throws,
    // so the next incremental build sees up-to-date invalidation state.
    fileStore.persistIncremental();
  }
  final BuildResult result = BuildResult(
    success: succeeded,
    exceptions: instance.exceptionMeasurements,
    performance: instance.stepTimings,
  );
  // Attach the store to the result so a follow-up build can reuse it.
  _incrementalFileStore[result] = fileStore;
  return result;
}
/// Write the identifier of the last build into the output directory and /// Write the identifier of the last build into the output directory and
/// remove the previous build's output. /// remove the previous build's output.
/// ///
...@@ -644,7 +694,7 @@ class _BuildInstance { ...@@ -644,7 +694,7 @@ class _BuildInstance {
final Pool resourcePool; final Pool resourcePool;
final Map<String, AsyncMemoizer<bool>> pending = <String, AsyncMemoizer<bool>>{}; final Map<String, AsyncMemoizer<bool>> pending = <String, AsyncMemoizer<bool>>{};
final Environment environment; final Environment environment;
final FileHashStore fileCache; final FileStore fileCache;
final Map<String, File> inputFiles = <String, File>{}; final Map<String, File> inputFiles = <String, File>{};
final Map<String, File> outputFiles = <String, File>{}; final Map<String, File> outputFiles = <String, File>{};
...@@ -718,11 +768,11 @@ class _BuildInstance { ...@@ -718,11 +768,11 @@ class _BuildInstance {
// If we were missing the depfile, resolve input files after executing the // If we were missing the depfile, resolve input files after executing the
// target so that all file hashes are up to date on the next run. // target so that all file hashes are up to date on the next run.
if (node.missingDepfile) { if (node.missingDepfile) {
await fileCache.hashFiles(node.inputs); await fileCache.diffFileList(node.inputs);
} }
// Always update hashes for output files. // Always update hashes for output files.
await fileCache.hashFiles(node.outputs); await fileCache.diffFileList(node.outputs);
node.target._writeStamp(node.inputs, node.outputs, environment); node.target._writeStamp(node.inputs, node.outputs, environment);
updateGraph(); updateGraph();
...@@ -921,16 +971,16 @@ class Node { ...@@ -921,16 +971,16 @@ class Node {
/// Returns whether this target can be skipped. /// Returns whether this target can be skipped.
Future<bool> computeChanges( Future<bool> computeChanges(
Environment environment, Environment environment,
FileHashStore fileHashStore, FileStore fileStore,
FileSystem fileSystem, FileSystem fileSystem,
Logger logger, Logger logger,
) async { ) async {
final Set<String> currentOutputPaths = <String>{ final Set<String> currentOutputPaths = <String>{
for (final File file in outputs) file.path, for (final File file in outputs) file.path,
}; };
// For each input, first determine if we've already computed the hash // For each input, first determine if we've already computed the key
// for it. Then collect it to be sent off for hashing as a group. // for it. Then collect it to be sent off for diffing as a group.
final List<File> sourcesToHash = <File>[]; final List<File> sourcesToDiff = <File>[];
final List<File> missingInputs = <File>[]; final List<File> missingInputs = <File>[];
for (final File file in inputs) { for (final File file in inputs) {
if (!file.existsSync()) { if (!file.existsSync()) {
...@@ -939,26 +989,26 @@ class Node { ...@@ -939,26 +989,26 @@ class Node {
} }
final String absolutePath = file.path; final String absolutePath = file.path;
final String previousHash = fileHashStore.previousHashes[absolutePath]; final String previousAssetKey = fileStore.previousAssetKeys[absolutePath];
if (fileHashStore.currentHashes.containsKey(absolutePath)) { if (fileStore.currentAssetKeys.containsKey(absolutePath)) {
final String currentHash = fileHashStore.currentHashes[absolutePath]; final String currentHash = fileStore.currentAssetKeys[absolutePath];
if (currentHash != previousHash) { if (currentHash != previousAssetKey) {
invalidatedReasons.add(InvalidatedReason.inputChanged); invalidatedReasons.add(InvalidatedReason.inputChanged);
_dirty = true; _dirty = true;
} }
} else { } else {
sourcesToHash.add(file); sourcesToDiff.add(file);
} }
} }
// For each output, first determine if we've already computed the hash // For each output, first determine if we've already computed the key
// for it. Then collect it to be sent off for hashing as a group. // for it. Then collect it to be sent off for diffing as a group.
for (final String previousOutput in previousOutputs) { for (final String previousOutput in previousOutputs) {
// output paths changed. // output paths changed.
if (!currentOutputPaths.contains(previousOutput)) { if (!currentOutputPaths.contains(previousOutput)) {
_dirty = true; _dirty = true;
invalidatedReasons.add(InvalidatedReason.outputSetChanged); invalidatedReasons.add(InvalidatedReason.outputSetChanged);
// if this isn't a current output file there is no reason to compute the hash. // if this isn't a current output file there is no reason to compute the key.
continue; continue;
} }
final File file = fileSystem.file(previousOutput); final File file = fileSystem.file(previousOutput);
...@@ -968,15 +1018,15 @@ class Node { ...@@ -968,15 +1018,15 @@ class Node {
continue; continue;
} }
final String absolutePath = file.path; final String absolutePath = file.path;
final String previousHash = fileHashStore.previousHashes[absolutePath]; final String previousHash = fileStore.previousAssetKeys[absolutePath];
if (fileHashStore.currentHashes.containsKey(absolutePath)) { if (fileStore.currentAssetKeys.containsKey(absolutePath)) {
final String currentHash = fileHashStore.currentHashes[absolutePath]; final String currentHash = fileStore.currentAssetKeys[absolutePath];
if (currentHash != previousHash) { if (currentHash != previousHash) {
invalidatedReasons.add(InvalidatedReason.outputChanged); invalidatedReasons.add(InvalidatedReason.outputChanged);
_dirty = true; _dirty = true;
} }
} else { } else {
sourcesToHash.add(file); sourcesToDiff.add(file);
} }
} }
...@@ -990,10 +1040,10 @@ class Node { ...@@ -990,10 +1040,10 @@ class Node {
invalidatedReasons.add(InvalidatedReason.inputMissing); invalidatedReasons.add(InvalidatedReason.inputMissing);
} }
// If we have files to hash, compute them asynchronously and then // If we have files to diff, compute them asynchronously and then
// update the result. // update the result.
if (sourcesToHash.isNotEmpty) { if (sourcesToDiff.isNotEmpty) {
final List<File> dirty = await fileHashStore.hashFiles(sourcesToHash); final List<File> dirty = await fileStore.diffFileList(sourcesToDiff);
if (dirty.isNotEmpty) { if (dirty.isNotEmpty) {
invalidatedReasons.add(InvalidatedReason.inputChanged); invalidatedReasons.add(InvalidatedReason.inputChanged);
_dirty = true; _dirty = true;
...@@ -1009,10 +1059,10 @@ enum InvalidatedReason { ...@@ -1009,10 +1059,10 @@ enum InvalidatedReason {
/// depfile dependencies, or if a target is incorrectly specified. /// depfile dependencies, or if a target is incorrectly specified.
inputMissing, inputMissing,
/// An input file has an updated hash. /// An input file has an updated key.
inputChanged, inputChanged,
/// An output file has an updated hash. /// An output file has an updated key.
outputChanged, outputChanged,
/// An output file that is expected is missing. /// An output file that is expected is missing.
......
...@@ -63,32 +63,47 @@ class FileHash { ...@@ -63,32 +63,47 @@ class FileHash {
} }
} }
/// A globally accessible cache of file hashes. /// The strategy used by [FileStore] to determine if a file has been
/// invalidated.
enum FileStoreStrategy {
  /// The [FileStore] will compute an md5 hash of the file contents.
  ///
  /// Requires reading every file in full, but detects content changes even
  /// when a tool rewrites a file without altering its bytes' meaning.
  hash,

  /// The [FileStore] will check for differences in the file's last modified
  /// timestamp.
  ///
  /// Only stats each file, making it cheaper than [hash]; used for fast
  /// incremental builds.
  timestamp,
}
/// A globally accessible cache of files.
/// ///
/// In cases where multiple targets read the same source files as inputs, we /// In cases where multiple targets read the same source files as inputs, we
/// avoid recomputing or storing multiple copies of hashes by delegating /// avoid recomputing or storing multiple copies of hashes by delegating
/// through this class. All file hashes are held in memory during a build /// through this class.
/// operation, and persisted to cache in the root build directory. ///
/// This class uses either timestamps or file hashes depending on the
/// provided [FileStoreStrategy]. All information is held in memory during
/// a build operation, and may be persisted to cache in the root build
/// directory.
/// ///
/// The format of the file store is subject to change and not part of its API. /// The format of the file store is subject to change and not part of its API.
class FileHashStore { class FileStore {
FileHashStore({ FileStore({
@required Environment environment, @required File cacheFile,
@required FileSystem fileSystem,
@required Logger logger, @required Logger logger,
}) : _cachePath = environment.buildDir.childFile(_kFileCache).path, FileStoreStrategy strategy = FileStoreStrategy.hash,
_logger = logger, }) : _logger = logger,
_fileSystem = fileSystem; _strategy = strategy,
_cacheFile = cacheFile;
final FileSystem _fileSystem; final File _cacheFile;
final String _cachePath;
final Logger _logger; final Logger _logger;
final FileStoreStrategy _strategy;
final HashMap<String, String> previousHashes = HashMap<String, String>(); final HashMap<String, String> previousAssetKeys = HashMap<String, String>();
final HashMap<String, String> currentHashes = HashMap<String, String>(); final HashMap<String, String> currentAssetKeys = HashMap<String, String>();
// The name of the file which stores the file hashes. // The name of the file which stores the file hashes.
static const String _kFileCache = '.filecache'; static const String kFileCache = '.filecache';
// The current version of the file cache storage format. // The current version of the file cache storage format.
static const int _kVersion = 2; static const int _kVersion = 2;
...@@ -96,16 +111,15 @@ class FileHashStore { ...@@ -96,16 +111,15 @@ class FileHashStore {
/// Read file hashes from disk. /// Read file hashes from disk.
void initialize() { void initialize() {
_logger.printTrace('Initializing file store'); _logger.printTrace('Initializing file store');
final File cacheFile = _fileSystem.file(_cachePath); if (!_cacheFile.existsSync()) {
if (!cacheFile.existsSync()) {
return; return;
} }
Uint8List data; Uint8List data;
try { try {
data = cacheFile.readAsBytesSync(); data = _cacheFile.readAsBytesSync();
} on FileSystemException catch (err) { } on FileSystemException catch (err) {
_logger.printError( _logger.printError(
'Failed to read file store at ${cacheFile.path} due to $err.\n' 'Failed to read file store at ${_cacheFile.path} due to $err.\n'
'Build artifacts will not be cached. Try clearing the cache directories ' 'Build artifacts will not be cached. Try clearing the cache directories '
'with "flutter clean"', 'with "flutter clean"',
); );
...@@ -117,29 +131,28 @@ class FileHashStore { ...@@ -117,29 +131,28 @@ class FileHashStore {
fileStorage = FileStorage.fromBuffer(data); fileStorage = FileStorage.fromBuffer(data);
} on Exception catch (err) { } on Exception catch (err) {
_logger.printTrace('Filestorage format changed: $err'); _logger.printTrace('Filestorage format changed: $err');
cacheFile.deleteSync(); _cacheFile.deleteSync();
return; return;
} }
if (fileStorage.version != _kVersion) { if (fileStorage.version != _kVersion) {
_logger.printTrace('file cache format updating, clearing old hashes.'); _logger.printTrace('file cache format updating, clearing old hashes.');
cacheFile.deleteSync(); _cacheFile.deleteSync();
return; return;
} }
for (final FileHash fileHash in fileStorage.files) { for (final FileHash fileHash in fileStorage.files) {
previousHashes[fileHash.path] = fileHash.hash; previousAssetKeys[fileHash.path] = fileHash.hash;
} }
_logger.printTrace('Done initializing file store'); _logger.printTrace('Done initializing file store');
} }
/// Persist file hashes to disk. /// Persist file marks to disk for a non-incremental build.
void persist() { void persist() {
_logger.printTrace('Persisting file store'); _logger.printTrace('Persisting file store');
final File cacheFile = _fileSystem.file(_cachePath); if (!_cacheFile.existsSync()) {
if (!cacheFile.existsSync()) { _cacheFile.createSync(recursive: true);
cacheFile.createSync(recursive: true);
} }
final List<FileHash> fileHashes = <FileHash>[]; final List<FileHash> fileHashes = <FileHash>[];
for (final MapEntry<String, String> entry in currentHashes.entries) { for (final MapEntry<String, String> entry in currentAssetKeys.entries) {
fileHashes.add(FileHash(entry.key, entry.value)); fileHashes.add(FileHash(entry.key, entry.value));
} }
final FileStorage fileStorage = FileStorage( final FileStorage fileStorage = FileStorage(
...@@ -148,10 +161,10 @@ class FileHashStore { ...@@ -148,10 +161,10 @@ class FileHashStore {
); );
final List<int> buffer = fileStorage.toBuffer(); final List<int> buffer = fileStorage.toBuffer();
try { try {
cacheFile.writeAsBytesSync(buffer); _cacheFile.writeAsBytesSync(buffer);
} on FileSystemException catch (err) { } on FileSystemException catch (err) {
_logger.printError( _logger.printError(
'Failed to persist file store at ${cacheFile.path} due to $err.\n' 'Failed to persist file store at ${_cacheFile.path} due to $err.\n'
'Build artifacts will not be cached. Try clearing the cache directories ' 'Build artifacts will not be cached. Try clearing the cache directories '
'with "flutter clean"', 'with "flutter clean"',
); );
...@@ -159,26 +172,60 @@ class FileHashStore { ...@@ -159,26 +172,60 @@ class FileHashStore {
_logger.printTrace('Done persisting file store'); _logger.printTrace('Done persisting file store');
} }
/// Computes a hash of the provided files and returns a list of entities /// Reset [previousAssetKeys] from [currentAssetKeys] for an incremental build.
/// Rolls the current asset keys over to become the previous keys for the
/// next incremental build pass.
///
/// Unlike [persist], nothing is written to disk; incremental state lives
/// only in memory.
void persistIncremental() {
  previousAssetKeys
    ..clear()
    ..addAll(currentAssetKeys);
  currentAssetKeys.clear();
}
/// Computes a diff of the provided files and returns a list of files
/// that were dirty. /// that were dirty.
Future<List<File>> hashFiles(List<File> files) async { Future<List<File>> diffFileList(List<File> files) async {
final List<File> dirty = <File>[]; final List<File> dirty = <File>[];
final Pool openFiles = Pool(kMaxOpenFiles); switch (_strategy) {
await Future.wait(<Future<void>>[ case FileStoreStrategy.hash:
for (final File file in files) _hashFile(file, dirty, openFiles) final Pool openFiles = Pool(kMaxOpenFiles);
]); await Future.wait(<Future<void>>[
for (final File file in files) _hashFile(file, dirty, openFiles)
]);
break;
case FileStoreStrategy.timestamp:
for (final File file in files) {
_checkModification(file, dirty);
}
break;
}
return dirty; return dirty;
} }
/// Compares [file]'s last-modified timestamp against its stored key and
/// appends the file to [dirty] when it differs or the file is missing.
void _checkModification(File file, List<File> dirty) {
  final String path = file.path;
  // A file that has disappeared is always treated as dirty, and any stale
  // keys for it are dropped from both maps.
  if (!file.existsSync()) {
    currentAssetKeys.remove(path);
    previousAssetKeys.remove(path);
    dirty.add(file);
    return;
  }
  final String currentTimestamp = file.lastModifiedSync().toString();
  if (currentTimestamp != previousAssetKeys[path]) {
    dirty.add(file);
  }
  currentAssetKeys[path] = currentTimestamp;
}
Future<void> _hashFile(File file, List<File> dirty, Pool pool) async { Future<void> _hashFile(File file, List<File> dirty, Pool pool) async {
final PoolResource resource = await pool.request(); final PoolResource resource = await pool.request();
try { try {
final String absolutePath = file.path; final String absolutePath = file.path;
final String previousHash = previousHashes[absolutePath]; final String previousHash = previousAssetKeys[absolutePath];
// If the file is missing it is assumed to be dirty. // If the file is missing it is assumed to be dirty.
if (!file.existsSync()) { if (!file.existsSync()) {
currentHashes.remove(absolutePath); currentAssetKeys.remove(absolutePath);
previousHashes.remove(absolutePath); previousAssetKeys.remove(absolutePath);
dirty.add(file); dirty.add(file);
return; return;
} }
...@@ -187,7 +234,7 @@ class FileHashStore { ...@@ -187,7 +234,7 @@ class FileHashStore {
if (currentHash != previousHash) { if (currentHash != previousHash) {
dirty.add(file); dirty.add(file);
} }
currentHashes[absolutePath] = currentHash; currentAssetKeys[absolutePath] = currentHash;
} finally { } finally {
resource.release(); resource.release();
} }
......
...@@ -356,6 +356,32 @@ void main() { ...@@ -356,6 +356,32 @@ void main() {
expect(called, 1); expect(called, 1);
}); });
// Regression test: an incremental build whose target emits a depfile must
// not re-run the action on a second build when nothing was invalidated.
testWithoutContext('Target with depfile dependency will not run twice without '
'invalidation in incremental builds', () async {
final BuildSystem buildSystem = setUpBuildSystem(fileSystem);
int called = 0;
// The action writes a depfile declaring that a.txt depends on b.txt, and
// counts how many times it executes.
final TestTarget target = TestTarget((Environment environment) async {
environment.buildDir
.childFile('example.d')
.writeAsStringSync('a.txt: b.txt');
fileSystem.file('a.txt').writeAsStringSync('a');
called += 1;
})
..depfiles = <String>['example.d'];
fileSystem.file('b.txt').writeAsStringSync('b');
// First build: no previous result, so the action must run once.
final BuildResult result = await buildSystem
.buildIncremental(target, environment, null);
expect(fileSystem.file('a.txt'), exists);
expect(called, 1);
// Second build is up to date due to depfile parse.
await buildSystem.buildIncremental(target, environment, result);
expect(called, 1);
});
testWithoutContext('output directory is an input to the build', () async { testWithoutContext('output directory is an input to the build', () async {
final Environment environmentA = Environment.test( final Environment environmentA = Environment.test(
fileSystem.currentDirectory, fileSystem.currentDirectory,
......
...@@ -5,73 +5,80 @@ ...@@ -5,73 +5,80 @@
import 'dart:typed_data'; import 'dart:typed_data';
import 'package:file/memory.dart'; import 'package:file/memory.dart';
import 'package:file_testing/file_testing.dart';
import 'package:flutter_tools/src/artifacts.dart'; import 'package:flutter_tools/src/artifacts.dart';
import 'package:flutter_tools/src/base/file_system.dart'; import 'package:flutter_tools/src/base/file_system.dart';
import 'package:flutter_tools/src/base/logger.dart'; import 'package:flutter_tools/src/base/logger.dart';
import 'package:flutter_tools/src/base/terminal.dart'; import 'package:flutter_tools/src/build_system/file_store.dart';
import 'package:flutter_tools/src/build_system/build_system.dart';
import 'package:flutter_tools/src/build_system/file_hash_store.dart';
import 'package:mockito/mockito.dart'; import 'package:mockito/mockito.dart';
import 'package:platform/platform.dart';
import '../../src/common.dart'; import '../../src/common.dart';
import '../../src/fake_process_manager.dart';
void main() { void main() {
Environment environment; testWithoutContext('FileStore initializes file cache', () {
FileSystem fileSystem; final FileSystem fileSystem = MemoryFileSystem.test();
BufferLogger logger; final File cacheFile = fileSystem.file(FileStore.kFileCache);
final FileStore fileCache = FileStore(
setUp(() { cacheFile: cacheFile,
fileSystem = MemoryFileSystem(); logger: BufferLogger.test(),
logger = BufferLogger(
outputPreferences: OutputPreferences.test(),
terminal: AnsiTerminal(stdio: null, platform: FakePlatform())
);
fileSystem.directory('build').createSync();
environment = Environment.test(
fileSystem.currentDirectory,
artifacts: MockArtifacts(),
processManager: FakeProcessManager.any(),
logger: logger,
fileSystem: fileSystem,
);
environment.buildDir.createSync(recursive: true);
});
test('Initializes file cache', () {
final FileHashStore fileCache = FileHashStore(
environment: environment,
fileSystem: fileSystem,
logger: logger,
); );
fileCache.initialize(); fileCache.initialize();
fileCache.persist(); fileCache.persist();
expect(fileSystem.file(fileSystem.path.join(environment.buildDir.path, '.filecache')).existsSync(), true); expect(cacheFile, exists);
final Uint8List buffer = fileSystem.file(fileSystem.path.join(environment.buildDir.path, '.filecache')) final Uint8List buffer = cacheFile.readAsBytesSync();
.readAsBytesSync();
final FileStorage fileStorage = FileStorage.fromBuffer(buffer); final FileStorage fileStorage = FileStorage.fromBuffer(buffer);
expect(fileStorage.files, isEmpty); expect(fileStorage.files, isEmpty);
expect(fileStorage.version, 2); expect(fileStorage.version, 2);
}); });
test('saves and restores to file cache', () async { testWithoutContext('FileStore can use timestamp strategy', () async {
final FileSystem fileSystem = MemoryFileSystem.test();
final File cacheFile = fileSystem.file(FileStore.kFileCache);
final FileStore fileCache = FileStore(
cacheFile: cacheFile,
logger: BufferLogger.test(),
);
fileCache.initialize();
final File file = fileSystem.file('test')..createSync();
// Initial run does not contain any timestamps for file.
expect(await fileCache.diffFileList(<File>[file]), hasLength(1));
// Swap current timestamps to previous timestamps.
fileCache.persistIncremental();
// timestamp matches previous timestamp.
expect(await fileCache.diffFileList(<File>[file]), isEmpty);
// clear current timestamp list.
fileCache.persistIncremental();
// modify the time stamp.
file.writeAsStringSync('foo');
// verify the file is marked as dirty again.
expect(await fileCache.diffFileList(<File>[file]), hasLength(1));
});
testWithoutContext('FileStore saves and restores to file cache', () async {
final FileSystem fileSystem = MemoryFileSystem.test();
final File cacheFile = fileSystem.file(FileStore.kFileCache);
final FileStore fileCache = FileStore(
cacheFile: cacheFile,
logger: BufferLogger.test(),
);
final File file = fileSystem.file('foo.dart') final File file = fileSystem.file('foo.dart')
..createSync() ..createSync()
..writeAsStringSync('hello'); ..writeAsStringSync('hello');
final FileHashStore fileCache = FileHashStore(
environment: environment,
fileSystem: fileSystem,
logger: logger,
);
fileCache.initialize(); fileCache.initialize();
await fileCache.hashFiles(<File>[file]); await fileCache.diffFileList(<File>[file]);
fileCache.persist(); fileCache.persist();
final String currentHash = fileCache.currentHashes[file.path]; final String currentHash = fileCache.currentAssetKeys[file.path];
final Uint8List buffer = fileSystem.file(fileSystem.path.join(environment.buildDir.path, '.filecache')) final Uint8List buffer = cacheFile
.readAsBytesSync(); .readAsBytesSync();
FileStorage fileStorage = FileStorage.fromBuffer(buffer); FileStorage fileStorage = FileStorage.fromBuffer(buffer);
...@@ -79,15 +86,14 @@ void main() { ...@@ -79,15 +86,14 @@ void main() {
expect(fileStorage.files.single.path, file.path); expect(fileStorage.files.single.path, file.path);
final FileHashStore newFileCache = FileHashStore( final FileStore newfileCache = FileStore(
environment: environment, cacheFile: cacheFile,
fileSystem: fileSystem, logger: BufferLogger.test(),
logger: logger,
); );
newFileCache.initialize(); newfileCache.initialize();
expect(newFileCache.currentHashes, isEmpty); expect(newfileCache.currentAssetKeys, isEmpty);
expect(newFileCache.previousHashes['foo.dart'], currentHash); expect(newfileCache.previousAssetKeys['foo.dart'], currentHash);
newFileCache.persist(); newfileCache.persist();
// Still persisted correctly. // Still persisted correctly.
fileStorage = FileStorage.fromBuffer(buffer); fileStorage = FileStorage.fromBuffer(buffer);
...@@ -96,84 +102,78 @@ void main() { ...@@ -96,84 +102,78 @@ void main() {
expect(fileStorage.files.single.path, file.path); expect(fileStorage.files.single.path, file.path);
}); });
test('handles persisting with a missing build directory', () async { testWithoutContext('FileStore handles persisting with a missing build directory', () async {
final FileSystem fileSystem = MemoryFileSystem.test();
final File cacheFile = fileSystem
.directory('example')
.childFile(FileStore.kFileCache)
..createSync(recursive: true);
final FileStore fileCache = FileStore(
cacheFile: cacheFile,
logger: BufferLogger.test(),
);
final File file = fileSystem.file('foo.dart') final File file = fileSystem.file('foo.dart')
..createSync() ..createSync()
..writeAsStringSync('hello'); ..writeAsStringSync('hello');
final FileHashStore fileCache = FileHashStore(
environment: environment,
fileSystem: fileSystem,
logger: logger,
);
fileCache.initialize(); fileCache.initialize();
environment.buildDir.deleteSync(recursive: true);
await fileCache.hashFiles(<File>[file]); cacheFile.parent.deleteSync(recursive: true);
await fileCache.diffFileList(<File>[file]);
expect(() => fileCache.persist(), returnsNormally); expect(() => fileCache.persist(), returnsNormally);
}); });
test('handles hashing missing files', () async { testWithoutContext('FileStore handles hashing missing files', () async {
final FileHashStore fileCache = FileHashStore( final FileSystem fileSystem = MemoryFileSystem.test();
environment: environment, final File cacheFile = fileSystem.file(FileStore.kFileCache);
fileSystem: fileSystem, final FileStore fileCache = FileStore(
logger: logger, cacheFile: cacheFile,
logger: BufferLogger.test(),
); );
fileCache.initialize(); fileCache.initialize();
final List<File> results = await fileCache.hashFiles(<File>[fileSystem.file('hello.dart')]); final List<File> results = await fileCache.diffFileList(<File>[fileSystem.file('hello.dart')]);
expect(results, hasLength(1)); expect(results, hasLength(1));
expect(results.single.path, 'hello.dart'); expect(results.single.path, 'hello.dart');
expect(fileCache.currentHashes, isNot(contains(fileSystem.path.absolute('hello.dart')))); expect(fileCache.currentAssetKeys, isNot(contains(fileSystem.path.absolute('hello.dart'))));
}); });
test('handles failure to persist file cache', () async { testWithoutContext('FileStore handles failure to persist file cache', () async {
final FakeForwardingFileSystem fakeForwardingFileSystem = FakeForwardingFileSystem(fileSystem);
final FileHashStore fileCache = FileHashStore(
environment: environment,
fileSystem: fakeForwardingFileSystem,
logger: logger,
);
final String cacheFile = environment.buildDir.childFile('.filecache').path;
final MockFile mockFile = MockFile(); final MockFile mockFile = MockFile();
final BufferLogger logger = BufferLogger.test();
when(mockFile.writeAsBytesSync(any)).thenThrow(const FileSystemException('Out of space!')); when(mockFile.writeAsBytesSync(any)).thenThrow(const FileSystemException('Out of space!'));
when(mockFile.readAsBytesSync()).thenReturn(Uint8List(0));
when(mockFile.existsSync()).thenReturn(true); when(mockFile.existsSync()).thenReturn(true);
final FileStore fileCache = FileStore(
cacheFile: mockFile,
logger: logger,
);
fileCache.initialize(); fileCache.initialize();
fakeForwardingFileSystem.files[cacheFile] = mockFile;
fileCache.persist(); fileCache.persist();
expect(logger.errorText, contains('Out of space!')); expect(logger.errorText, contains('Out of space!'));
}); });
test('handles failure to restore file cache', () async { testWithoutContext('FileStore handles failure to restore file cache', () async {
final FakeForwardingFileSystem fakeForwardingFileSystem = FakeForwardingFileSystem(fileSystem);
final FileHashStore fileCache = FileHashStore(
environment: environment,
fileSystem: fakeForwardingFileSystem,
logger: logger,
);
final String cacheFile = environment.buildDir.childFile('.filecache').path;
final MockFile mockFile = MockFile(); final MockFile mockFile = MockFile();
final BufferLogger logger = BufferLogger.test();
when(mockFile.readAsBytesSync()).thenThrow(const FileSystemException('Out of space!')); when(mockFile.readAsBytesSync()).thenThrow(const FileSystemException('Out of space!'));
when(mockFile.existsSync()).thenReturn(true); when(mockFile.existsSync()).thenReturn(true);
fakeForwardingFileSystem.files[cacheFile] = mockFile; final FileStore fileCache = FileStore(
cacheFile: mockFile,
logger: logger,
);
fileCache.initialize(); fileCache.initialize();
expect(logger.errorText, contains('Out of space!')); expect(logger.errorText, contains('Out of space!'));
}); });
} }
class FakeForwardingFileSystem extends ForwardingFileSystem {
FakeForwardingFileSystem(FileSystem fileSystem) : super(fileSystem);
final Map<String, File> files = <String, File>{};
@override
File file(dynamic path) => files[path] ?? super.file(path);
}
class MockFile extends Mock implements File {} class MockFile extends Mock implements File {}
class MockArtifacts extends Mock implements Artifacts {} class MockArtifacts extends Mock implements Artifacts {}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment