Unverified Commit c1d742b2 authored by amirh, committed by GitHub

Implement MultiFrameImageStreamCompleter - handles scheduling for animated image frames (#12955)

* Implement MultiFrameImageStreamCompleter

* only decode frames while there are active listeners

* review comments followup

* multiply animation frame timer duration by time dilation

* lint
parent e9403815
@@ -3,9 +3,11 @@
// found in the LICENSE file.
import 'dart:async';
import 'dart:ui' as ui show Image;
import 'dart:ui' as ui show Image, Codec, FrameInfo;
import 'dart:ui' show hashValues;
import 'package:flutter/foundation.dart';
import 'package:flutter/scheduler.dart';
/// A [dart:ui.Image] object with its corresponding scale.
///
@@ -40,6 +42,18 @@ class ImageInfo {
@override
String toString() => '$image @ ${scale}x';
@override
int get hashCode => hashValues(image, scale);
@override
bool operator ==(Object other) {
if (other.runtimeType != runtimeType)
return false;
final ImageInfo typedOther = other;
return typedOther.image == image
&& typedOther.scale == scale;
}
}
/// Signature for callbacks reporting that an image is available.
@@ -274,3 +288,159 @@ class OneFrameImageStreamCompleter extends ImageStreamCompleter {
});
}
}
/// Manages the decoding and scheduling of image frames.
///
/// New frames will only be emitted while there are registered listeners to the
/// stream (registered with [addListener]).
///
/// This class deals with two types of frames:
///
/// * image frames - the frames of an animated image.
/// * app frames - the frames that the Flutter engine draws to the screen to
/// show the app GUI.
///
/// For single frame images the stream will only complete once.
///
/// For animated images, this class eagerly decodes the next image frame,
/// and notifies the listeners that a new frame is ready on the first app frame
/// that is scheduled after the image frame duration has passed.
///
/// Scheduling new timers only from scheduled app frames ensures that the
/// animation is paused when the app is not visible (as new app frames will
/// not be scheduled).
///
/// See the following timeline example:
///
/// | Time | Event | Comment |
/// |------|--------------------------------------------|---------------------------|
/// | t1 | App frame scheduled (image frame A posted) | |
/// | t2 | App frame scheduled | |
/// | t3 | App frame scheduled | |
/// | t4 | Image frame B decoded | |
/// | t5 | App frame scheduled | t5 - t1 < frameB_duration |
/// | t6 | App frame scheduled (image frame B posted) | t6 - t1 > frameB_duration |
///
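///
/// A minimal usage sketch (illustrative only; assumes `bytes` is a
/// `Uint8List` of encoded image data and that `dart:ui` is imported as `ui`
/// so that `ui.instantiateImageCodec` is available):
///
/// ```dart
/// final MultiFrameImageStreamCompleter completer =
///     new MultiFrameImageStreamCompleter(
///   codec: ui.instantiateImageCodec(bytes),
///   scale: 1.0,
/// );
/// completer.addListener((ImageInfo image, bool synchronousCall) {
///   // Called for every emitted frame; for animated images this keeps
///   // firing as long as at least one listener is registered.
///   print('frame size: ${image.image.width}x${image.image.height}');
/// });
/// ```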
class MultiFrameImageStreamCompleter extends ImageStreamCompleter {
/// Creates an image stream completer.
///
/// Immediately starts decoding the first image frame when the codec is ready.
///
/// [codec] is a future for an initialized [ui.Codec] that will be used to
/// decode the image.
/// [scale] is the linear scale factor for drawing the frames of this image
/// at their intended size.
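///
/// For example, the codec future might be obtained from raw encoded bytes
/// (a sketch; `bytes` is assumed to be provided by the caller):
///
/// ```dart
/// new MultiFrameImageStreamCompleter(
///   codec: ui.instantiateImageCodec(bytes),
///   scale: 1.0,
/// );
/// ```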
MultiFrameImageStreamCompleter({
@required Future<ui.Codec> codec,
@required double scale,
InformationCollector informationCollector
}) : assert(codec != null),
_informationCollector = informationCollector,
_scale = scale,
_framesEmitted = 0,
_timer = null {
codec.then<Null>(_handleCodecReady, onError: (dynamic error, StackTrace stack) {
FlutterError.reportError(new FlutterErrorDetails(
exception: error,
stack: stack,
library: 'services',
context: 'resolving an image codec',
informationCollector: informationCollector,
silent: true,
));
});
}
ui.Codec _codec;
final double _scale;
final InformationCollector _informationCollector;
ui.FrameInfo _nextFrame;
// When the current frame was first shown.
Duration _shownTimestamp;
// The requested duration for the current frame.
Duration _frameDuration;
// How many frames have been emitted so far.
int _framesEmitted;
Timer _timer;
void _handleCodecReady(ui.Codec codec) {
_codec = codec;
_decodeNextFrameAndSchedule();
}
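// Called on each scheduled app frame. Emits the already-decoded next image
// frame once the previous frame has been shown for its full duration, then
// starts decoding the frame after it.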
void _handleAppFrame(Duration timestamp) {
if (!_hasActiveListeners)
return;
if (_isFirstFrame() || _hasFrameDurationPassed(timestamp)) {
_emitFrame(new ImageInfo(image: _nextFrame.image, scale: _scale));
_shownTimestamp = timestamp;
_frameDuration = _nextFrame.duration;
_nextFrame = null;
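// A repetitionCount of -1 means the animation loops forever; otherwise stop
// requesting frames once the codec's repetition count has been exhausted.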
final int completedCycles = _framesEmitted ~/ _codec.frameCount;
if (_codec.repetitionCount == -1 || completedCycles <= _codec.repetitionCount) {
_decodeNextFrameAndSchedule();
}
return;
}
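// The current frame has not been shown for its full duration yet. Schedule a
// timer for the remaining time (scaled by timeDilation) and only then request
// another app frame, so the animation pauses while no app frames are produced.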
final Duration delay = _frameDuration - (timestamp - _shownTimestamp);
_timer = new Timer(delay * timeDilation, () {
SchedulerBinding.instance.scheduleFrameCallback(_handleAppFrame);
});
}
bool _isFirstFrame() {
return _frameDuration == null;
}
bool _hasFrameDurationPassed(Duration timestamp) {
assert(_shownTimestamp != null);
return timestamp - _shownTimestamp >= _frameDuration;
}
Future<Null> _decodeNextFrameAndSchedule() async {
try {
_nextFrame = await _codec.getNextFrame();
} catch (exception, stack) {
FlutterError.reportError(new FlutterErrorDetails(
exception: exception,
stack: stack,
library: 'services',
context: 'resolving an image frame',
informationCollector: _informationCollector,
silent: true,
));
return;
}
if (_codec.frameCount == 1) {
// This is not an animated image; just emit it and don't schedule more
// frames.
_emitFrame(new ImageInfo(image: _nextFrame.image, scale: _scale));
return;
}
SchedulerBinding.instance.scheduleFrameCallback(_handleAppFrame);
}
void _emitFrame(ImageInfo imageInfo) {
setImage(imageInfo);
_framesEmitted += 1;
}
bool get _hasActiveListeners => _listeners.isNotEmpty;
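// If frame decoding was paused because all listeners were removed, and the
// codec is already available, resume decoding before registering the new
// listener.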
@override
void addListener(ImageListener listener) {
if (!_hasActiveListeners && _codec != null) {
_decodeNextFrameAndSchedule();
}
super.addListener(listener);
}
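// When the last listener is removed, cancel any pending frame timer so that
// no further frames are decoded or emitted until a listener is added again.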
@override
void removeListener(ImageListener listener) {
super.removeListener(listener);
if (!_hasActiveListeners) {
_timer?.cancel();
_timer = null;
}
}
}
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:async';
import 'dart:ui';
import 'package:flutter/scheduler.dart' show timeDilation;
import 'package:flutter/services.dart';
import 'package:flutter_test/flutter_test.dart';
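// Test doubles for dart:ui types: FakeFrameInfo reports a fixed duration and
// a FakeImage with the given dimensions, without doing any real decoding.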
class FakeFrameInfo extends FrameInfo {
final Duration _duration;
final Image _image;
FakeFrameInfo(int width, int height, this._duration) :
_image = new FakeImage(width, height);
@override
Duration get duration => _duration;
@override
Image get image => _image;
}
class FakeImage extends Image {
final int _width;
final int _height;
FakeImage(this._width, this._height);
@override
int get width => _width;
@override
int get height => _height;
@override
void dispose() {}
}
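// A Codec stub whose getNextFrame() only completes when the test calls
// completeNextFrame or failNextFrame, giving tests full control over when
// each frame "finishes decoding".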
class MockCodec implements Codec {
@override
int frameCount;
@override
int repetitionCount;
int numFramesAsked = 0;
Completer<FrameInfo> _nextFrameCompleter = new Completer<FrameInfo>();
@override
Future<FrameInfo> getNextFrame() {
numFramesAsked += 1;
return _nextFrameCompleter.future;
}
void completeNextFrame(FrameInfo frameInfo) {
_nextFrameCompleter.complete(frameInfo);
_nextFrameCompleter = new Completer<FrameInfo>();
}
void failNextFrame(String err) {
_nextFrameCompleter.completeError(err);
}
@override
void dispose() {}
}
void main() {
testWidgets('Codec future fails', (WidgetTester tester) async {
final Completer<Codec> completer = new Completer<Codec>();
new MultiFrameImageStreamCompleter(
codec: completer.future,
scale: 1.0,
);
completer.completeError('failure message');
await tester.idle();
expect(tester.takeException(), 'failure message');
});
testWidgets('First frame decoding starts when codec is ready', (WidgetTester tester) async {
final Completer<Codec> completer = new Completer<Codec>();
final MockCodec mockCodec = new MockCodec();
mockCodec.frameCount = 1;
new MultiFrameImageStreamCompleter(
codec: completer.future,
scale: 1.0,
);
completer.complete(mockCodec);
await tester.idle();
expect(mockCodec.numFramesAsked, 1);
});
testWidgets('getNextFrame future fails', (WidgetTester tester) async {
final MockCodec mockCodec = new MockCodec();
mockCodec.frameCount = 1;
final Completer<Codec> codecCompleter = new Completer<Codec>();
new MultiFrameImageStreamCompleter(
codec: codecCompleter.future,
scale: 1.0,
);
codecCompleter.complete(mockCodec);
// MultiFrameImageStreamCompleter only sets an error handler for the next
// frame future after the codec future has completed.
// Idling here lets the MultiFrameImageStreamCompleter advance and set the
// error handler for the nextFrame future.
await tester.idle();
mockCodec.failNextFrame('frame completion error');
await tester.idle();
expect(tester.takeException(), 'frame completion error');
});
testWidgets('ImageStream emits frame (static image)', (WidgetTester tester) async {
final MockCodec mockCodec = new MockCodec();
mockCodec.frameCount = 1;
final Completer<Codec> codecCompleter = new Completer<Codec>();
final ImageStreamCompleter imageStream = new MultiFrameImageStreamCompleter(
codec: codecCompleter.future,
scale: 1.0,
);
final List<ImageInfo> emittedImages = <ImageInfo>[];
imageStream.addListener((ImageInfo image, bool synchronousCall) {
emittedImages.add(image);
});
codecCompleter.complete(mockCodec);
await tester.idle();
final FrameInfo frame = new FakeFrameInfo(20, 10, const Duration(milliseconds: 200));
mockCodec.completeNextFrame(frame);
await tester.idle();
expect(emittedImages, equals(<ImageInfo>[new ImageInfo(image: frame.image)]));
});
testWidgets('ImageStream emits frames (animated images)', (WidgetTester tester) async {
final MockCodec mockCodec = new MockCodec();
mockCodec.frameCount = 2;
mockCodec.repetitionCount = -1;
final Completer<Codec> codecCompleter = new Completer<Codec>();
final ImageStreamCompleter imageStream = new MultiFrameImageStreamCompleter(
codec: codecCompleter.future,
scale: 1.0,
);
final List<ImageInfo> emittedImages = <ImageInfo>[];
imageStream.addListener((ImageInfo image, bool synchronousCall) {
emittedImages.add(image);
});
codecCompleter.complete(mockCodec);
await tester.idle();
final FrameInfo frame1 = new FakeFrameInfo(20, 10, const Duration(milliseconds: 200));
mockCodec.completeNextFrame(frame1);
await tester.idle();
// We are waiting for the next animation tick, so at this point no frames
// should have been emitted.
expect(emittedImages.length, 0);
await tester.pump();
expect(emittedImages, equals(<ImageInfo>[new ImageInfo(image: frame1.image)]));
final FrameInfo frame2 = new FakeFrameInfo(200, 100, const Duration(milliseconds: 400));
mockCodec.completeNextFrame(frame2);
await tester.pump(const Duration(milliseconds: 100));
// The duration for the current frame was 200ms, so we don't emit the next
// frame yet even though it is ready.
expect(emittedImages.length, 1);
await tester.pump(const Duration(milliseconds: 100));
expect(emittedImages, equals(<ImageInfo>[
new ImageInfo(image: frame1.image),
new ImageInfo(image: frame2.image),
]));
// Let the pending timer for the next frame complete so that we can cleanly
// quit the test without pending timers.
await tester.pump(const Duration(milliseconds: 400));
});
testWidgets('animation wraps back', (WidgetTester tester) async {
final MockCodec mockCodec = new MockCodec();
mockCodec.frameCount = 2;
mockCodec.repetitionCount = -1;
final Completer<Codec> codecCompleter = new Completer<Codec>();
final ImageStreamCompleter imageStream = new MultiFrameImageStreamCompleter(
codec: codecCompleter.future,
scale: 1.0,
);
final List<ImageInfo> emittedImages = <ImageInfo>[];
imageStream.addListener((ImageInfo image, bool synchronousCall) {
emittedImages.add(image);
});
codecCompleter.complete(mockCodec);
await tester.idle();
final FrameInfo frame1 = new FakeFrameInfo(20, 10, const Duration(milliseconds: 200));
final FrameInfo frame2 = new FakeFrameInfo(200, 100, const Duration(milliseconds: 400));
mockCodec.completeNextFrame(frame1);
await tester.idle(); // let nextFrameFuture complete
await tester.pump(); // first animation frame shows on first app frame.
mockCodec.completeNextFrame(frame2);
await tester.idle(); // let nextFrameFuture complete
await tester.pump(const Duration(milliseconds: 200)); // emit 2nd frame.
mockCodec.completeNextFrame(frame1);
await tester.idle(); // let nextFrameFuture complete
await tester.pump(const Duration(milliseconds: 400)); // emit 3rd frame
expect(emittedImages, equals(<ImageInfo>[
new ImageInfo(image: frame1.image),
new ImageInfo(image: frame2.image),
new ImageInfo(image: frame1.image),
]));
// Let the pending timer for the next frame complete so that we can cleanly
// quit the test without pending timers.
await tester.pump(const Duration(milliseconds: 200));
});
testWidgets('animation doesnt repeat more than specified', (WidgetTester tester) async {
final MockCodec mockCodec = new MockCodec();
mockCodec.frameCount = 2;
mockCodec.repetitionCount = 0;
final Completer<Codec> codecCompleter = new Completer<Codec>();
final ImageStreamCompleter imageStream = new MultiFrameImageStreamCompleter(
codec: codecCompleter.future,
scale: 1.0,
);
final List<ImageInfo> emittedImages = <ImageInfo>[];
imageStream.addListener((ImageInfo image, bool synchronousCall) {
emittedImages.add(image);
});
codecCompleter.complete(mockCodec);
await tester.idle();
final FrameInfo frame1 = new FakeFrameInfo(20, 10, const Duration(milliseconds: 200));
final FrameInfo frame2 = new FakeFrameInfo(200, 100, const Duration(milliseconds: 400));
mockCodec.completeNextFrame(frame1);
await tester.idle(); // let nextFrameFuture complete
await tester.pump(); // first animation frame shows on first app frame.
mockCodec.completeNextFrame(frame2);
await tester.idle(); // let nextFrameFuture complete
await tester.pump(const Duration(milliseconds: 200)); // emit 2nd frame.
mockCodec.completeNextFrame(frame1);
// Allow another frame to complete (but we shouldn't be asking for it, as
// this animation should not repeat).
await tester.idle();
await tester.pump(const Duration(milliseconds: 400));
expect(emittedImages, equals(<ImageInfo>[
new ImageInfo(image: frame1.image),
new ImageInfo(image: frame2.image),
]));
});
testWidgets('frames are only decoded when there are active listeners', (WidgetTester tester) async {
final MockCodec mockCodec = new MockCodec();
mockCodec.frameCount = 2;
mockCodec.repetitionCount = -1;
final Completer<Codec> codecCompleter = new Completer<Codec>();
final ImageStreamCompleter imageStream = new MultiFrameImageStreamCompleter(
codec: codecCompleter.future,
scale: 1.0,
);
final ImageListener listener = (ImageInfo image, bool synchronousCall) {};
imageStream.addListener(listener);
codecCompleter.complete(mockCodec);
await tester.idle();
final FrameInfo frame1 = new FakeFrameInfo(20, 10, const Duration(milliseconds: 200));
final FrameInfo frame2 = new FakeFrameInfo(200, 100, const Duration(milliseconds: 400));
mockCodec.completeNextFrame(frame1);
await tester.idle(); // let nextFrameFuture complete
await tester.pump(); // first animation frame shows on first app frame.
mockCodec.completeNextFrame(frame2);
imageStream.removeListener(listener);
await tester.idle(); // let nextFrameFuture complete
await tester.pump(const Duration(milliseconds: 400)); // emit 2nd frame.
// Decoding of the 3rd frame should not start, as there are no registered
// listeners on the stream.
expect(mockCodec.numFramesAsked, 2);
imageStream.addListener(listener);
await tester.idle(); // let nextFrameFuture complete
expect(mockCodec.numFramesAsked, 3);
});
testWidgets('timeDilation affects animation frame timers', (WidgetTester tester) async {
final MockCodec mockCodec = new MockCodec();
mockCodec.frameCount = 2;
mockCodec.repetitionCount = -1;
final Completer<Codec> codecCompleter = new Completer<Codec>();
final ImageStreamCompleter imageStream = new MultiFrameImageStreamCompleter(
codec: codecCompleter.future,
scale: 1.0,
);
final ImageListener listener = (ImageInfo image, bool synchronousCall) {};
imageStream.addListener(listener);
codecCompleter.complete(mockCodec);
await tester.idle();
final FrameInfo frame1 = new FakeFrameInfo(20, 10, const Duration(milliseconds: 200));
final FrameInfo frame2 = new FakeFrameInfo(200, 100, const Duration(milliseconds: 400));
mockCodec.completeNextFrame(frame1);
await tester.idle(); // let nextFrameFuture complete
await tester.pump(); // first animation frame shows on first app frame.
timeDilation = 2.0;
mockCodec.completeNextFrame(frame2);
await tester.idle(); // let nextFrameFuture complete
await tester.pump(); // schedule next app frame
await tester.pump(const Duration(milliseconds: 200)); // 2nd frame is not emitted yet; the timer is dilated.
// Decoding of the 3rd frame should not start after 200 ms, as time is
// dilated by a factor of 2.
expect(mockCodec.numFramesAsked, 2);
await tester.pump(const Duration(milliseconds: 200)); // emit 2nd frame and start decoding the 3rd.
expect(mockCodec.numFramesAsked, 3);
});
}