Unverified Commit 441d7f3f authored by Justin McCandless's avatar Justin McCandless Committed by GitHub

InteractiveViewer scale interpretation improvement (#63543)

parent afc95099
......@@ -684,6 +684,22 @@ class _InteractiveViewerState extends State<InteractiveViewer> with TickerProvid
}
}
// Classifies the active gesture by comparing how much scale and how much
// rotation it carries. Scale begins at 1.0 and rotation at 0.0; a
// single-finger pan contributes neither, so it falls through to pan.
_GestureType _getGestureType(ScaleUpdateDetails details) {
  final double scale = widget.scaleEnabled ? details.scale : 1.0;
  final double rotation = _rotateEnabled ? details.rotation : 0.0;
  // Whichever delta is larger wins; ties (including the all-zero case)
  // resolve away from scale.
  final bool scaleDominates = (scale - 1.0).abs() > rotation.abs();
  if (scaleDominates) {
    return _GestureType.scale;
  }
  return rotation != 0.0 ? _GestureType.rotate : _GestureType.pan;
}
// Handle the start of a gesture. All of pan, scale, and rotate are handled
// with GestureDetector's scale gesture.
void _onScaleStart(ScaleStartDetails details) {
......@@ -723,23 +739,23 @@ class _InteractiveViewerState extends State<InteractiveViewer> with TickerProvid
final Offset focalPointScene = _transformationController.toScene(
details.localFocalPoint,
);
_gestureType ??= _getGestureType(
!widget.scaleEnabled ? 1.0 : details.scale,
!_rotateEnabled ? 0.0 : details.rotation,
);
if (_gestureType == _GestureType.pan) {
_panAxis ??= _getPanAxis(_referenceFocalPoint, focalPointScene);
// When a gesture first starts, it sometimes has no change in scale and
// rotation despite being a two-finger gesture. Here the gesture is
// allowed to be reinterpreted as its correct type after originally
// being marked as a pan.
_gestureType = _getGestureType(details);
} else {
_gestureType ??= _getGestureType(details);
}
if (!_gestureIsSupported(_gestureType)) {
return;
}
switch (_gestureType) {
case _GestureType.scale:
if (_scaleStart == null) {
return;
}
assert(_scaleStart != null);
// details.scale gives us the amount to change the scale as of the
// start of this gesture, so calculate the amount to scale as of the
// previous call to _onScaleUpdate.
......@@ -789,9 +805,14 @@ class _InteractiveViewerState extends State<InteractiveViewer> with TickerProvid
return;
case _GestureType.pan:
if (_referenceFocalPoint == null || details.scale != 1.0) {
assert(_referenceFocalPoint != null);
// details may have a change in scale here when scaleEnabled is false.
// In an effort to keep the behavior similar whether or not scaleEnabled
// is true, these gestures are thrown away.
if (details.scale != 1.0) {
return;
}
_panAxis ??= _getPanAxis(_referenceFocalPoint, focalPointScene);
// Translate so that the same point in the scene is underneath the
// focal point before and after the movement.
final Offset translationChange = focalPointScene - _referenceFocalPoint;
......@@ -1082,20 +1103,6 @@ double _getFinalTime(double velocity, double drag) {
return math.log(effectivelyMotionless / velocity) / math.log(drag / 100);
}
// Determines the gesture kind from the relative magnitudes of scale and
// rotation. Scale starts at 1 and rotation starts at 0; a one-finger pan
// produces a scale of 1 and a rotation of 0.
_GestureType _getGestureType(double scale, double rotation) {
  // Compare the deviation of each value from its resting point.
  final bool scaleWins = (scale - 1).abs() > rotation.abs();
  if (scaleWins) {
    return _GestureType.scale;
  }
  return rotation == 0 ? _GestureType.pan : _GestureType.rotate;
}
// Return the translation from the given Matrix4 as an Offset.
Offset _getMatrixTranslation(Matrix4 matrix) {
final Vector3 nextTranslation = matrix.getTranslation();
......
......@@ -672,6 +672,63 @@ void main() {
await tester.pumpAndSettle();
expect(transformationController.value, equals(Matrix4.identity()));
});
testWidgets('gesture can start as pan and become scale', (WidgetTester tester) async {
  final TransformationController transformationController = TransformationController();
  const double boundaryMargin = 50.0;
  await tester.pumpWidget(
    MaterialApp(
      home: Scaffold(
        body: Center(
          child: InteractiveViewer(
            boundaryMargin: const EdgeInsets.all(boundaryMargin),
            transformationController: transformationController,
            child: Container(width: 200.0, height: 200.0),
          ),
        ),
      ),
    ),
  );

  // The viewer begins untransformed.
  Vector3 translation = transformationController.value.getTranslation();
  expect(translation.x, 0.0);
  expect(translation.y, 0.0);

  // Drag a single finger to begin a pan.
  final Offset childCenter = tester.getCenter(find.byType(Container));
  final TestGesture firstFinger = await tester.createGesture();
  await firstFinger.down(childCenter);
  await tester.pump();
  await firstFinger.moveTo(childCenter + const Offset(5.0, 5.0));
  await tester.pump();
  translation = transformationController.value.getTranslation();
  expect(translation.x, greaterThan(0.0));
  expect(translation.y, greaterThan(0.0));

  // Add a second finger and spread both apart, turning the ongoing pan
  // into a scale gesture.
  final TestGesture secondFinger = await tester.createGesture();
  await secondFinger.down(childCenter - const Offset(5.0, 5.0));
  await tester.pump();
  await firstFinger.moveTo(childCenter + const Offset(25.0, 25.0));
  await secondFinger.moveTo(childCenter - const Offset(25.0, 25.0));
  await tester.pump();
  await firstFinger.up();
  await secondFinger.up();
  await tester.pumpAndSettle();

  // The gesture was reinterpreted as a scale and zoomed the content in.
  expect(transformationController.value.getMaxScaleOnAxis(), greaterThan(1.0));
});
});
group('getNearestPointOnLine', () {
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment