4 files changed, 422 insertions, 40 deletions
diff --git a/core/res/res/raw/accessibility_gestures.bin b/core/res/res/raw/accessibility_gestures.bin
deleted file mode 100644
index acd79939b105..000000000000
--- a/core/res/res/raw/accessibility_gestures.bin
+++ /dev/null
Binary files differ
diff --git a/core/res/res/values/symbols.xml b/core/res/res/values/symbols.xml
index 554e123c64ff..25ebb876da53 100644
--- a/core/res/res/values/symbols.xml
+++ b/core/res/res/values/symbols.xml
@@ -1453,7 +1453,6 @@
   <java-symbol type="raw" name="color_fade_vert" />
   <java-symbol type="raw" name="color_fade_frag" />
-  <java-symbol type="raw" name="accessibility_gestures" />
   <java-symbol type="raw" name="loaderror" />
   <java-symbol type="raw" name="nodomain" />
diff --git a/services/accessibility/java/com/android/server/accessibility/AccessibilityGestureDetector.java b/services/accessibility/java/com/android/server/accessibility/AccessibilityGestureDetector.java
index 582b19b62c22..b95d2e689873 100644
--- a/services/accessibility/java/com/android/server/accessibility/AccessibilityGestureDetector.java
+++ b/services/accessibility/java/com/android/server/accessibility/AccessibilityGestureDetector.java
@@ -16,19 +16,18 @@
 package com.android.server.accessibility;

+import android.accessibilityservice.AccessibilityService;
 import android.content.Context;
 import android.gesture.Gesture;
-import android.gesture.GestureLibraries;
-import android.gesture.GestureLibrary;
 import android.gesture.GesturePoint;
 import android.gesture.GestureStore;
 import android.gesture.GestureStroke;
 import android.gesture.Prediction;
+import android.graphics.PointF;
 import android.util.Slog;
 import android.util.TypedValue;
 import android.view.GestureDetector;
 import android.view.MotionEvent;
-import android.view.VelocityTracker;
 import android.view.ViewConfiguration;

 import com.android.internal.R;
@@ -47,6 +46,49 @@ class AccessibilityGestureDetector extends GestureDetector.SimpleOnGestureListen
     // Tag for logging received events.
     private static final String LOG_TAG = "AccessibilityGestureDetector";

+    // Constants for sampling motion event points.
+    // We sample based on a minimum distance between points, primarily to improve accuracy by
+    // reducing noisy minor changes in direction.
+    private static final float MIN_INCHES_BETWEEN_SAMPLES = 0.1f;
+    private final float mMinPixelsBetweenSamplesX;
+    private final float mMinPixelsBetweenSamplesY;
+
+    // Constants for separating gesture segments
+    private static final float ANGLE_THRESHOLD = 0.0f;
+
+    // Constants for line segment directions
+    private static final int LEFT = 0;
+    private static final int RIGHT = 1;
+    private static final int UP = 2;
+    private static final int DOWN = 3;
+    private static final int[][] DIRECTIONS_TO_GESTURE_ID = {
+        {
+            AccessibilityService.GESTURE_SWIPE_LEFT,
+            AccessibilityService.GESTURE_SWIPE_LEFT_AND_RIGHT,
+            AccessibilityService.GESTURE_SWIPE_LEFT_AND_UP,
+            AccessibilityService.GESTURE_SWIPE_LEFT_AND_DOWN
+        },
+        {
+            AccessibilityService.GESTURE_SWIPE_RIGHT_AND_LEFT,
+            AccessibilityService.GESTURE_SWIPE_RIGHT,
+            AccessibilityService.GESTURE_SWIPE_RIGHT_AND_UP,
+            AccessibilityService.GESTURE_SWIPE_RIGHT_AND_DOWN
+        },
+        {
+            AccessibilityService.GESTURE_SWIPE_UP_AND_LEFT,
+            AccessibilityService.GESTURE_SWIPE_UP_AND_RIGHT,
+            AccessibilityService.GESTURE_SWIPE_UP,
+            AccessibilityService.GESTURE_SWIPE_UP_AND_DOWN
+        },
+        {
+            AccessibilityService.GESTURE_SWIPE_DOWN_AND_LEFT,
+            AccessibilityService.GESTURE_SWIPE_DOWN_AND_RIGHT,
+            AccessibilityService.GESTURE_SWIPE_DOWN_AND_UP,
+            AccessibilityService.GESTURE_SWIPE_DOWN
+        }
+    };
+
+
     /**
      * Listener functions are called as a result of onMoveEvent(). The current
      * MotionEvent in the context of these functions is the event passed into
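The DIRECTIONS_TO_GESTURE_ID table added above is indexed by the dominant direction of the first and the second line segment of a stroke, with single-direction swipes on the diagonal. A minimal standalone sketch of that lookup, using stand-in strings instead of the AccessibilityService constants (the toDirection helper mirrors the one added further down in this patch):

```java
// Illustration only; mirrors the shape of DIRECTIONS_TO_GESTURE_ID but is not part of the patch.
public final class DirectionTableDemo {
    private static final int LEFT = 0, RIGHT = 1, UP = 2, DOWN = 3;

    // [firstSegmentDirection][secondSegmentDirection] -> gesture name.
    private static final String[][] DIRECTIONS_TO_GESTURE = {
        {"LEFT", "LEFT_AND_RIGHT", "LEFT_AND_UP", "LEFT_AND_DOWN"},
        {"RIGHT_AND_LEFT", "RIGHT", "RIGHT_AND_UP", "RIGHT_AND_DOWN"},
        {"UP_AND_LEFT", "UP_AND_RIGHT", "UP", "UP_AND_DOWN"},
        {"DOWN_AND_LEFT", "DOWN_AND_RIGHT", "DOWN_AND_UP", "DOWN"},
    };

    // Dominant axis wins, matching the toDirection() helper added later in the patch.
    static int toDirection(float dX, float dY) {
        if (Math.abs(dX) > Math.abs(dY)) {
            return (dX < 0) ? LEFT : RIGHT;
        }
        return (dY < 0) ? UP : DOWN;
    }

    public static void main(String[] args) {
        // First segment moves right, second segment moves down => "RIGHT_AND_DOWN".
        int first = toDirection(100f, 0f);
        int second = toDirection(0f, 100f);
        System.out.println(DIRECTIONS_TO_GESTURE[first][second]);
    }
}
```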
@@ -102,10 +144,8 @@ class AccessibilityGestureDetector extends GestureDetector.SimpleOnGestureListen
     }

     private final Listener mListener;
-    private final GestureDetector mGestureDetector;
-
-    // The library for gesture detection.
-    private final GestureLibrary mGestureLibrary;
+    private final Context mContext; // Retained for on-demand construction of GestureDetector.
+    protected GestureDetector mGestureDetector; // Double-tap detector. Visible for test.

     // Indicates that a single tap has occurred.
     private boolean mFirstTapDetected;
@@ -168,28 +208,26 @@ class AccessibilityGestureDetector extends GestureDetector.SimpleOnGestureListen
     // movement when gesturing, and touch exploring. Based on user testing,
     // all gestures started with the initial movement taking less than 100ms.
     // When touch exploring, the first movement almost always takes longer than
-    // 200ms. From this data, 200ms seems the best value to decide what
-    // kind of interaction it is.
-    private static final long CANCEL_ON_PAUSE_THRESHOLD_NOT_STARTED_MS = 200;
+    // 200ms.
+    private static final long CANCEL_ON_PAUSE_THRESHOLD_NOT_STARTED_MS = 150;

     // Time threshold used to determine if a gesture should be cancelled. If
-    // the finger pauses for longer than this delay, the ongoing gesture is
+    // the finger takes more than this time to move 1cm, the ongoing gesture is
     // cancelled.
-    private static final long CANCEL_ON_PAUSE_THRESHOLD_STARTED_MS = 500;
+    private static final long CANCEL_ON_PAUSE_THRESHOLD_STARTED_MS = 300;

     AccessibilityGestureDetector(Context context, Listener listener) {
         mListener = listener;
-
-        mGestureDetector = new GestureDetector(context, this);
-        mGestureDetector.setOnDoubleTapListener(this);
-
-        mGestureLibrary = GestureLibraries.fromRawResource(context, R.raw.accessibility_gestures);
-        mGestureLibrary.setOrientationStyle(8 /* GestureStore.ORIENTATION_SENSITIVE_8 */);
-        mGestureLibrary.setSequenceType(GestureStore.SEQUENCE_SENSITIVE);
-        mGestureLibrary.load();
+        mContext = context;

         mGestureDetectionThreshold = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_MM, 1,
                 context.getResources().getDisplayMetrics()) * GESTURE_CONFIRM_MM;
+
+        // Calculate the minimum distance (in pixels) between sampled points on each axis.
+        final float pixelsPerInchX = context.getResources().getDisplayMetrics().xdpi;
+        final float pixelsPerInchY = context.getResources().getDisplayMetrics().ydpi;
+        mMinPixelsBetweenSamplesX = MIN_INCHES_BETWEEN_SAMPLES * pixelsPerInchX;
+        mMinPixelsBetweenSamplesY = MIN_INCHES_BETWEEN_SAMPLES * pixelsPerInchY;
     }

     /**
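The constructor above turns the 0.1 inch sampling distance into per-axis pixel thresholds using the display's xdpi/ydpi. A quick sketch of that arithmetic and of the sampling check it feeds, with a made-up 420 dpi display (an assumption, not a value from the patch):

```java
// Minimal sketch of the distance-based sampling; display values are illustrative only.
public final class SamplingDemo {
    private static final float MIN_INCHES_BETWEEN_SAMPLES = 0.1f;

    public static void main(String[] args) {
        float xdpi = 420f; // Stand-ins for DisplayMetrics.xdpi / ydpi.
        float ydpi = 420f;
        float minPixelsX = MIN_INCHES_BETWEEN_SAMPLES * xdpi; // 42 px
        float minPixelsY = MIN_INCHES_BETWEEN_SAMPLES * ydpi; // 42 px

        // A point only enters the stroke buffer if it moved far enough on either axis,
        // mirroring the dX >= mMinPixelsBetweenSamplesX || dY >= mMinPixelsBetweenSamplesY check.
        float previousX = 100f, previousY = 100f;
        float x = 130f, y = 110f; // 30 px and 10 px of movement since the last sample.
        boolean sample = Math.abs(x - previousX) >= minPixelsX
                || Math.abs(y - previousY) >= minPixelsY;
        System.out.println("sample point? " + sample); // false: still too close.
    }
}
```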
@@ -205,6 +243,18 @@ class AccessibilityGestureDetector extends GestureDetector.SimpleOnGestureListen
      * @return true if the event is consumed, else false
      */
     public boolean onMotionEvent(MotionEvent event, int policyFlags) {
+
+        // Construct GestureDetector double-tap detector on demand, so that testable sub-class
+        // can use mock GestureDetector.
+        // TODO: Break the circular dependency between GestureDetector's constructor and
+        // AccessibilityGestureDetector's constructor. Construct GestureDetector in TouchExplorer,
+        // using a GestureDetector listener owned by TouchExplorer, which passes double-tap state
+        // information to AccessibilityGestureDetector.
+        if (mGestureDetector == null) {
+            mGestureDetector = new GestureDetector(mContext, this);
+            mGestureDetector.setOnDoubleTapListener(this);
+        }
+
         final float x = event.getX();
         final float y = event.getY();
         final long time = event.getEventTime();
@@ -267,7 +317,7 @@ class AccessibilityGestureDetector extends GestureDetector.SimpleOnGestureListen
                 final float dX = Math.abs(x - mPreviousGestureX);
                 final float dY = Math.abs(y - mPreviousGestureY);

-                if (dX >= TOUCH_TOLERANCE || dY >= TOUCH_TOLERANCE) {
+                if (dX >= mMinPixelsBetweenSamplesX || dY >= mMinPixelsBetweenSamplesY) {
                     mPreviousGestureX = x;
                     mPreviousGestureY = y;
                     mStrokeBuffer.add(new GesturePoint(x, y, time));
@@ -280,8 +330,11 @@ class AccessibilityGestureDetector extends GestureDetector.SimpleOnGestureListen
                     return finishDoubleTap(event, policyFlags);
                 }
                 if (mGestureStarted) {
-                    mStrokeBuffer.add(new GesturePoint(x, y, time));
-
+                    final float dX = Math.abs(x - mPreviousGestureX);
+                    final float dY = Math.abs(y - mPreviousGestureY);
+                    if (dX >= mMinPixelsBetweenSamplesX || dY >= mMinPixelsBetweenSamplesY) {
+                        mStrokeBuffer.add(new GesturePoint(x, y, time));
+                    }
                     return recognizeGesture(event, policyFlags);
                 }
                 break;
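The on-demand construction added above exists so a testable subclass can replace the double-tap GestureDetector with a mock before the first motion event arrives. A small illustrative sketch of that lazy-initialization pattern, using stand-in types rather than the patch's classes:

```java
// Stand-in types; only the lazy-initialization / injection pattern is the point here.
class TapDetector {
    boolean handle(float x, float y) {
        return false;
    }
}

class Detector {
    protected TapDetector mTapDetector; // Protected so a testable subclass can inject one.

    boolean onMotionEvent(float x, float y) {
        if (mTapDetector == null) {
            mTapDetector = new TapDetector(); // Built on first use, not in the constructor.
        }
        return mTapDetector.handle(x, y);
    }
}

class TestableDetector extends Detector {
    void setTapDetector(TapDetector detector) {
        mTapDetector = detector; // Injected before the first event, so lazy init is skipped.
    }
}
```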
@@ -397,30 +450,154 @@ class AccessibilityGestureDetector extends GestureDetector.SimpleOnGestureListen
         mStrokeBuffer.clear();
     }

+    /**
+     * Looks at the sequence of motions in mStrokeBuffer, classifies the gesture, then calls
+     * Listener callbacks for success or failure.
+     *
+     * @param event The raw motion event to pass to the listener callbacks.
+     * @param policyFlags Policy flags for the event.
+     *
+     * @return true if the event is consumed, else false
+     */
     private boolean recognizeGesture(MotionEvent event, int policyFlags) {
-        Gesture gesture = new Gesture();
-        gesture.addStroke(new GestureStroke(mStrokeBuffer));
-
-        ArrayList<Prediction> predictions = mGestureLibrary.recognize(gesture);
-        if (!predictions.isEmpty()) {
-            Prediction bestPrediction = predictions.get(0);
-            if (bestPrediction.score >= MIN_PREDICTION_SCORE) {
-                if (DEBUG) {
-                    Slog.i(LOG_TAG, "gesture: " + bestPrediction.name + " score: "
-                            + bestPrediction.score);
-                }
-                try {
-                    final int gestureId = Integer.parseInt(bestPrediction.name);
-                    return mListener.onGestureCompleted(gestureId);
-                } catch (NumberFormatException nfe) {
-                    Slog.w(LOG_TAG, "Non numeric gesture id:" + bestPrediction.name);
+        if (mStrokeBuffer.size() < 2) {
+            return mListener.onGestureCancelled(event, policyFlags);
+        }
+
+        // Look at mStrokeBuffer and extract 2 line segments, delimited by near-perpendicular
+        // direction change.
+        // Method: for each sampled motion event, check the angle of the most recent motion vector
+        // versus the preceding motion vector, and segment the line if the angle is about
+        // 90 degrees.
+
+        ArrayList<PointF> path = new ArrayList<>();
+        PointF lastDelimiter = new PointF(mStrokeBuffer.get(0).x, mStrokeBuffer.get(0).y);
+        path.add(lastDelimiter);
+
+        float dX = 0;  // Sum of unit vectors from last delimiter to each following point
+        float dY = 0;
+        int count = 0;  // Number of points since last delimiter
+        float length = 0;  // Vector length from delimiter to most recent point
+
+        PointF next = new PointF();
+        for (int i = 1; i < mStrokeBuffer.size(); ++i) {
+            next = new PointF(mStrokeBuffer.get(i).x, mStrokeBuffer.get(i).y);
+            if (count > 0) {
+                // Average of unit vectors from delimiter to following points
+                float currentDX = dX / count;
+                float currentDY = dY / count;
+
+                // newDelimiter is a possible new delimiter, based on a vector with length from
+                // the last delimiter to the previous point, but in the direction of the average
+                // unit vector from delimiter to previous points.
+                // Using the averaged vector has the effect of "squaring off the curve",
+                // creating a sharper angle between the last motion and the preceding motion from
+                // the delimiter. In turn, this sharper angle achieves the splitting threshold
+                // even in a gentle curve.
+                PointF newDelimiter = new PointF(length * currentDX + lastDelimiter.x,
+                        length * currentDY + lastDelimiter.y);
+
+                // Unit vector from newDelimiter to the most recent point
+                float nextDX = next.x - newDelimiter.x;
+                float nextDY = next.y - newDelimiter.y;
+                float nextLength = (float) Math.sqrt(nextDX * nextDX + nextDY * nextDY);
+                nextDX = nextDX / nextLength;
+                nextDY = nextDY / nextLength;
+
+                // Compare the initial motion direction to the most recent motion direction,
+                // and segment the line if direction has changed by about 90 degrees.
+                float dot = currentDX * nextDX + currentDY * nextDY;
+                if (dot < ANGLE_THRESHOLD) {
+                    path.add(newDelimiter);
+                    lastDelimiter = newDelimiter;
+                    dX = 0;
+                    dY = 0;
+                    count = 0;
                 }
             }
+
+            // Vector from last delimiter to most recent point
+            float currentDX = next.x - lastDelimiter.x;
+            float currentDY = next.y - lastDelimiter.y;
+            length = (float) Math.sqrt(currentDX * currentDX + currentDY * currentDY);
+
+            // Increment sum of unit vectors from delimiter to each following point
+            count = count + 1;
+            dX = dX + currentDX / length;
+            dY = dY + currentDY / length;
         }
+        path.add(next);
+        Slog.i(LOG_TAG, "path=" + path.toString());
+
+        // Classify line segments, and call Listener callbacks.
+        return recognizeGesturePath(event, policyFlags, path);
+    }
+
+    /**
+     * Classifies a pair of line segments, by direction.
+     * Calls Listener callbacks for success or failure.
+     *
+     * @param event The raw motion event to pass to the listener's onGestureCancelled method.
+     * @param policyFlags Policy flags for the event.
+     * @param path A sequence of motion line segments derived from motion points in mStrokeBuffer.
+     *
+     * @return true if the event is consumed, else false
+     */
+    private boolean recognizeGesturePath(MotionEvent event, int policyFlags,
+            ArrayList<PointF> path) {
+
+        if (path.size() == 2) {
+            PointF start = path.get(0);
+            PointF end = path.get(1);
+
+            float dX = end.x - start.x;
+            float dY = end.y - start.y;
+            int direction = toDirection(dX, dY);
+            switch (direction) {
+                case LEFT:
+                    return mListener.onGestureCompleted(AccessibilityService.GESTURE_SWIPE_LEFT);
+                case RIGHT:
+                    return mListener.onGestureCompleted(AccessibilityService.GESTURE_SWIPE_RIGHT);
+                case UP:
+                    return mListener.onGestureCompleted(AccessibilityService.GESTURE_SWIPE_UP);
+                case DOWN:
+                    return mListener.onGestureCompleted(AccessibilityService.GESTURE_SWIPE_DOWN);
+                default:
+                    // Do nothing.
+            }
+
+        } else if (path.size() == 3) {
+            PointF start = path.get(0);
+            PointF mid = path.get(1);
+            PointF end = path.get(2);
+
+            float dX0 = mid.x - start.x;
+            float dY0 = mid.y - start.y;
+
+            float dX1 = end.x - mid.x;
+            float dY1 = end.y - mid.y;
+
+            int segmentDirection0 = toDirection(dX0, dY0);
+            int segmentDirection1 = toDirection(dX1, dY1);
+            int gestureId = DIRECTIONS_TO_GESTURE_ID[segmentDirection0][segmentDirection1];
+            return mListener.onGestureCompleted(gestureId);
+        }
+        // else if (path.size() < 2 || 3 < path.size()) then no gesture recognized.
         return mListener.onGestureCancelled(event, policyFlags);
     }

+    /** Maps a vector to a dominant direction in set {LEFT, RIGHT, UP, DOWN}. */
+    private static int toDirection(float dX, float dY) {
+        if (Math.abs(dX) > Math.abs(dY)) {
+            // Horizontal
+            return (dX < 0) ? LEFT : RIGHT;
+        } else {
+            // Vertical
+            return (dY < 0) ? UP : DOWN;
+        }
+    }
+
     private MotionEvent mapSecondPointerToFirstPointer(MotionEvent event) {
         // Only map basic events when two fingers are down.
         if (event.getPointerCount() != 2 ||
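A standalone illustration of the split test used in recognizeGesture() above: with ANGLE_THRESHOLD at 0.0, a new segment starts when the dot product of the average direction so far and the latest direction goes negative, i.e. when the direction has changed by more than 90 degrees. The "squared off" newDelimiter repositioning in the patch exists so gentle curves still reach this threshold; the sketch below only demonstrates the dot-product test itself:

```java
// Demonstrates the ANGLE_THRESHOLD dot-product test in isolation; not part of the patch.
public final class AngleThresholdDemo {
    private static final float ANGLE_THRESHOLD = 0.0f;

    // Returns true if the turn from direction (aX, aY) to (bX, bY) should split the path.
    static boolean isSegmentBoundary(float aX, float aY, float bX, float bY) {
        float lenA = (float) Math.hypot(aX, aY);
        float lenB = (float) Math.hypot(bX, bY);
        float dot = (aX / lenA) * (bX / lenB) + (aY / lenA) * (bY / lenB);
        return dot < ANGLE_THRESHOLD;
    }

    public static void main(String[] args) {
        // 60 degree turn: cos(60) = 0.5, not below the threshold, so the stroke stays one segment.
        System.out.println(isSegmentBoundary(1f, 0f, 0.5f, 0.866f));  // false
        // 120 degree turn: cos(120) = -0.5, below the threshold, so a new segment starts here.
        System.out.println(isSegmentBoundary(1f, 0f, -0.5f, 0.866f)); // true
    }
}
```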
diff --git a/services/tests/servicestests/src/com/android/server/accessibility/AccessibilityGestureDetectorTest.java b/services/tests/servicestests/src/com/android/server/accessibility/AccessibilityGestureDetectorTest.java
new file mode 100644
index 000000000000..d0c2b52f4a67
--- /dev/null
+++ b/services/tests/servicestests/src/com/android/server/accessibility/AccessibilityGestureDetectorTest.java
@@ -0,0 +1,206 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.server.accessibility;
+
+import static junit.framework.TestCase.assertEquals;
+
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.reset;
+import static org.mockito.Mockito.when;
+import static org.mockito.Mockito.verify;
+
+import android.accessibilityservice.AccessibilityService;
+import android.content.Context;
+import android.content.res.Resources;
+import android.graphics.Point;
+import android.graphics.PointF;
+import android.os.Looper;
+import android.util.DisplayMetrics;
+import android.view.GestureDetector;
+import android.view.MotionEvent;
+
+import java.util.ArrayList;
+
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+
+/**
+ * Tests for AccessibilityGestureDetector
+ */
+public class AccessibilityGestureDetectorTest {
+
+    // Constants for testRecognizeGesturePath()
+    private static final PointF PATH_START = new PointF(300f, 300f);
+    private static final int PATH_STEP_PIXELS = 200;
+    private static final long PATH_STEP_MILLISEC = 100;
+
+    /**
+     * AccessibilityGestureDetector that can mock double-tap detector.
+     */
+    private class AccessibilityGestureDetectorTestable extends AccessibilityGestureDetector {
+        public AccessibilityGestureDetectorTestable(Context context, Listener listener) {
+            super(context, listener);
+        }
+
+        protected void setDoubleTapDetector(GestureDetector gestureDetector) {
+            mGestureDetector = gestureDetector;
+            mGestureDetector.setOnDoubleTapListener(this);
+        }
+    }
+
+
+    // Data used by all tests
+    private AccessibilityGestureDetectorTestable mDetector;
+    private AccessibilityGestureDetector.Listener mResultListener;
+
+
+    @BeforeClass
+    public static void oneTimeInitialization() {
+        if (Looper.myLooper() == null) {
+            Looper.prepare();
+        }
+    }
+
+    @Before
+    public void setUp() {
+        // Construct a mock Context.
+        DisplayMetrics displayMetricsMock = mock(DisplayMetrics.class);
+        displayMetricsMock.xdpi = 500;
+        displayMetricsMock.ydpi = 500;
+        Resources mockResources = mock(Resources.class);
+        when(mockResources.getDisplayMetrics()).thenReturn(displayMetricsMock);
+        Context contextMock = mock(Context.class);
+        when(contextMock.getMainLooper()).thenReturn(Looper.myLooper());
+        when(contextMock.getResources()).thenReturn(mockResources);
+
+        // Construct a testable AccessibilityGestureDetector.
+        mResultListener = mock(AccessibilityGestureDetector.Listener.class);
+        mDetector = new AccessibilityGestureDetectorTestable(contextMock, mResultListener);
+        GestureDetector doubleTapDetectorMock = mock(GestureDetector.class);
+        mDetector.setDoubleTapDetector(doubleTapDetectorMock);
+    }
+
+
+    @Test
+    public void testRecognizeGesturePath() {
+        final int d = 1000; // Length of each segment in the test gesture, in pixels.
+
+        testPath(p(-d, +0), AccessibilityService.GESTURE_SWIPE_LEFT);
+        testPath(p(+d, +0), AccessibilityService.GESTURE_SWIPE_RIGHT);
+        testPath(p(+0, -d), AccessibilityService.GESTURE_SWIPE_UP);
+        testPath(p(+0, +d), AccessibilityService.GESTURE_SWIPE_DOWN);
+
+        testPath(p(-d, +0), p((-d - d), +0), AccessibilityService.GESTURE_SWIPE_LEFT);
+        testPath(p(-d, +0), p(+0, +0), AccessibilityService.GESTURE_SWIPE_LEFT_AND_RIGHT);
+        testPath(p(-d, +0), p(-d, -d), AccessibilityService.GESTURE_SWIPE_LEFT_AND_UP);
+        testPath(p(-d, +0), p(-d, +d), AccessibilityService.GESTURE_SWIPE_LEFT_AND_DOWN);
+
+        testPath(p(+d, +0), p(+0, +0), AccessibilityService.GESTURE_SWIPE_RIGHT_AND_LEFT);
+        testPath(p(+d, +0), p((+d + d), +0), AccessibilityService.GESTURE_SWIPE_RIGHT);
+        testPath(p(+d, +0), p(+d, -d), AccessibilityService.GESTURE_SWIPE_RIGHT_AND_UP);
+        testPath(p(+d, +0), p(+d, +d), AccessibilityService.GESTURE_SWIPE_RIGHT_AND_DOWN);
+
+        testPath(p(+0, -d), p(-d, -d), AccessibilityService.GESTURE_SWIPE_UP_AND_LEFT);
+        testPath(p(+0, -d), p(+d, -d), AccessibilityService.GESTURE_SWIPE_UP_AND_RIGHT);
+        testPath(p(+0, -d), p(+0, (-d - d)), AccessibilityService.GESTURE_SWIPE_UP);
+        testPath(p(+0, -d), p(+0, +0), AccessibilityService.GESTURE_SWIPE_UP_AND_DOWN);
+
+        testPath(p(+0, +d), p(-d, +d), AccessibilityService.GESTURE_SWIPE_DOWN_AND_LEFT);
+        testPath(p(+0, +d), p(+d, +d), AccessibilityService.GESTURE_SWIPE_DOWN_AND_RIGHT);
+        testPath(p(+0, +d), p(+0, +0), AccessibilityService.GESTURE_SWIPE_DOWN_AND_UP);
+        testPath(p(+0, +d), p(+0, (+d + d)), AccessibilityService.GESTURE_SWIPE_DOWN);
+    }
+
+    /** Convenient short alias to make a Point. */
+    private static Point p(int x, int y) {
+        return new Point(x, y);
+    }
+
+    /** Test recognizing path from PATH_START to PATH_START+delta. */
+    private void testPath(Point delta, int gestureId) {
+        ArrayList<PointF> path = new ArrayList<>();
+        path.add(PATH_START);
+
+        PointF segmentEnd = new PointF(PATH_START.x + delta.x, PATH_START.y + delta.y);
+        fillPath(PATH_START, segmentEnd, path);
+
+        testPath(path, gestureId);
+    }
+
+    /** Test recognizing path from PATH_START to PATH_START+delta1 to PATH_START+delta2. */
+    private void testPath(Point delta1, Point delta2, int gestureId) {
+        ArrayList<PointF> path = new ArrayList<>();
+        path.add(PATH_START);
+
+        PointF startPlusDelta1 = new PointF(PATH_START.x + delta1.x, PATH_START.y + delta1.y);
+        fillPath(PATH_START, startPlusDelta1, path);
+
+        PointF startPlusDelta2 = new PointF(PATH_START.x + delta2.x, PATH_START.y + delta2.y);
+        fillPath(startPlusDelta1, startPlusDelta2, path);
+
+        testPath(path, gestureId);
+    }
+
+    /** Fill in movement points from start to end, appending points to path. */
+    private void fillPath(PointF start, PointF end, ArrayList<PointF> path) {
+        // Calculate number of path steps needed.
+        float deltaX = end.x - start.x;
+        float deltaY = end.y - start.y;
+        float distance = (float) Math.hypot(deltaX, deltaY);
+        float numSteps = distance / (float) PATH_STEP_PIXELS;
+        float stepX = (float) deltaX / numSteps;
+        float stepY = (float) deltaY / numSteps;
+
+        // For each path step from start (non-inclusive) to end ... add a motion point.
+        for (int step = 1; step < numSteps; ++step) {
+            path.add(new PointF(
+                    (start.x + (stepX * (float) step)),
+                    (start.y + (stepY * (float) step))));
+        }
+    }
+
+    /** Test recognizing a path made of motion event points. */
+    private void testPath(ArrayList<PointF> path, int gestureId) {
+        // Clear last recognition result.
+        reset(mResultListener);
+
+        int policyFlags = 0;
+        long eventDownTimeMs = 0;
+        long eventTimeMs = eventDownTimeMs;
+
+        // For each path point...
+        for (int pointIndex = 0; pointIndex < path.size(); ++pointIndex) {
+
+            // Create motion event.
+            PointF point = path.get(pointIndex);
+            int action = MotionEvent.ACTION_MOVE;
+            if (pointIndex == 0) {
+                action = MotionEvent.ACTION_DOWN;
+            } else if (pointIndex == path.size() - 1) {
+                action = MotionEvent.ACTION_UP;
+            }
+            MotionEvent event = MotionEvent.obtain(eventDownTimeMs, eventTimeMs, action,
+                    point.x, point.y, 0);
+
+            // Send event.
+            mDetector.onMotionEvent(event, policyFlags);
+            eventTimeMs += PATH_STEP_MILLISEC;
+        }
+
+        // Check that correct gesture was recognized.
+        verify(mResultListener).onGestureCompleted(gestureId);
+    }
+}
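For context on where the GESTURE_SWIPE_* ids produced by this detector end up: an accessibility service that has requested touch exploration receives them through AccessibilityService#onGesture(int). A minimal consumer sketch, not part of this change (the service still needs touch exploration requested in its configuration):

```java
import android.accessibilityservice.AccessibilityService;
import android.view.accessibility.AccessibilityEvent;

// Illustrative consumer of the gesture ids surfaced by the detector above; not part of the patch.
public class SwipeHandlingService extends AccessibilityService {
    @Override
    protected boolean onGesture(int gestureId) {
        switch (gestureId) {
            case GESTURE_SWIPE_RIGHT_AND_DOWN:
                // React to a right-then-down swipe.
                return true;
            case GESTURE_SWIPE_LEFT:
                // React to a plain left swipe.
                return true;
            default:
                return super.onGesture(gestureId);
        }
    }

    @Override
    public void onAccessibilityEvent(AccessibilityEvent event) { }

    @Override
    public void onInterrupt() { }
}
```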