author Xin Li <delphij@google.com> 2024-06-20 20:42:50 +0000
committer Android (Google) Code Review <android-gerrit@google.com> 2024-06-20 20:42:50 +0000
commit  3dd03323aa8825a0c7bf4dcc1ed6d6c3b88a3d7f (patch)
tree    e05639565a22611c334a83a35a998f3fba3e1f1a /libs/input
parent  447d0a33acc9827809d1049023b05e4cac05fc70 (diff)
parent  bb1814a0b94e8efb3fa3843c3d6a6533a382cac3 (diff)
Merge "Merge 24Q3 (ab/11976889) to aosp-main-future" into aosp-main-future
Diffstat (limited to 'libs/input')
-rw-r--r--  libs/input/Android.bp                                             |  109
-rw-r--r--  libs/input/Input.cpp                                              |  242
-rw-r--r--  libs/input/InputConsumer.cpp                                      |  962
-rw-r--r--  libs/input/InputConsumerNoResampling.cpp                          |  539
-rw-r--r--  libs/input/InputDevice.cpp                                        |   15
-rw-r--r--  libs/input/InputTransport.cpp                                     |  939
-rw-r--r--  libs/input/KeyCharacterMap.cpp                                    |   18
-rw-r--r--  libs/input/KeyboardClassifier.cpp                                 |   89
-rw-r--r--  libs/input/MotionPredictor.cpp                                    |   96
-rw-r--r--  libs/input/MotionPredictorMetricsManager.cpp                      |  104
-rw-r--r--  libs/input/TfLiteMotionPredictor.cpp                              |    2
-rw-r--r--  libs/input/android/os/IInputConstants.aidl                        |  271
-rw-r--r--  libs/input/android/os/InputConfig.aidl                            |   10
-rw-r--r--  libs/input/android/os/PointerIconType.aidl                        |   56
-rw-r--r--  libs/input/input_flags.aconfig                                    |   45
-rw-r--r--  libs/input/rust/input.rs                                          |  143
-rw-r--r--  libs/input/rust/keyboard_classifier.rs                            |  345
-rw-r--r--  libs/input/rust/lib.rs                                            |  111
-rw-r--r--  libs/input/tests/Android.bp                                       |   17
-rw-r--r--  libs/input/tests/InputChannel_test.cpp                            |    2
-rw-r--r--  libs/input/tests/InputEvent_test.cpp                              |  317
-rw-r--r--  libs/input/tests/InputPublisherAndConsumerNoResampling_test.cpp   |  817
-rw-r--r--  libs/input/tests/InputPublisherAndConsumer_test.cpp               |   50
-rw-r--r--  libs/input/tests/MotionPredictorMetricsManager_test.cpp           |   81
-rw-r--r--  libs/input/tests/MotionPredictor_test.cpp                         |  189
-rw-r--r--  libs/input/tests/TestHelpers.h                                    |   81
-rw-r--r--  libs/input/tests/TouchResampling_test.cpp                         |   97
-rw-r--r--  libs/input/tests/VelocityTracker_test.cpp                         |    5
-rw-r--r--  libs/input/tests/VerifiedInputEvent_test.cpp                      |   12
29 files changed, 4451 insertions, 1313 deletions
diff --git a/libs/input/Android.bp b/libs/input/Android.bp
index 2b5d3758be..d782f42071 100644
--- a/libs/input/Android.bp
+++ b/libs/input/Android.bp
@@ -30,6 +30,7 @@ filegroup {
"android/os/InputEventInjectionResult.aidl",
"android/os/InputEventInjectionSync.aidl",
"android/os/InputConfig.aidl",
+ "android/os/PointerIconType.aidl",
],
}
@@ -84,11 +85,6 @@ rust_bindgen {
bindgen_flags: [
"--verbose",
- "--allowlist-var=AMOTION_EVENT_FLAG_CANCELED",
- "--allowlist-var=AMOTION_EVENT_FLAG_WINDOW_IS_OBSCURED",
- "--allowlist-var=AMOTION_EVENT_FLAG_WINDOW_IS_PARTIALLY_OBSCURED",
- "--allowlist-var=AMOTION_EVENT_FLAG_IS_ACCESSIBILITY_EVENT",
- "--allowlist-var=AMOTION_EVENT_FLAG_NO_FOCUS_CHANGE",
"--allowlist-var=AMOTION_EVENT_ACTION_CANCEL",
"--allowlist-var=AMOTION_EVENT_ACTION_UP",
"--allowlist-var=AMOTION_EVENT_ACTION_POINTER_DOWN",
@@ -117,6 +113,27 @@ rust_bindgen {
"--allowlist-var=AINPUT_SOURCE_HDMI",
"--allowlist-var=AINPUT_SOURCE_SENSOR",
"--allowlist-var=AINPUT_SOURCE_ROTARY_ENCODER",
+ "--allowlist-var=AINPUT_KEYBOARD_TYPE_NONE",
+ "--allowlist-var=AINPUT_KEYBOARD_TYPE_NON_ALPHABETIC",
+ "--allowlist-var=AINPUT_KEYBOARD_TYPE_ALPHABETIC",
+ "--allowlist-var=AMETA_NONE",
+ "--allowlist-var=AMETA_ALT_ON",
+ "--allowlist-var=AMETA_ALT_LEFT_ON",
+ "--allowlist-var=AMETA_ALT_RIGHT_ON",
+ "--allowlist-var=AMETA_SHIFT_ON",
+ "--allowlist-var=AMETA_SHIFT_LEFT_ON",
+ "--allowlist-var=AMETA_SHIFT_RIGHT_ON",
+ "--allowlist-var=AMETA_SYM_ON",
+ "--allowlist-var=AMETA_FUNCTION_ON",
+ "--allowlist-var=AMETA_CTRL_ON",
+ "--allowlist-var=AMETA_CTRL_LEFT_ON",
+ "--allowlist-var=AMETA_CTRL_RIGHT_ON",
+ "--allowlist-var=AMETA_META_ON",
+ "--allowlist-var=AMETA_META_LEFT_ON",
+ "--allowlist-var=AMETA_META_RIGHT_ON",
+ "--allowlist-var=AMETA_CAPS_LOCK_ON",
+ "--allowlist-var=AMETA_NUM_LOCK_ON",
+ "--allowlist-var=AMETA_SCROLL_LOCK_ON",
],
static_libs: [
@@ -131,6 +148,29 @@ rust_bindgen {
],
}
+cc_library_static {
+ name: "iinputflinger_aidl_lib_static",
+ host_supported: true,
+ srcs: [
+ "android/os/IInputFlinger.aidl",
+ "android/os/InputChannelCore.aidl",
+ ],
+ shared_libs: [
+ "libbinder",
+ ],
+ whole_static_libs: [
+ "libgui_window_info_static",
+ ],
+ aidl: {
+ export_aidl_headers: true,
+ local_include_dirs: ["."],
+ include_dirs: [
+ "frameworks/native/libs/gui",
+ "frameworks/native/libs/input",
+ ],
+ },
+}
+
// Contains methods to help access C++ code from rust
cc_library_static {
name: "libinput_from_rust_to_cpp",
@@ -175,16 +215,17 @@ cc_library {
"-DANDROID_UTILS_REF_BASE_DISABLE_IMPLICIT_CONSTRUCTION",
],
srcs: [
- "android/os/IInputFlinger.aidl",
- "android/os/InputChannelCore.aidl",
"AccelerationCurve.cpp",
"Input.cpp",
+ "InputConsumer.cpp",
+ "InputConsumerNoResampling.cpp",
"InputDevice.cpp",
"InputEventLabels.cpp",
"InputTransport.cpp",
"InputVerifier.cpp",
"Keyboard.cpp",
"KeyCharacterMap.cpp",
+ "KeyboardClassifier.cpp",
"KeyLayoutMap.cpp",
"MotionPredictor.cpp",
"MotionPredictorMetricsManager.cpp",
@@ -218,6 +259,7 @@ cc_library {
shared_libs: [
"libbase",
"libbinder",
+ "libbinder_ndk",
"libcutils",
"liblog",
"libPlatformProperties",
@@ -239,7 +281,6 @@ cc_library {
static_libs: [
"inputconstants-cpp",
- "libgui_window_info_static",
"libui-types",
"libtflite_static",
"libkernelconfigs",
@@ -248,10 +289,10 @@ cc_library {
whole_static_libs: [
"com.android.input.flags-aconfig-cc",
"libinput_rust_ffi",
+ "iinputflinger_aidl_lib_static",
],
export_static_lib_headers: [
- "libgui_window_info_static",
"libui-types",
],
@@ -262,21 +303,14 @@ cc_library {
target: {
android: {
- export_shared_lib_headers: ["libbinder"],
-
- shared_libs: [
- "libutils",
- "libbinder",
- // Stats logging library and its dependencies.
- "libstatslog_libinput",
- "libstatsbootstrap",
- "android.os.statsbootstrap_aidl-cpp",
- ],
-
required: [
"motion_predictor_model_prebuilt",
"motion_predictor_model_config",
],
+ static_libs: [
+ "libstatslog_libinput",
+ "libstatssocket_lazy",
+ ],
},
host: {
include_dirs: [
@@ -285,37 +319,32 @@ cc_library {
],
},
},
-
- aidl: {
- local_include_dirs: ["."],
- export_aidl_headers: true,
- include_dirs: [
- "frameworks/native/libs/gui",
- ],
- },
}
-// Use bootstrap version of stats logging library.
-// libinput is a bootstrap process (starts early in the boot process), and thus can't use the normal
-// `libstatslog` because that requires `libstatssocket`, which is only available later in the boot.
-cc_library {
+cc_library_static {
name: "libstatslog_libinput",
generated_sources: ["statslog_libinput.cpp"],
generated_headers: ["statslog_libinput.h"],
+ cflags: [
+ "-Wall",
+ "-Werror",
+ ],
export_generated_headers: ["statslog_libinput.h"],
shared_libs: [
- "libbinder",
- "libstatsbootstrap",
+ "libcutils",
+ "liblog",
"libutils",
- "android.os.statsbootstrap_aidl-cpp",
+ ],
+ static_libs: [
+ "libstatssocket_lazy",
],
}
genrule {
name: "statslog_libinput.h",
tools: ["stats-log-api-gen"],
- cmd: "$(location stats-log-api-gen) --header $(genDir)/statslog_libinput.h --module libinput" +
- " --namespace android,stats,libinput --bootstrap",
+ cmd: "$(location stats-log-api-gen) --header $(genDir)/statslog_libinput.h " +
+ "--module libinput --namespace android,libinput",
out: [
"statslog_libinput.h",
],
@@ -324,9 +353,9 @@ genrule {
genrule {
name: "statslog_libinput.cpp",
tools: ["stats-log-api-gen"],
- cmd: "$(location stats-log-api-gen) --cpp $(genDir)/statslog_libinput.cpp --module libinput" +
- " --namespace android,stats,libinput --importHeader statslog_libinput.h" +
- " --bootstrap",
+ cmd: "$(location stats-log-api-gen) --cpp $(genDir)/statslog_libinput.cpp " +
+ "--module libinput --namespace android,libinput " +
+ "--importHeader statslog_libinput.h",
out: [
"statslog_libinput.cpp",
],
diff --git a/libs/input/Input.cpp b/libs/input/Input.cpp
index 9e0ce1db33..b09814797f 100644
--- a/libs/input/Input.cpp
+++ b/libs/input/Input.cpp
@@ -27,15 +27,12 @@
#include <android-base/logging.h>
#include <android-base/stringprintf.h>
#include <cutils/compiler.h>
-#include <gui/constants.h>
#include <input/DisplayViewport.h>
#include <input/Input.h>
#include <input/InputDevice.h>
#include <input/InputEventLabels.h>
-#ifdef __linux__
#include <binder/Parcel.h>
-#endif
#if defined(__ANDROID__)
#include <sys/random.h>
#endif
@@ -60,6 +57,58 @@ bool shouldDisregardOffset(uint32_t source) {
return !isFromSource(source, AINPUT_SOURCE_CLASS_POINTER);
}
+int32_t resolveActionForSplitMotionEvent(
+ int32_t action, int32_t flags, const std::vector<PointerProperties>& pointerProperties,
+ const std::vector<PointerProperties>& splitPointerProperties) {
+ LOG_ALWAYS_FATAL_IF(splitPointerProperties.empty());
+ const auto maskedAction = MotionEvent::getActionMasked(action);
+ if (maskedAction != AMOTION_EVENT_ACTION_POINTER_DOWN &&
+ maskedAction != AMOTION_EVENT_ACTION_POINTER_UP) {
+ // The action is unaffected by splitting this motion event.
+ return action;
+ }
+ const auto actionIndex = MotionEvent::getActionIndex(action);
+ if (CC_UNLIKELY(actionIndex >= pointerProperties.size())) {
+ LOG(FATAL) << "Action index is out of bounds, index: " << actionIndex;
+ }
+
+ const auto affectedPointerId = pointerProperties[actionIndex].id;
+ std::optional<uint32_t> splitActionIndex;
+ for (uint32_t i = 0; i < splitPointerProperties.size(); i++) {
+ if (affectedPointerId == splitPointerProperties[i].id) {
+ splitActionIndex = i;
+ break;
+ }
+ }
+ if (!splitActionIndex.has_value()) {
+ // The affected pointer is not part of the split motion event.
+ return AMOTION_EVENT_ACTION_MOVE;
+ }
+
+ if (splitPointerProperties.size() > 1) {
+ return maskedAction | (*splitActionIndex << AMOTION_EVENT_ACTION_POINTER_INDEX_SHIFT);
+ }
+
+ if (maskedAction == AMOTION_EVENT_ACTION_POINTER_UP) {
+ return ((flags & AMOTION_EVENT_FLAG_CANCELED) != 0) ? AMOTION_EVENT_ACTION_CANCEL
+ : AMOTION_EVENT_ACTION_UP;
+ }
+ return AMOTION_EVENT_ACTION_DOWN;
+}
+
+float transformOrientation(const ui::Transform& transform, const PointerCoords& coords,
+ int32_t motionEventFlags) {
+ if ((motionEventFlags & AMOTION_EVENT_PRIVATE_FLAG_SUPPORTS_ORIENTATION) == 0) {
+ return 0;
+ }
+
+ const bool isDirectionalAngle =
+ (motionEventFlags & AMOTION_EVENT_PRIVATE_FLAG_SUPPORTS_DIRECTIONAL_ORIENTATION) != 0;
+
+ return transformAngle(transform, coords.getAxisValue(AMOTION_EVENT_AXIS_ORIENTATION),
+ isDirectionalAngle);
+}
+
} // namespace
const char* motionClassificationToString(MotionClassification classification) {
@@ -151,7 +200,7 @@ vec2 transformWithoutTranslation(const ui::Transform& transform, const vec2& xy)
return roundTransformedCoords(transformedXy - transformedOrigin);
}
-float transformAngle(const ui::Transform& transform, float angleRadians) {
+float transformAngle(const ui::Transform& transform, float angleRadians, bool isDirectional) {
// Construct and transform a vector oriented at the specified clockwise angle from vertical.
// Coordinate system: down is increasing Y, right is increasing X.
float x = sinf(angleRadians);
@@ -165,6 +214,11 @@ float transformAngle(const ui::Transform& transform, float angleRadians) {
transformedPoint.x -= origin.x;
transformedPoint.y -= origin.y;
+ if (!isDirectional && transformedPoint.y > 0) {
+ // Limit the range of atan2f to [-pi/2, pi/2] by reversing the direction of the vector.
+ transformedPoint *= -1;
+ }
+
// Derive the transformed vector's clockwise angle from vertical.
// The return value of atan2f is in range [-pi, pi] which conforms to the orientation API.
return atan2f(transformedPoint.x, -transformedPoint.y);
@@ -254,8 +308,8 @@ VerifiedMotionEvent verifiedMotionEventFromMotionEvent(const MotionEvent& event)
event.getButtonState()};
}
-void InputEvent::initialize(int32_t id, int32_t deviceId, uint32_t source, int32_t displayId,
- std::array<uint8_t, 32> hmac) {
+void InputEvent::initialize(int32_t id, int32_t deviceId, uint32_t source,
+ ui::LogicalDisplayId displayId, std::array<uint8_t, 32> hmac) {
mId = id;
mDeviceId = deviceId;
mSource = source;
@@ -317,10 +371,11 @@ std::optional<int> KeyEvent::getKeyCodeFromLabel(const char* label) {
return InputEventLookup::getKeyCodeByLabel(label);
}
-void KeyEvent::initialize(int32_t id, int32_t deviceId, uint32_t source, int32_t displayId,
- std::array<uint8_t, 32> hmac, int32_t action, int32_t flags,
- int32_t keyCode, int32_t scanCode, int32_t metaState, int32_t repeatCount,
- nsecs_t downTime, nsecs_t eventTime) {
+void KeyEvent::initialize(int32_t id, int32_t deviceId, uint32_t source,
+ ui::LogicalDisplayId displayId, std::array<uint8_t, 32> hmac,
+ int32_t action, int32_t flags, int32_t keyCode, int32_t scanCode,
+ int32_t metaState, int32_t repeatCount, nsecs_t downTime,
+ nsecs_t eventTime) {
InputEvent::initialize(id, deviceId, source, displayId, hmac);
mAction = action;
mFlags = flags;
@@ -444,7 +499,6 @@ void PointerCoords::scale(float globalScaleFactor, float windowXScale, float win
scaleAxisValue(*this, AMOTION_EVENT_AXIS_RELATIVE_Y, windowYScale);
}
-#ifdef __linux__
status_t PointerCoords::readFromParcel(Parcel* parcel) {
bits = parcel->readInt64();
@@ -472,7 +526,6 @@ status_t PointerCoords::writeToParcel(Parcel* parcel) const {
parcel->writeBool(isResampled);
return OK;
}
-#endif
void PointerCoords::tooManyAxes(int axis) {
ALOGW("Could not set value for axis %d because the PointerCoords structure is full and "
@@ -495,36 +548,17 @@ bool PointerCoords::operator==(const PointerCoords& other) const {
return true;
}
-void PointerCoords::transform(const ui::Transform& transform) {
- const vec2 xy = transform.transform(getXYValue());
- setAxisValue(AMOTION_EVENT_AXIS_X, xy.x);
- setAxisValue(AMOTION_EVENT_AXIS_Y, xy.y);
-
- if (BitSet64::hasBit(bits, AMOTION_EVENT_AXIS_RELATIVE_X) ||
- BitSet64::hasBit(bits, AMOTION_EVENT_AXIS_RELATIVE_Y)) {
- const ui::Transform rotation(transform.getOrientation());
- const vec2 relativeXy = rotation.transform(getAxisValue(AMOTION_EVENT_AXIS_RELATIVE_X),
- getAxisValue(AMOTION_EVENT_AXIS_RELATIVE_Y));
- setAxisValue(AMOTION_EVENT_AXIS_RELATIVE_X, relativeXy.x);
- setAxisValue(AMOTION_EVENT_AXIS_RELATIVE_Y, relativeXy.y);
- }
-
- if (BitSet64::hasBit(bits, AMOTION_EVENT_AXIS_ORIENTATION)) {
- const float val = getAxisValue(AMOTION_EVENT_AXIS_ORIENTATION);
- setAxisValue(AMOTION_EVENT_AXIS_ORIENTATION, transformAngle(transform, val));
- }
-}
-
// --- MotionEvent ---
-void MotionEvent::initialize(int32_t id, int32_t deviceId, uint32_t source, int32_t displayId,
- std::array<uint8_t, 32> hmac, int32_t action, int32_t actionButton,
- int32_t flags, int32_t edgeFlags, int32_t metaState,
- int32_t buttonState, MotionClassification classification,
- const ui::Transform& transform, float xPrecision, float yPrecision,
- float rawXCursorPosition, float rawYCursorPosition,
- const ui::Transform& rawTransform, nsecs_t downTime, nsecs_t eventTime,
- size_t pointerCount, const PointerProperties* pointerProperties,
+void MotionEvent::initialize(int32_t id, int32_t deviceId, uint32_t source,
+ ui::LogicalDisplayId displayId, std::array<uint8_t, 32> hmac,
+ int32_t action, int32_t actionButton, int32_t flags, int32_t edgeFlags,
+ int32_t metaState, int32_t buttonState,
+ MotionClassification classification, const ui::Transform& transform,
+ float xPrecision, float yPrecision, float rawXCursorPosition,
+ float rawYCursorPosition, const ui::Transform& rawTransform,
+ nsecs_t downTime, nsecs_t eventTime, size_t pointerCount,
+ const PointerProperties* pointerProperties,
const PointerCoords* pointerCoords) {
InputEvent::initialize(id, deviceId, source, displayId, hmac);
mAction = action;
@@ -584,6 +618,28 @@ void MotionEvent::copyFrom(const MotionEvent* other, bool keepHistory) {
}
}
+void MotionEvent::splitFrom(const android::MotionEvent& other,
+ std::bitset<MAX_POINTER_ID + 1> splitPointerIds, int32_t newEventId) {
+ // TODO(b/327503168): The down time should be a parameter to the split function, because only
+ // the caller can know when the first event went down on the target.
+ const nsecs_t splitDownTime = other.mDownTime;
+
+ auto [action, pointerProperties, pointerCoords] =
+ split(other.getAction(), other.getFlags(), other.getHistorySize(),
+ other.mPointerProperties, other.mSamplePointerCoords, splitPointerIds);
+
+ // Initialize the event with zero pointers, and manually set the split pointers.
+ initialize(newEventId, other.mDeviceId, other.mSource, other.mDisplayId, /*hmac=*/{}, action,
+ other.mActionButton, other.mFlags, other.mEdgeFlags, other.mMetaState,
+ other.mButtonState, other.mClassification, other.mTransform, other.mXPrecision,
+ other.mYPrecision, other.mRawXCursorPosition, other.mRawYCursorPosition,
+ other.mRawTransform, splitDownTime, other.getEventTime(), /*pointerCount=*/0,
+ pointerProperties.data(), pointerCoords.data());
+ mPointerProperties = std::move(pointerProperties);
+ mSamplePointerCoords = std::move(pointerCoords);
+ mSampleEventTimes = other.mSampleEventTimes;
+}
+
void MotionEvent::addSample(
int64_t eventTime,
const PointerCoords* pointerCoords) {
@@ -665,13 +721,13 @@ const PointerCoords* MotionEvent::getHistoricalRawPointerCoords(
float MotionEvent::getHistoricalRawAxisValue(int32_t axis, size_t pointerIndex,
size_t historicalIndex) const {
const PointerCoords& coords = *getHistoricalRawPointerCoords(pointerIndex, historicalIndex);
- return calculateTransformedAxisValue(axis, mSource, mRawTransform, coords);
+ return calculateTransformedAxisValue(axis, mSource, mFlags, mRawTransform, coords);
}
float MotionEvent::getHistoricalAxisValue(int32_t axis, size_t pointerIndex,
size_t historicalIndex) const {
const PointerCoords& coords = *getHistoricalRawPointerCoords(pointerIndex, historicalIndex);
- return calculateTransformedAxisValue(axis, mSource, mTransform, coords);
+ return calculateTransformedAxisValue(axis, mSource, mFlags, mTransform, coords);
}
ssize_t MotionEvent::findPointerIndex(int32_t pointerId) const {
@@ -690,6 +746,18 @@ void MotionEvent::offsetLocation(float xOffset, float yOffset) {
mTransform.set(currXOffset + xOffset, currYOffset + yOffset);
}
+float MotionEvent::getRawXOffset() const {
+ // This is equivalent to the x-coordinate of the point that the origin of the raw coordinate
+ // space maps to.
+ return (mTransform * mRawTransform.inverse()).tx();
+}
+
+float MotionEvent::getRawYOffset() const {
+ // This is equivalent to the y-coordinate of the point that the origin of the raw coordinate
+ // space maps to.
+ return (mTransform * mRawTransform.inverse()).ty();
+}
+
void MotionEvent::scale(float globalScaleFactor) {
mTransform.set(mTransform.tx() * globalScaleFactor, mTransform.ty() * globalScaleFactor);
mRawTransform.set(mRawTransform.tx() * globalScaleFactor,
@@ -716,8 +784,9 @@ void MotionEvent::applyTransform(const std::array<float, 9>& matrix) {
transform.set(matrix);
// Apply the transformation to all samples.
- std::for_each(mSamplePointerCoords.begin(), mSamplePointerCoords.end(),
- [&transform](PointerCoords& c) { c.transform(transform); });
+ std::for_each(mSamplePointerCoords.begin(), mSamplePointerCoords.end(), [&](PointerCoords& c) {
+ calculateTransformedCoordsInPlace(c, mSource, mFlags, transform);
+ });
if (mRawXCursorPosition != AMOTION_EVENT_INVALID_CURSOR_POSITION &&
mRawYCursorPosition != AMOTION_EVENT_INVALID_CURSOR_POSITION) {
@@ -727,7 +796,6 @@ void MotionEvent::applyTransform(const std::array<float, 9>& matrix) {
}
}
-#ifdef __linux__
static status_t readFromParcel(ui::Transform& transform, const Parcel& parcel) {
float dsdx, dtdx, tx, dtdy, dsdy, ty;
status_t status = parcel.readFloat(&dsdx);
@@ -762,7 +830,7 @@ status_t MotionEvent::readFromParcel(Parcel* parcel) {
mId = parcel->readInt32();
mDeviceId = parcel->readInt32();
mSource = parcel->readUint32();
- mDisplayId = parcel->readInt32();
+ mDisplayId = ui::LogicalDisplayId{parcel->readInt32()};
std::vector<uint8_t> hmac;
status_t result = parcel->readByteVector(&hmac);
if (result != OK || hmac.size() != 32) {
@@ -830,7 +898,7 @@ status_t MotionEvent::writeToParcel(Parcel* parcel) const {
parcel->writeInt32(mId);
parcel->writeInt32(mDeviceId);
parcel->writeUint32(mSource);
- parcel->writeInt32(mDisplayId);
+ parcel->writeInt32(mDisplayId.val());
std::vector<uint8_t> hmac(mHmac.begin(), mHmac.end());
parcel->writeByteVector(hmac);
parcel->writeInt32(mAction);
@@ -874,7 +942,6 @@ status_t MotionEvent::writeToParcel(Parcel* parcel) const {
}
return OK;
}
-#endif
bool MotionEvent::isTouchEvent(uint32_t source, int32_t action) {
if (isFromSource(source, AINPUT_SOURCE_CLASS_POINTER)) {
@@ -934,6 +1001,46 @@ std::string MotionEvent::actionToString(int32_t action) {
return android::base::StringPrintf("%" PRId32, action);
}
+std::tuple<int32_t, std::vector<PointerProperties>, std::vector<PointerCoords>> MotionEvent::split(
+ int32_t action, int32_t flags, int32_t historySize,
+ const std::vector<PointerProperties>& pointerProperties,
+ const std::vector<PointerCoords>& pointerCoords,
+ std::bitset<MAX_POINTER_ID + 1> splitPointerIds) {
+ LOG_ALWAYS_FATAL_IF(!splitPointerIds.any());
+ const auto pointerCount = pointerProperties.size();
+ LOG_ALWAYS_FATAL_IF(pointerCoords.size() != (pointerCount * (historySize + 1)));
+ const auto splitCount = splitPointerIds.count();
+
+ std::vector<PointerProperties> splitPointerProperties;
+ std::vector<PointerCoords> splitPointerCoords;
+
+ for (uint32_t i = 0; i < pointerCount; i++) {
+ if (splitPointerIds.test(pointerProperties[i].id)) {
+ splitPointerProperties.emplace_back(pointerProperties[i]);
+ }
+ }
+ for (uint32_t i = 0; i < pointerCoords.size(); i++) {
+ if (splitPointerIds.test(pointerProperties[i % pointerCount].id)) {
+ splitPointerCoords.emplace_back(pointerCoords[i]);
+ }
+ }
+ LOG_ALWAYS_FATAL_IF(splitPointerCoords.size() !=
+ (splitPointerProperties.size() * (historySize + 1)));
+
+ if (CC_UNLIKELY(splitPointerProperties.size() != splitCount)) {
+ // TODO(b/329107108): Promote this to a fatal check once bugs in the caller are resolved.
+ LOG(ERROR) << "Cannot split MotionEvent: Requested splitting " << splitCount
+ << " pointers from the original event, but the original event only contained "
+ << splitPointerProperties.size() << " of those pointers.";
+ }
+
+ // TODO(b/327503168): Verify the splitDownTime here once it is used correctly.
+
+ const auto splitAction = resolveActionForSplitMotionEvent(action, flags, pointerProperties,
+ splitPointerProperties);
+ return {splitAction, splitPointerProperties, splitPointerCoords};
+}
+
// Apply the given transformation to the point without checking whether the entire transform
// should be disregarded altogether for the provided source.
static inline vec2 calculateTransformedXYUnchecked(uint32_t source, const ui::Transform& transform,
@@ -951,7 +1058,7 @@ vec2 MotionEvent::calculateTransformedXY(uint32_t source, const ui::Transform& t
}
// Keep in sync with calculateTransformedCoords.
-float MotionEvent::calculateTransformedAxisValue(int32_t axis, uint32_t source,
+float MotionEvent::calculateTransformedAxisValue(int32_t axis, uint32_t source, int32_t flags,
const ui::Transform& transform,
const PointerCoords& coords) {
if (shouldDisregardTransformation(source)) {
@@ -973,7 +1080,7 @@ float MotionEvent::calculateTransformedAxisValue(int32_t axis, uint32_t source,
}
if (axis == AMOTION_EVENT_AXIS_ORIENTATION) {
- return transformAngle(transform, coords.getAxisValue(AMOTION_EVENT_AXIS_ORIENTATION));
+ return transformOrientation(transform, coords, flags);
}
return coords.getAxisValue(axis);
@@ -981,29 +1088,32 @@ float MotionEvent::calculateTransformedAxisValue(int32_t axis, uint32_t source,
// Keep in sync with calculateTransformedAxisValue. This is an optimization of
// calculateTransformedAxisValue for all PointerCoords axes.
-PointerCoords MotionEvent::calculateTransformedCoords(uint32_t source,
- const ui::Transform& transform,
- const PointerCoords& coords) {
+void MotionEvent::calculateTransformedCoordsInPlace(PointerCoords& coords, uint32_t source,
+ int32_t flags, const ui::Transform& transform) {
if (shouldDisregardTransformation(source)) {
- return coords;
+ return;
}
- PointerCoords out = coords;
const vec2 xy = calculateTransformedXYUnchecked(source, transform, coords.getXYValue());
- out.setAxisValue(AMOTION_EVENT_AXIS_X, xy.x);
- out.setAxisValue(AMOTION_EVENT_AXIS_Y, xy.y);
+ coords.setAxisValue(AMOTION_EVENT_AXIS_X, xy.x);
+ coords.setAxisValue(AMOTION_EVENT_AXIS_Y, xy.y);
const vec2 relativeXy =
transformWithoutTranslation(transform,
{coords.getAxisValue(AMOTION_EVENT_AXIS_RELATIVE_X),
coords.getAxisValue(AMOTION_EVENT_AXIS_RELATIVE_Y)});
- out.setAxisValue(AMOTION_EVENT_AXIS_RELATIVE_X, relativeXy.x);
- out.setAxisValue(AMOTION_EVENT_AXIS_RELATIVE_Y, relativeXy.y);
+ coords.setAxisValue(AMOTION_EVENT_AXIS_RELATIVE_X, relativeXy.x);
+ coords.setAxisValue(AMOTION_EVENT_AXIS_RELATIVE_Y, relativeXy.y);
- out.setAxisValue(AMOTION_EVENT_AXIS_ORIENTATION,
- transformAngle(transform,
- coords.getAxisValue(AMOTION_EVENT_AXIS_ORIENTATION)));
+ coords.setAxisValue(AMOTION_EVENT_AXIS_ORIENTATION,
+ transformOrientation(transform, coords, flags));
+}
+PointerCoords MotionEvent::calculateTransformedCoords(uint32_t source, int32_t flags,
+ const ui::Transform& transform,
+ const PointerCoords& coords) {
+ PointerCoords out = coords;
+ calculateTransformedCoordsInPlace(out, source, flags, transform);
return out;
}
@@ -1090,7 +1200,7 @@ std::ostream& operator<<(std::ostream& out, const MotionEvent& event) {
void FocusEvent::initialize(int32_t id, bool hasFocus) {
InputEvent::initialize(id, ReservedInputDeviceId::VIRTUAL_KEYBOARD_ID, AINPUT_SOURCE_UNKNOWN,
- ADISPLAY_ID_NONE, INVALID_HMAC);
+ ui::LogicalDisplayId::INVALID, INVALID_HMAC);
mHasFocus = hasFocus;
}
@@ -1103,7 +1213,7 @@ void FocusEvent::initialize(const FocusEvent& from) {
void CaptureEvent::initialize(int32_t id, bool pointerCaptureEnabled) {
InputEvent::initialize(id, ReservedInputDeviceId::VIRTUAL_KEYBOARD_ID, AINPUT_SOURCE_UNKNOWN,
- ADISPLAY_ID_NONE, INVALID_HMAC);
+ ui::LogicalDisplayId::INVALID, INVALID_HMAC);
mPointerCaptureEnabled = pointerCaptureEnabled;
}
@@ -1116,7 +1226,7 @@ void CaptureEvent::initialize(const CaptureEvent& from) {
void DragEvent::initialize(int32_t id, float x, float y, bool isExiting) {
InputEvent::initialize(id, ReservedInputDeviceId::VIRTUAL_KEYBOARD_ID, AINPUT_SOURCE_UNKNOWN,
- ADISPLAY_ID_NONE, INVALID_HMAC);
+ ui::LogicalDisplayId::INVALID, INVALID_HMAC);
mIsExiting = isExiting;
mX = x;
mY = y;
@@ -1133,7 +1243,7 @@ void DragEvent::initialize(const DragEvent& from) {
void TouchModeEvent::initialize(int32_t id, bool isInTouchMode) {
InputEvent::initialize(id, ReservedInputDeviceId::VIRTUAL_KEYBOARD_ID, AINPUT_SOURCE_UNKNOWN,
- ADISPLAY_ID_NONE, INVALID_HMAC);
+ ui::LogicalDisplayId::INVALID, INVALID_HMAC);
mIsInTouchMode = isInTouchMode;
}
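
Note on the new split API in Input.cpp above: MotionEvent::splitFrom copies the subset of pointers selected by id into a new event and re-resolves the action via resolveActionForSplitMotionEvent, so a POINTER_DOWN/POINTER_UP whose acting pointer is not part of the split becomes a MOVE, and a split that leaves a single pointer collapses to DOWN/UP (or CANCEL when AMOTION_EVENT_FLAG_CANCELED is set). A minimal usage sketch, not part of this patch; the pointer id and new event id below are invented for illustration:

    // Sketch only: split the pointer with id 1 out of an existing MotionEvent.
    // Assumes <input/Input.h>; the chosen ids are hypothetical.
    #include <bitset>
    #include <input/Input.h>

    using namespace android;

    MotionEvent splitOffPointer(const MotionEvent& original) {
        std::bitset<MAX_POINTER_ID + 1> splitPointerIds;
        splitPointerIds.set(/*pointerId=*/1);

        MotionEvent split;
        // The split event keeps the device, source, transforms and sample history
        // of `original`; its action and down time are resolved as described above.
        split.splitFrom(original, splitPointerIds, /*newEventId=*/42);
        return split;
    }
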
diff --git a/libs/input/InputConsumer.cpp b/libs/input/InputConsumer.cpp
new file mode 100644
index 0000000000..fcf490d5f9
--- /dev/null
+++ b/libs/input/InputConsumer.cpp
@@ -0,0 +1,962 @@
+/**
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstdint>
+#define LOG_TAG "InputTransport"
+#define ATRACE_TAG ATRACE_TAG_INPUT
+
+#include <errno.h>
+#include <fcntl.h>
+#include <inttypes.h>
+#include <math.h>
+#include <poll.h>
+#include <sys/socket.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+#include <android-base/logging.h>
+#include <android-base/properties.h>
+#include <android-base/stringprintf.h>
+#include <binder/Parcel.h>
+#include <cutils/properties.h>
+#include <ftl/enum.h>
+#include <log/log.h>
+#include <utils/Trace.h>
+
+#include <com_android_input_flags.h>
+#include <input/InputConsumer.h>
+#include <input/PrintTools.h>
+#include <input/TraceTools.h>
+
+namespace input_flags = com::android::input::flags;
+
+namespace android {
+
+namespace {
+
+/**
+ * Log debug messages relating to the consumer end of the transport channel.
+ * Enable this via "adb shell setprop log.tag.InputTransportConsumer DEBUG" (requires restart)
+ */
+
+const bool DEBUG_TRANSPORT_CONSUMER =
+ __android_log_is_loggable(ANDROID_LOG_DEBUG, LOG_TAG "Consumer", ANDROID_LOG_INFO);
+
+const bool IS_DEBUGGABLE_BUILD =
+#if defined(__ANDROID__)
+ android::base::GetBoolProperty("ro.debuggable", false);
+#else
+ true;
+#endif
+
+/**
+ * Log debug messages about touch event resampling.
+ *
+ * Enable this via "adb shell setprop log.tag.InputTransportResampling DEBUG".
+ * This requires a restart on non-debuggable (e.g. user) builds, but should take effect immediately
+ * on debuggable builds (e.g. userdebug).
+ */
+bool debugResampling() {
+ if (!IS_DEBUGGABLE_BUILD) {
+ static const bool DEBUG_TRANSPORT_RESAMPLING =
+ __android_log_is_loggable(ANDROID_LOG_DEBUG, LOG_TAG "Resampling",
+ ANDROID_LOG_INFO);
+ return DEBUG_TRANSPORT_RESAMPLING;
+ }
+ return __android_log_is_loggable(ANDROID_LOG_DEBUG, LOG_TAG "Resampling", ANDROID_LOG_INFO);
+}
+
+void initializeKeyEvent(KeyEvent& event, const InputMessage& msg) {
+ event.initialize(msg.body.key.eventId, msg.body.key.deviceId, msg.body.key.source,
+ ui::LogicalDisplayId{msg.body.key.displayId}, msg.body.key.hmac,
+ msg.body.key.action, msg.body.key.flags, msg.body.key.keyCode,
+ msg.body.key.scanCode, msg.body.key.metaState, msg.body.key.repeatCount,
+ msg.body.key.downTime, msg.body.key.eventTime);
+}
+
+void initializeFocusEvent(FocusEvent& event, const InputMessage& msg) {
+ event.initialize(msg.body.focus.eventId, msg.body.focus.hasFocus);
+}
+
+void initializeCaptureEvent(CaptureEvent& event, const InputMessage& msg) {
+ event.initialize(msg.body.capture.eventId, msg.body.capture.pointerCaptureEnabled);
+}
+
+void initializeDragEvent(DragEvent& event, const InputMessage& msg) {
+ event.initialize(msg.body.drag.eventId, msg.body.drag.x, msg.body.drag.y,
+ msg.body.drag.isExiting);
+}
+
+void initializeMotionEvent(MotionEvent& event, const InputMessage& msg) {
+ uint32_t pointerCount = msg.body.motion.pointerCount;
+ PointerProperties pointerProperties[pointerCount];
+ PointerCoords pointerCoords[pointerCount];
+ for (uint32_t i = 0; i < pointerCount; i++) {
+ pointerProperties[i] = msg.body.motion.pointers[i].properties;
+ pointerCoords[i] = msg.body.motion.pointers[i].coords;
+ }
+
+ ui::Transform transform;
+ transform.set({msg.body.motion.dsdx, msg.body.motion.dtdx, msg.body.motion.tx,
+ msg.body.motion.dtdy, msg.body.motion.dsdy, msg.body.motion.ty, 0, 0, 1});
+ ui::Transform displayTransform;
+ displayTransform.set({msg.body.motion.dsdxRaw, msg.body.motion.dtdxRaw, msg.body.motion.txRaw,
+ msg.body.motion.dtdyRaw, msg.body.motion.dsdyRaw, msg.body.motion.tyRaw,
+ 0, 0, 1});
+ event.initialize(msg.body.motion.eventId, msg.body.motion.deviceId, msg.body.motion.source,
+ ui::LogicalDisplayId{msg.body.motion.displayId}, msg.body.motion.hmac,
+ msg.body.motion.action, msg.body.motion.actionButton, msg.body.motion.flags,
+ msg.body.motion.edgeFlags, msg.body.motion.metaState,
+ msg.body.motion.buttonState, msg.body.motion.classification, transform,
+ msg.body.motion.xPrecision, msg.body.motion.yPrecision,
+ msg.body.motion.xCursorPosition, msg.body.motion.yCursorPosition,
+ displayTransform, msg.body.motion.downTime, msg.body.motion.eventTime,
+ pointerCount, pointerProperties, pointerCoords);
+}
+
+void addSample(MotionEvent& event, const InputMessage& msg) {
+ uint32_t pointerCount = msg.body.motion.pointerCount;
+ PointerCoords pointerCoords[pointerCount];
+ for (uint32_t i = 0; i < pointerCount; i++) {
+ pointerCoords[i] = msg.body.motion.pointers[i].coords;
+ }
+
+ event.setMetaState(event.getMetaState() | msg.body.motion.metaState);
+ event.addSample(msg.body.motion.eventTime, pointerCoords);
+}
+
+void initializeTouchModeEvent(TouchModeEvent& event, const InputMessage& msg) {
+ event.initialize(msg.body.touchMode.eventId, msg.body.touchMode.isInTouchMode);
+}
+
+// Nanoseconds per milliseconds.
+constexpr nsecs_t NANOS_PER_MS = 1000000;
+
+// Latency added during resampling. A few milliseconds doesn't hurt much but
+// reduces the impact of mispredicted touch positions.
+const std::chrono::duration RESAMPLE_LATENCY = 5ms;
+
+// Minimum time difference between consecutive samples before attempting to resample.
+const nsecs_t RESAMPLE_MIN_DELTA = 2 * NANOS_PER_MS;
+
+// Maximum time difference between consecutive samples before attempting to resample
+// by extrapolation.
+const nsecs_t RESAMPLE_MAX_DELTA = 20 * NANOS_PER_MS;
+
+// Maximum time to predict forward from the last known state, to avoid predicting too
+// far into the future. This time is further bounded by 50% of the last time delta.
+const nsecs_t RESAMPLE_MAX_PREDICTION = 8 * NANOS_PER_MS;
+
+/**
+ * System property for enabling / disabling touch resampling.
+ * Resampling extrapolates / interpolates the reported touch event coordinates to better
+ * align them to the VSYNC signal, thus resulting in smoother scrolling performance.
+ * Resampling is not needed (and should be disabled) on hardware that already
+ * has touch events triggered by VSYNC.
+ * Set to "1" to enable resampling (default).
+ * Set to "0" to disable resampling.
+ * Resampling is enabled by default.
+ */
+const char* PROPERTY_RESAMPLING_ENABLED = "ro.input.resampling";
+
+inline float lerp(float a, float b, float alpha) {
+ return a + alpha * (b - a);
+}
+
+inline bool isPointerEvent(int32_t source) {
+ return (source & AINPUT_SOURCE_CLASS_POINTER) == AINPUT_SOURCE_CLASS_POINTER;
+}
+
+bool shouldResampleTool(ToolType toolType) {
+ return toolType == ToolType::FINGER || toolType == ToolType::MOUSE ||
+ toolType == ToolType::STYLUS || toolType == ToolType::UNKNOWN;
+}
+
+} // namespace
+
+using android::base::Result;
+using android::base::StringPrintf;
+
+// --- InputConsumer ---
+
+InputConsumer::InputConsumer(const std::shared_ptr<InputChannel>& channel)
+ : InputConsumer(channel, isTouchResamplingEnabled()) {}
+
+InputConsumer::InputConsumer(const std::shared_ptr<InputChannel>& channel,
+ bool enableTouchResampling)
+ : mResampleTouch(enableTouchResampling),
+ mChannel(channel),
+ mProcessingTraceTag(StringPrintf("InputConsumer processing on %s (%p)",
+ mChannel->getName().c_str(), this)),
+ mLifetimeTraceTag(StringPrintf("InputConsumer lifetime on %s (%p)",
+ mChannel->getName().c_str(), this)),
+ mLifetimeTraceCookie(
+ static_cast<int32_t>(reinterpret_cast<std::uintptr_t>(this) & 0xFFFFFFFF)),
+ mMsgDeferred(false) {
+ ATRACE_ASYNC_BEGIN(mLifetimeTraceTag.c_str(), /*cookie=*/mLifetimeTraceCookie);
+}
+
+InputConsumer::~InputConsumer() {
+ ATRACE_ASYNC_END(mLifetimeTraceTag.c_str(), /*cookie=*/mLifetimeTraceCookie);
+}
+
+bool InputConsumer::isTouchResamplingEnabled() {
+ return property_get_bool(PROPERTY_RESAMPLING_ENABLED, true);
+}
+
+status_t InputConsumer::consume(InputEventFactoryInterface* factory, bool consumeBatches,
+ nsecs_t frameTime, uint32_t* outSeq, InputEvent** outEvent) {
+ ALOGD_IF(DEBUG_TRANSPORT_CONSUMER,
+ "channel '%s' consumer ~ consume: consumeBatches=%s, frameTime=%" PRId64,
+ mChannel->getName().c_str(), toString(consumeBatches), frameTime);
+
+ *outSeq = 0;
+ *outEvent = nullptr;
+
+ // Fetch the next input message.
+ // Loop until an event can be returned or no additional events are received.
+ while (!*outEvent) {
+ if (mMsgDeferred) {
+ // mMsg contains a valid input message from the previous call to consume
+ // that has not yet been processed.
+ mMsgDeferred = false;
+ } else {
+ // Receive a fresh message.
+ status_t result = mChannel->receiveMessage(&mMsg);
+ if (result == OK) {
+ const auto [_, inserted] =
+ mConsumeTimes.emplace(mMsg.header.seq, systemTime(SYSTEM_TIME_MONOTONIC));
+ LOG_ALWAYS_FATAL_IF(!inserted, "Already have a consume time for seq=%" PRIu32,
+ mMsg.header.seq);
+
+ // Trace the event processing timeline - event was just read from the socket
+ ATRACE_ASYNC_BEGIN(mProcessingTraceTag.c_str(), /*cookie=*/mMsg.header.seq);
+ }
+ if (result) {
+ // Consume the next batched event unless batches are being held for later.
+ if (consumeBatches || result != WOULD_BLOCK) {
+ result = consumeBatch(factory, frameTime, outSeq, outEvent);
+ if (*outEvent) {
+ ALOGD_IF(DEBUG_TRANSPORT_CONSUMER,
+ "channel '%s' consumer ~ consumed batch event, seq=%u",
+ mChannel->getName().c_str(), *outSeq);
+ break;
+ }
+ }
+ return result;
+ }
+ }
+
+ switch (mMsg.header.type) {
+ case InputMessage::Type::KEY: {
+ KeyEvent* keyEvent = factory->createKeyEvent();
+ if (!keyEvent) return NO_MEMORY;
+
+ initializeKeyEvent(*keyEvent, mMsg);
+ *outSeq = mMsg.header.seq;
+ *outEvent = keyEvent;
+ ALOGD_IF(DEBUG_TRANSPORT_CONSUMER,
+ "channel '%s' consumer ~ consumed key event, seq=%u",
+ mChannel->getName().c_str(), *outSeq);
+ break;
+ }
+
+ case InputMessage::Type::MOTION: {
+ ssize_t batchIndex = findBatch(mMsg.body.motion.deviceId, mMsg.body.motion.source);
+ if (batchIndex >= 0) {
+ Batch& batch = mBatches[batchIndex];
+ if (canAddSample(batch, &mMsg)) {
+ batch.samples.push_back(mMsg);
+ ALOGD_IF(DEBUG_TRANSPORT_CONSUMER,
+ "channel '%s' consumer ~ appended to batch event",
+ mChannel->getName().c_str());
+ break;
+ } else if (isPointerEvent(mMsg.body.motion.source) &&
+ mMsg.body.motion.action == AMOTION_EVENT_ACTION_CANCEL) {
+ // No need to process events that we are going to cancel anyways
+ const size_t count = batch.samples.size();
+ for (size_t i = 0; i < count; i++) {
+ const InputMessage& msg = batch.samples[i];
+ sendFinishedSignal(msg.header.seq, false);
+ }
+ batch.samples.erase(batch.samples.begin(), batch.samples.begin() + count);
+ mBatches.erase(mBatches.begin() + batchIndex);
+ } else {
+ // We cannot append to the batch in progress, so we need to consume
+ // the previous batch right now and defer the new message until later.
+ mMsgDeferred = true;
+ status_t result = consumeSamples(factory, batch, batch.samples.size(),
+ outSeq, outEvent);
+ mBatches.erase(mBatches.begin() + batchIndex);
+ if (result) {
+ return result;
+ }
+ ALOGD_IF(DEBUG_TRANSPORT_CONSUMER,
+ "channel '%s' consumer ~ consumed batch event and "
+ "deferred current event, seq=%u",
+ mChannel->getName().c_str(), *outSeq);
+ break;
+ }
+ }
+
+ // Start a new batch if needed.
+ if (mMsg.body.motion.action == AMOTION_EVENT_ACTION_MOVE ||
+ mMsg.body.motion.action == AMOTION_EVENT_ACTION_HOVER_MOVE) {
+ Batch batch;
+ batch.samples.push_back(mMsg);
+ mBatches.push_back(batch);
+ ALOGD_IF(DEBUG_TRANSPORT_CONSUMER,
+ "channel '%s' consumer ~ started batch event",
+ mChannel->getName().c_str());
+ break;
+ }
+
+ MotionEvent* motionEvent = factory->createMotionEvent();
+ if (!motionEvent) return NO_MEMORY;
+
+ updateTouchState(mMsg);
+ initializeMotionEvent(*motionEvent, mMsg);
+ *outSeq = mMsg.header.seq;
+ *outEvent = motionEvent;
+
+ ALOGD_IF(DEBUG_TRANSPORT_CONSUMER,
+ "channel '%s' consumer ~ consumed motion event, seq=%u",
+ mChannel->getName().c_str(), *outSeq);
+ break;
+ }
+
+ case InputMessage::Type::FINISHED:
+ case InputMessage::Type::TIMELINE: {
+ LOG(FATAL) << "Consumed a " << ftl::enum_string(mMsg.header.type)
+ << " message, which should never be seen by "
+ "InputConsumer on "
+ << mChannel->getName();
+ break;
+ }
+
+ case InputMessage::Type::FOCUS: {
+ FocusEvent* focusEvent = factory->createFocusEvent();
+ if (!focusEvent) return NO_MEMORY;
+
+ initializeFocusEvent(*focusEvent, mMsg);
+ *outSeq = mMsg.header.seq;
+ *outEvent = focusEvent;
+ break;
+ }
+
+ case InputMessage::Type::CAPTURE: {
+ CaptureEvent* captureEvent = factory->createCaptureEvent();
+ if (!captureEvent) return NO_MEMORY;
+
+ initializeCaptureEvent(*captureEvent, mMsg);
+ *outSeq = mMsg.header.seq;
+ *outEvent = captureEvent;
+ break;
+ }
+
+ case InputMessage::Type::DRAG: {
+ DragEvent* dragEvent = factory->createDragEvent();
+ if (!dragEvent) return NO_MEMORY;
+
+ initializeDragEvent(*dragEvent, mMsg);
+ *outSeq = mMsg.header.seq;
+ *outEvent = dragEvent;
+ break;
+ }
+
+ case InputMessage::Type::TOUCH_MODE: {
+ TouchModeEvent* touchModeEvent = factory->createTouchModeEvent();
+ if (!touchModeEvent) return NO_MEMORY;
+
+ initializeTouchModeEvent(*touchModeEvent, mMsg);
+ *outSeq = mMsg.header.seq;
+ *outEvent = touchModeEvent;
+ break;
+ }
+ }
+ }
+ return OK;
+}
+
+status_t InputConsumer::consumeBatch(InputEventFactoryInterface* factory, nsecs_t frameTime,
+ uint32_t* outSeq, InputEvent** outEvent) {
+ status_t result;
+ for (size_t i = mBatches.size(); i > 0;) {
+ i--;
+ Batch& batch = mBatches[i];
+ if (frameTime < 0) {
+ result = consumeSamples(factory, batch, batch.samples.size(), outSeq, outEvent);
+ mBatches.erase(mBatches.begin() + i);
+ return result;
+ }
+
+ nsecs_t sampleTime = frameTime;
+ if (mResampleTouch) {
+ sampleTime -= std::chrono::nanoseconds(RESAMPLE_LATENCY).count();
+ }
+ ssize_t split = findSampleNoLaterThan(batch, sampleTime);
+ if (split < 0) {
+ continue;
+ }
+
+ result = consumeSamples(factory, batch, split + 1, outSeq, outEvent);
+ const InputMessage* next;
+ if (batch.samples.empty()) {
+ mBatches.erase(mBatches.begin() + i);
+ next = nullptr;
+ } else {
+ next = &batch.samples[0];
+ }
+ if (!result && mResampleTouch) {
+ resampleTouchState(sampleTime, static_cast<MotionEvent*>(*outEvent), next);
+ }
+ return result;
+ }
+
+ return WOULD_BLOCK;
+}
+
+status_t InputConsumer::consumeSamples(InputEventFactoryInterface* factory, Batch& batch,
+ size_t count, uint32_t* outSeq, InputEvent** outEvent) {
+ MotionEvent* motionEvent = factory->createMotionEvent();
+ if (!motionEvent) return NO_MEMORY;
+
+ uint32_t chain = 0;
+ for (size_t i = 0; i < count; i++) {
+ InputMessage& msg = batch.samples[i];
+ updateTouchState(msg);
+ if (i) {
+ SeqChain seqChain;
+ seqChain.seq = msg.header.seq;
+ seqChain.chain = chain;
+ mSeqChains.push_back(seqChain);
+ addSample(*motionEvent, msg);
+ } else {
+ initializeMotionEvent(*motionEvent, msg);
+ }
+ chain = msg.header.seq;
+ }
+ batch.samples.erase(batch.samples.begin(), batch.samples.begin() + count);
+
+ *outSeq = chain;
+ *outEvent = motionEvent;
+ return OK;
+}
+
+void InputConsumer::updateTouchState(InputMessage& msg) {
+ if (!mResampleTouch || !isPointerEvent(msg.body.motion.source)) {
+ return;
+ }
+
+ int32_t deviceId = msg.body.motion.deviceId;
+ int32_t source = msg.body.motion.source;
+
+ // Update the touch state history to incorporate the new input message.
+ // If the message is in the past relative to the most recently produced resampled
+ // touch, then use the resampled time and coordinates instead.
+ switch (msg.body.motion.action & AMOTION_EVENT_ACTION_MASK) {
+ case AMOTION_EVENT_ACTION_DOWN: {
+ ssize_t index = findTouchState(deviceId, source);
+ if (index < 0) {
+ mTouchStates.push_back({});
+ index = mTouchStates.size() - 1;
+ }
+ TouchState& touchState = mTouchStates[index];
+ touchState.initialize(deviceId, source);
+ touchState.addHistory(msg);
+ break;
+ }
+
+ case AMOTION_EVENT_ACTION_MOVE: {
+ ssize_t index = findTouchState(deviceId, source);
+ if (index >= 0) {
+ TouchState& touchState = mTouchStates[index];
+ touchState.addHistory(msg);
+ rewriteMessage(touchState, msg);
+ }
+ break;
+ }
+
+ case AMOTION_EVENT_ACTION_POINTER_DOWN: {
+ ssize_t index = findTouchState(deviceId, source);
+ if (index >= 0) {
+ TouchState& touchState = mTouchStates[index];
+ touchState.lastResample.idBits.clearBit(msg.body.motion.getActionId());
+ rewriteMessage(touchState, msg);
+ }
+ break;
+ }
+
+ case AMOTION_EVENT_ACTION_POINTER_UP: {
+ ssize_t index = findTouchState(deviceId, source);
+ if (index >= 0) {
+ TouchState& touchState = mTouchStates[index];
+ rewriteMessage(touchState, msg);
+ touchState.lastResample.idBits.clearBit(msg.body.motion.getActionId());
+ }
+ break;
+ }
+
+ case AMOTION_EVENT_ACTION_SCROLL: {
+ ssize_t index = findTouchState(deviceId, source);
+ if (index >= 0) {
+ TouchState& touchState = mTouchStates[index];
+ rewriteMessage(touchState, msg);
+ }
+ break;
+ }
+
+ case AMOTION_EVENT_ACTION_UP:
+ case AMOTION_EVENT_ACTION_CANCEL: {
+ ssize_t index = findTouchState(deviceId, source);
+ if (index >= 0) {
+ TouchState& touchState = mTouchStates[index];
+ rewriteMessage(touchState, msg);
+ mTouchStates.erase(mTouchStates.begin() + index);
+ }
+ break;
+ }
+ }
+}
+
+/**
+ * Replace the coordinates in msg with the coordinates in lastResample, if necessary.
+ *
+ * If lastResample is no longer valid for a specific pointer (i.e. the lastResample time
+ * is in the past relative to msg and the past two events do not contain identical coordinates),
+ * then invalidate the lastResample data for that pointer.
+ * If the two past events have identical coordinates, then lastResample data for that pointer will
+ * remain valid, and will be used to replace these coordinates. Thus, if a certain coordinate x0 is
+ * resampled to the new value x1, then x1 will always be used to replace x0 until some new value
+ * not equal to x0 is received.
+ */
+void InputConsumer::rewriteMessage(TouchState& state, InputMessage& msg) {
+ nsecs_t eventTime = msg.body.motion.eventTime;
+ for (uint32_t i = 0; i < msg.body.motion.pointerCount; i++) {
+ uint32_t id = msg.body.motion.pointers[i].properties.id;
+ if (state.lastResample.idBits.hasBit(id)) {
+ if (eventTime < state.lastResample.eventTime ||
+ state.recentCoordinatesAreIdentical(id)) {
+ PointerCoords& msgCoords = msg.body.motion.pointers[i].coords;
+ const PointerCoords& resampleCoords = state.lastResample.getPointerById(id);
+ ALOGD_IF(debugResampling(), "[%d] - rewrite (%0.3f, %0.3f), old (%0.3f, %0.3f)", id,
+ resampleCoords.getX(), resampleCoords.getY(), msgCoords.getX(),
+ msgCoords.getY());
+ msgCoords.setAxisValue(AMOTION_EVENT_AXIS_X, resampleCoords.getX());
+ msgCoords.setAxisValue(AMOTION_EVENT_AXIS_Y, resampleCoords.getY());
+ msgCoords.isResampled = true;
+ } else {
+ state.lastResample.idBits.clearBit(id);
+ }
+ }
+ }
+}
+
+void InputConsumer::resampleTouchState(nsecs_t sampleTime, MotionEvent* event,
+ const InputMessage* next) {
+ if (!mResampleTouch || !(isPointerEvent(event->getSource())) ||
+ event->getAction() != AMOTION_EVENT_ACTION_MOVE) {
+ return;
+ }
+
+ ssize_t index = findTouchState(event->getDeviceId(), event->getSource());
+ if (index < 0) {
+ ALOGD_IF(debugResampling(), "Not resampled, no touch state for device.");
+ return;
+ }
+
+ TouchState& touchState = mTouchStates[index];
+ if (touchState.historySize < 1) {
+ ALOGD_IF(debugResampling(), "Not resampled, no history for device.");
+ return;
+ }
+
+ // Ensure that the current sample has all of the pointers that need to be reported.
+ const History* current = touchState.getHistory(0);
+ size_t pointerCount = event->getPointerCount();
+ for (size_t i = 0; i < pointerCount; i++) {
+ uint32_t id = event->getPointerId(i);
+ if (!current->idBits.hasBit(id)) {
+ ALOGD_IF(debugResampling(), "Not resampled, missing id %d", id);
+ return;
+ }
+ if (!shouldResampleTool(event->getToolType(i))) {
+ ALOGD_IF(debugResampling(),
+ "Not resampled, containing unsupported tool type at pointer %d", id);
+ return;
+ }
+ }
+
+ // Find the data to use for resampling.
+ const History* other;
+ History future;
+ float alpha;
+ if (next) {
+ // Interpolate between current sample and future sample.
+ // So current->eventTime <= sampleTime <= future.eventTime.
+ future.initializeFrom(*next);
+ other = &future;
+ nsecs_t delta = future.eventTime - current->eventTime;
+ if (delta < RESAMPLE_MIN_DELTA) {
+ ALOGD_IF(debugResampling(), "Not resampled, delta time is too small: %" PRId64 " ns.",
+ delta);
+ return;
+ }
+ alpha = float(sampleTime - current->eventTime) / delta;
+ } else if (touchState.historySize >= 2) {
+ // Extrapolate future sample using current sample and past sample.
+ // So other->eventTime <= current->eventTime <= sampleTime.
+ other = touchState.getHistory(1);
+ nsecs_t delta = current->eventTime - other->eventTime;
+ if (delta < RESAMPLE_MIN_DELTA) {
+ ALOGD_IF(debugResampling(), "Not resampled, delta time is too small: %" PRId64 " ns.",
+ delta);
+ return;
+ } else if (delta > RESAMPLE_MAX_DELTA) {
+ ALOGD_IF(debugResampling(), "Not resampled, delta time is too large: %" PRId64 " ns.",
+ delta);
+ return;
+ }
+ nsecs_t maxPredict = current->eventTime + std::min(delta / 2, RESAMPLE_MAX_PREDICTION);
+ if (sampleTime > maxPredict) {
+ ALOGD_IF(debugResampling(),
+ "Sample time is too far in the future, adjusting prediction "
+ "from %" PRId64 " to %" PRId64 " ns.",
+ sampleTime - current->eventTime, maxPredict - current->eventTime);
+ sampleTime = maxPredict;
+ }
+ alpha = float(current->eventTime - sampleTime) / delta;
+ } else {
+ ALOGD_IF(debugResampling(), "Not resampled, insufficient data.");
+ return;
+ }
+
+ if (current->eventTime == sampleTime) {
+ ALOGD_IF(debugResampling(), "Not resampled, 2 events with identical times.");
+ return;
+ }
+
+ for (size_t i = 0; i < pointerCount; i++) {
+ uint32_t id = event->getPointerId(i);
+ if (!other->idBits.hasBit(id)) {
+ ALOGD_IF(debugResampling(), "Not resampled, the other doesn't have pointer id %d.", id);
+ return;
+ }
+ }
+
+ // Resample touch coordinates.
+ History oldLastResample;
+ oldLastResample.initializeFrom(touchState.lastResample);
+ touchState.lastResample.eventTime = sampleTime;
+ touchState.lastResample.idBits.clear();
+ for (size_t i = 0; i < pointerCount; i++) {
+ uint32_t id = event->getPointerId(i);
+ touchState.lastResample.idToIndex[id] = i;
+ touchState.lastResample.idBits.markBit(id);
+ if (oldLastResample.hasPointerId(id) && touchState.recentCoordinatesAreIdentical(id)) {
+ // We maintain the previously resampled value for this pointer (stored in
+ // oldLastResample) when the coordinates for this pointer haven't changed since then.
+ // This way we don't introduce artificial jitter when pointers haven't actually moved.
+ // The isResampled flag isn't cleared as the values don't reflect what the device is
+ // actually reporting.
+
+ // We know here that the coordinates for the pointer haven't changed because we
+ // would've cleared the resampled bit in rewriteMessage if they had. We can't modify
+ // lastResample in place because the mapping from pointer ID to index may have changed.
+ touchState.lastResample.pointers[i] = oldLastResample.getPointerById(id);
+ continue;
+ }
+
+ PointerCoords& resampledCoords = touchState.lastResample.pointers[i];
+ const PointerCoords& currentCoords = current->getPointerById(id);
+ resampledCoords = currentCoords;
+ resampledCoords.isResampled = true;
+ const PointerCoords& otherCoords = other->getPointerById(id);
+ resampledCoords.setAxisValue(AMOTION_EVENT_AXIS_X,
+ lerp(currentCoords.getX(), otherCoords.getX(), alpha));
+ resampledCoords.setAxisValue(AMOTION_EVENT_AXIS_Y,
+ lerp(currentCoords.getY(), otherCoords.getY(), alpha));
+ ALOGD_IF(debugResampling(),
+ "[%d] - out (%0.3f, %0.3f), cur (%0.3f, %0.3f), "
+ "other (%0.3f, %0.3f), alpha %0.3f",
+ id, resampledCoords.getX(), resampledCoords.getY(), currentCoords.getX(),
+ currentCoords.getY(), otherCoords.getX(), otherCoords.getY(), alpha);
+ }
+
+ event->addSample(sampleTime, touchState.lastResample.pointers);
+}
+
+status_t InputConsumer::sendFinishedSignal(uint32_t seq, bool handled) {
+ ALOGD_IF(DEBUG_TRANSPORT_CONSUMER,
+ "channel '%s' consumer ~ sendFinishedSignal: seq=%u, handled=%s",
+ mChannel->getName().c_str(), seq, toString(handled));
+
+ if (!seq) {
+ ALOGE("Attempted to send a finished signal with sequence number 0.");
+ return BAD_VALUE;
+ }
+
+ // Send finished signals for the batch sequence chain first.
+ size_t seqChainCount = mSeqChains.size();
+ if (seqChainCount) {
+ uint32_t currentSeq = seq;
+ uint32_t chainSeqs[seqChainCount];
+ size_t chainIndex = 0;
+ for (size_t i = seqChainCount; i > 0;) {
+ i--;
+ const SeqChain& seqChain = mSeqChains[i];
+ if (seqChain.seq == currentSeq) {
+ currentSeq = seqChain.chain;
+ chainSeqs[chainIndex++] = currentSeq;
+ mSeqChains.erase(mSeqChains.begin() + i);
+ }
+ }
+ status_t status = OK;
+ while (!status && chainIndex > 0) {
+ chainIndex--;
+ status = sendUnchainedFinishedSignal(chainSeqs[chainIndex], handled);
+ }
+ if (status) {
+ // An error occurred so at least one signal was not sent, reconstruct the chain.
+ for (;;) {
+ SeqChain seqChain;
+ seqChain.seq = chainIndex != 0 ? chainSeqs[chainIndex - 1] : seq;
+ seqChain.chain = chainSeqs[chainIndex];
+ mSeqChains.push_back(seqChain);
+ if (!chainIndex) break;
+ chainIndex--;
+ }
+ return status;
+ }
+ }
+
+ // Send finished signal for the last message in the batch.
+ return sendUnchainedFinishedSignal(seq, handled);
+}
+
+status_t InputConsumer::sendTimeline(int32_t inputEventId,
+ std::array<nsecs_t, GraphicsTimeline::SIZE> graphicsTimeline) {
+ ALOGD_IF(DEBUG_TRANSPORT_CONSUMER,
+ "channel '%s' consumer ~ sendTimeline: inputEventId=%" PRId32
+ ", gpuCompletedTime=%" PRId64 ", presentTime=%" PRId64,
+ mChannel->getName().c_str(), inputEventId,
+ graphicsTimeline[GraphicsTimeline::GPU_COMPLETED_TIME],
+ graphicsTimeline[GraphicsTimeline::PRESENT_TIME]);
+
+ InputMessage msg;
+ msg.header.type = InputMessage::Type::TIMELINE;
+ msg.header.seq = 0;
+ msg.body.timeline.eventId = inputEventId;
+ msg.body.timeline.graphicsTimeline = std::move(graphicsTimeline);
+ return mChannel->sendMessage(&msg);
+}
+
+nsecs_t InputConsumer::getConsumeTime(uint32_t seq) const {
+ auto it = mConsumeTimes.find(seq);
+ // Consume time will be missing if either 'finishInputEvent' is called twice, or if it was
+ // called for the wrong (synthetic?) input event. Either way, it is a bug that should be fixed.
+ LOG_ALWAYS_FATAL_IF(it == mConsumeTimes.end(), "Could not find consume time for seq=%" PRIu32,
+ seq);
+ return it->second;
+}
+
+void InputConsumer::popConsumeTime(uint32_t seq) {
+ mConsumeTimes.erase(seq);
+}
+
+status_t InputConsumer::sendUnchainedFinishedSignal(uint32_t seq, bool handled) {
+ InputMessage msg;
+ msg.header.type = InputMessage::Type::FINISHED;
+ msg.header.seq = seq;
+ msg.body.finished.handled = handled;
+ msg.body.finished.consumeTime = getConsumeTime(seq);
+ status_t result = mChannel->sendMessage(&msg);
+ if (result == OK) {
+ // Remove the consume time if the socket write succeeded. We will not need to ack this
+ // message anymore. If the socket write did not succeed, we will try again and will still
+ // need consume time.
+ popConsumeTime(seq);
+
+ // Trace the event processing timeline - event was just finished
+ ATRACE_ASYNC_END(mProcessingTraceTag.c_str(), /*cookie=*/seq);
+ }
+ return result;
+}
+
+bool InputConsumer::hasPendingBatch() const {
+ return !mBatches.empty();
+}
+
+int32_t InputConsumer::getPendingBatchSource() const {
+ if (mBatches.empty()) {
+ return AINPUT_SOURCE_CLASS_NONE;
+ }
+
+ const Batch& batch = mBatches[0];
+ const InputMessage& head = batch.samples[0];
+ return head.body.motion.source;
+}
+
+bool InputConsumer::probablyHasInput() const {
+ return hasPendingBatch() || mChannel->probablyHasInput();
+}
+
+ssize_t InputConsumer::findBatch(int32_t deviceId, int32_t source) const {
+ for (size_t i = 0; i < mBatches.size(); i++) {
+ const Batch& batch = mBatches[i];
+ const InputMessage& head = batch.samples[0];
+ if (head.body.motion.deviceId == deviceId && head.body.motion.source == source) {
+ return i;
+ }
+ }
+ return -1;
+}
+
+ssize_t InputConsumer::findTouchState(int32_t deviceId, int32_t source) const {
+ for (size_t i = 0; i < mTouchStates.size(); i++) {
+ const TouchState& touchState = mTouchStates[i];
+ if (touchState.deviceId == deviceId && touchState.source == source) {
+ return i;
+ }
+ }
+ return -1;
+}
+
+bool InputConsumer::canAddSample(const Batch& batch, const InputMessage* msg) {
+ const InputMessage& head = batch.samples[0];
+ uint32_t pointerCount = msg->body.motion.pointerCount;
+ if (head.body.motion.pointerCount != pointerCount ||
+ head.body.motion.action != msg->body.motion.action) {
+ return false;
+ }
+ for (size_t i = 0; i < pointerCount; i++) {
+ if (head.body.motion.pointers[i].properties != msg->body.motion.pointers[i].properties) {
+ return false;
+ }
+ }
+ return true;
+}
+
+ssize_t InputConsumer::findSampleNoLaterThan(const Batch& batch, nsecs_t time) {
+ size_t numSamples = batch.samples.size();
+ size_t index = 0;
+ while (index < numSamples && batch.samples[index].body.motion.eventTime <= time) {
+ index += 1;
+ }
+ return ssize_t(index) - 1;
+}
+
+std::string InputConsumer::dump() const {
+ std::string out;
+ out = out + "mResampleTouch = " + toString(mResampleTouch) + "\n";
+ out = out + "mChannel = " + mChannel->getName() + "\n";
+ out = out + "mMsgDeferred: " + toString(mMsgDeferred) + "\n";
+ if (mMsgDeferred) {
+ out = out + "mMsg : " + ftl::enum_string(mMsg.header.type) + "\n";
+ }
+ out += "Batches:\n";
+ for (const Batch& batch : mBatches) {
+ out += " Batch:\n";
+ for (const InputMessage& msg : batch.samples) {
+ out += android::base::StringPrintf(" Message %" PRIu32 ": %s ", msg.header.seq,
+ ftl::enum_string(msg.header.type).c_str());
+ switch (msg.header.type) {
+ case InputMessage::Type::KEY: {
+ out += android::base::StringPrintf("action=%s keycode=%" PRId32,
+ KeyEvent::actionToString(
+ msg.body.key.action),
+ msg.body.key.keyCode);
+ break;
+ }
+ case InputMessage::Type::MOTION: {
+ out = out + "action=" + MotionEvent::actionToString(msg.body.motion.action);
+ for (uint32_t i = 0; i < msg.body.motion.pointerCount; i++) {
+ const float x = msg.body.motion.pointers[i].coords.getX();
+ const float y = msg.body.motion.pointers[i].coords.getY();
+ out += android::base::StringPrintf("\n Pointer %" PRIu32
+ " : x=%.1f y=%.1f",
+ i, x, y);
+ }
+ break;
+ }
+ case InputMessage::Type::FINISHED: {
+ out += android::base::StringPrintf("handled=%s, consumeTime=%" PRId64,
+ toString(msg.body.finished.handled),
+ msg.body.finished.consumeTime);
+ break;
+ }
+ case InputMessage::Type::FOCUS: {
+ out += android::base::StringPrintf("hasFocus=%s",
+ toString(msg.body.focus.hasFocus));
+ break;
+ }
+ case InputMessage::Type::CAPTURE: {
+ out += android::base::StringPrintf("hasCapture=%s",
+ toString(msg.body.capture
+ .pointerCaptureEnabled));
+ break;
+ }
+ case InputMessage::Type::DRAG: {
+ out += android::base::StringPrintf("x=%.1f y=%.1f, isExiting=%s",
+ msg.body.drag.x, msg.body.drag.y,
+ toString(msg.body.drag.isExiting));
+ break;
+ }
+ case InputMessage::Type::TIMELINE: {
+ const nsecs_t gpuCompletedTime =
+ msg.body.timeline
+ .graphicsTimeline[GraphicsTimeline::GPU_COMPLETED_TIME];
+ const nsecs_t presentTime =
+ msg.body.timeline.graphicsTimeline[GraphicsTimeline::PRESENT_TIME];
+ out += android::base::StringPrintf("inputEventId=%" PRId32
+ ", gpuCompletedTime=%" PRId64
+ ", presentTime=%" PRId64,
+ msg.body.timeline.eventId, gpuCompletedTime,
+ presentTime);
+ break;
+ }
+ case InputMessage::Type::TOUCH_MODE: {
+ out += android::base::StringPrintf("isInTouchMode=%s",
+ toString(msg.body.touchMode.isInTouchMode));
+ break;
+ }
+ }
+ out += "\n";
+ }
+ }
+ if (mBatches.empty()) {
+ out += " <empty>\n";
+ }
+ out += "mSeqChains:\n";
+ for (const SeqChain& chain : mSeqChains) {
+ out += android::base::StringPrintf(" chain: seq = %" PRIu32 " chain=%" PRIu32, chain.seq,
+ chain.chain);
+ }
+ if (mSeqChains.empty()) {
+ out += " <empty>\n";
+ }
+ out += "mConsumeTimes:\n";
+ for (const auto& [seq, consumeTime] : mConsumeTimes) {
+ out += android::base::StringPrintf(" seq = %" PRIu32 " consumeTime = %" PRId64, seq,
+ consumeTime);
+ }
+ if (mConsumeTimes.empty()) {
+ out += " <empty>\n";
+ }
+ return out;
+}
+
+} // namespace android
diff --git a/libs/input/InputConsumerNoResampling.cpp b/libs/input/InputConsumerNoResampling.cpp
new file mode 100644
index 0000000000..15d992f9f3
--- /dev/null
+++ b/libs/input/InputConsumerNoResampling.cpp
@@ -0,0 +1,539 @@
+/**
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "InputTransport"
+#define ATRACE_TAG ATRACE_TAG_INPUT
+
+#include <inttypes.h>
+
+#include <android-base/logging.h>
+#include <android-base/properties.h>
+#include <android-base/stringprintf.h>
+#include <cutils/properties.h>
+#include <ftl/enum.h>
+#include <utils/Trace.h>
+
+#include <com_android_input_flags.h>
+#include <input/InputConsumerNoResampling.h>
+#include <input/PrintTools.h>
+#include <input/TraceTools.h>
+
+namespace input_flags = com::android::input::flags;
+
+namespace android {
+
+namespace {
+
+/**
+ * Log debug messages relating to the consumer end of the transport channel.
+ * Enable this via "adb shell setprop log.tag.InputTransportConsumer DEBUG" (requires restart)
+ */
+const bool DEBUG_TRANSPORT_CONSUMER =
+ __android_log_is_loggable(ANDROID_LOG_DEBUG, LOG_TAG "Consumer", ANDROID_LOG_INFO);
+
+std::unique_ptr<KeyEvent> createKeyEvent(const InputMessage& msg) {
+ std::unique_ptr<KeyEvent> event = std::make_unique<KeyEvent>();
+ event->initialize(msg.body.key.eventId, msg.body.key.deviceId, msg.body.key.source,
+ ui::LogicalDisplayId{msg.body.key.displayId}, msg.body.key.hmac,
+ msg.body.key.action, msg.body.key.flags, msg.body.key.keyCode,
+ msg.body.key.scanCode, msg.body.key.metaState, msg.body.key.repeatCount,
+ msg.body.key.downTime, msg.body.key.eventTime);
+ return event;
+}
+
+std::unique_ptr<FocusEvent> createFocusEvent(const InputMessage& msg) {
+ std::unique_ptr<FocusEvent> event = std::make_unique<FocusEvent>();
+ event->initialize(msg.body.focus.eventId, msg.body.focus.hasFocus);
+ return event;
+}
+
+std::unique_ptr<CaptureEvent> createCaptureEvent(const InputMessage& msg) {
+ std::unique_ptr<CaptureEvent> event = std::make_unique<CaptureEvent>();
+ event->initialize(msg.body.capture.eventId, msg.body.capture.pointerCaptureEnabled);
+ return event;
+}
+
+std::unique_ptr<DragEvent> createDragEvent(const InputMessage& msg) {
+ std::unique_ptr<DragEvent> event = std::make_unique<DragEvent>();
+ event->initialize(msg.body.drag.eventId, msg.body.drag.x, msg.body.drag.y,
+ msg.body.drag.isExiting);
+ return event;
+}
+
+std::unique_ptr<MotionEvent> createMotionEvent(const InputMessage& msg) {
+ std::unique_ptr<MotionEvent> event = std::make_unique<MotionEvent>();
+ const uint32_t pointerCount = msg.body.motion.pointerCount;
+ std::vector<PointerProperties> pointerProperties;
+ pointerProperties.reserve(pointerCount);
+ std::vector<PointerCoords> pointerCoords;
+ pointerCoords.reserve(pointerCount);
+ for (uint32_t i = 0; i < pointerCount; i++) {
+ pointerProperties.push_back(msg.body.motion.pointers[i].properties);
+ pointerCoords.push_back(msg.body.motion.pointers[i].coords);
+ }
+
+ ui::Transform transform;
+ transform.set({msg.body.motion.dsdx, msg.body.motion.dtdx, msg.body.motion.tx,
+ msg.body.motion.dtdy, msg.body.motion.dsdy, msg.body.motion.ty, 0, 0, 1});
+ ui::Transform displayTransform;
+ displayTransform.set({msg.body.motion.dsdxRaw, msg.body.motion.dtdxRaw, msg.body.motion.txRaw,
+ msg.body.motion.dtdyRaw, msg.body.motion.dsdyRaw, msg.body.motion.tyRaw,
+ 0, 0, 1});
+ event->initialize(msg.body.motion.eventId, msg.body.motion.deviceId, msg.body.motion.source,
+ ui::LogicalDisplayId{msg.body.motion.displayId}, msg.body.motion.hmac,
+ msg.body.motion.action, msg.body.motion.actionButton, msg.body.motion.flags,
+ msg.body.motion.edgeFlags, msg.body.motion.metaState,
+ msg.body.motion.buttonState, msg.body.motion.classification, transform,
+ msg.body.motion.xPrecision, msg.body.motion.yPrecision,
+ msg.body.motion.xCursorPosition, msg.body.motion.yCursorPosition,
+ displayTransform, msg.body.motion.downTime, msg.body.motion.eventTime,
+ pointerCount, pointerProperties.data(), pointerCoords.data());
+ return event;
+}
+
+void addSample(MotionEvent& event, const InputMessage& msg) {
+ uint32_t pointerCount = msg.body.motion.pointerCount;
+ std::vector<PointerCoords> pointerCoords;
+ pointerCoords.reserve(pointerCount);
+ for (uint32_t i = 0; i < pointerCount; i++) {
+ pointerCoords.push_back(msg.body.motion.pointers[i].coords);
+ }
+
+ // TODO(b/329770983): figure out if it's safe to combine events with mismatching metaState
+ event.setMetaState(event.getMetaState() | msg.body.motion.metaState);
+ event.addSample(msg.body.motion.eventTime, pointerCoords.data());
+}
+
+std::unique_ptr<TouchModeEvent> createTouchModeEvent(const InputMessage& msg) {
+ std::unique_ptr<TouchModeEvent> event = std::make_unique<TouchModeEvent>();
+ event->initialize(msg.body.touchMode.eventId, msg.body.touchMode.isInTouchMode);
+ return event;
+}
+
+std::string outboundMessageToString(const InputMessage& outboundMsg) {
+ switch (outboundMsg.header.type) {
+ case InputMessage::Type::FINISHED: {
+ return android::base::StringPrintf(" Finish: seq=%" PRIu32 " handled=%s",
+ outboundMsg.header.seq,
+ toString(outboundMsg.body.finished.handled));
+ }
+ case InputMessage::Type::TIMELINE: {
+ return android::base::
+ StringPrintf(" Timeline: inputEventId=%" PRId32 " gpuCompletedTime=%" PRId64
+ ", presentTime=%" PRId64,
+ outboundMsg.body.timeline.eventId,
+ outboundMsg.body.timeline
+ .graphicsTimeline[GraphicsTimeline::GPU_COMPLETED_TIME],
+ outboundMsg.body.timeline
+ .graphicsTimeline[GraphicsTimeline::PRESENT_TIME]);
+ }
+ default: {
+ LOG(FATAL) << "Outbound message must be FINISHED or TIMELINE, got "
+ << ftl::enum_string(outboundMsg.header.type);
+ return "Unreachable";
+ }
+ }
+}
+
+InputMessage createFinishedMessage(uint32_t seq, bool handled, nsecs_t consumeTime) {
+ InputMessage msg;
+ msg.header.type = InputMessage::Type::FINISHED;
+ msg.header.seq = seq;
+ msg.body.finished.handled = handled;
+ msg.body.finished.consumeTime = consumeTime;
+ return msg;
+}
+
+InputMessage createTimelineMessage(int32_t inputEventId, nsecs_t gpuCompletedTime,
+ nsecs_t presentTime) {
+ InputMessage msg;
+ msg.header.type = InputMessage::Type::TIMELINE;
+ msg.header.seq = 0;
+ msg.body.timeline.eventId = inputEventId;
+ msg.body.timeline.graphicsTimeline[GraphicsTimeline::GPU_COMPLETED_TIME] = gpuCompletedTime;
+ msg.body.timeline.graphicsTimeline[GraphicsTimeline::PRESENT_TIME] = presentTime;
+ return msg;
+}
+
+} // namespace
+
+using android::base::Result;
+using android::base::StringPrintf;
+
+// --- InputConsumerNoResampling ---
+
+InputConsumerNoResampling::InputConsumerNoResampling(const std::shared_ptr<InputChannel>& channel,
+ sp<Looper> looper,
+ InputConsumerCallbacks& callbacks)
+ : mChannel(channel), mLooper(looper), mCallbacks(callbacks), mFdEvents(0) {
+ LOG_ALWAYS_FATAL_IF(mLooper == nullptr);
+ mCallback = sp<LooperEventCallback>::make(
+ std::bind(&InputConsumerNoResampling::handleReceiveCallback, this,
+ std::placeholders::_1));
+ // Initially there are no pending outbound events; we only care about receiving
+ // incoming data.
+ setFdEvents(ALOOPER_EVENT_INPUT);
+}
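A minimal usage sketch (not part of this change): the consumer registers the channel fd on the looper of the constructing thread, so a client only has to keep polling that looper. Here 'channel', 'running' and the InputConsumerCallbacks implementation 'MyCallbacks' are assumptions for illustration:

    sp<Looper> looper = Looper::prepare(/*opts=*/0);   // looper for this thread
    MyCallbacks callbacks;                             // implements onKeyEvent(), onMotionEvent(), ...
    InputConsumerNoResampling consumer(channel, looper, callbacks);
    while (running) {
        looper->pollOnce(/*timeoutMillis=*/-1);        // drives handleReceiveCallback()
    }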
+
+InputConsumerNoResampling::~InputConsumerNoResampling() {
+ ensureCalledOnLooperThread(__func__);
+ consumeBatchedInputEvents(std::nullopt);
+ while (!mOutboundQueue.empty()) {
+ processOutboundEvents();
+ // This is our last chance to ack the events. If we don't ack them here, we will get an ANR,
+ // so keep trying to send the events as long as they are present in the queue.
+ }
+ setFdEvents(0);
+}
+
+int InputConsumerNoResampling::handleReceiveCallback(int events) {
+ // Allowed return values of this function as documented in LooperCallback::handleEvent
+ constexpr int REMOVE_CALLBACK = 0;
+ constexpr int KEEP_CALLBACK = 1;
+
+ if (events & (ALOOPER_EVENT_ERROR | ALOOPER_EVENT_HANGUP)) {
+ // This error typically occurs when the publisher has closed the input channel
+ // as part of removing a window or finishing an IME session, in which case
+ // the consumer will soon be disposed as well.
+ if (DEBUG_TRANSPORT_CONSUMER) {
+ LOG(INFO) << "The channel was hung up or an error occurred: " << mChannel->getName();
+ }
+ return REMOVE_CALLBACK;
+ }
+
+ int handledEvents = 0;
+ if (events & ALOOPER_EVENT_INPUT) {
+ std::vector<InputMessage> messages = readAllMessages();
+ handleMessages(std::move(messages));
+ handledEvents |= ALOOPER_EVENT_INPUT;
+ }
+
+ if (events & ALOOPER_EVENT_OUTPUT) {
+ processOutboundEvents();
+ handledEvents |= ALOOPER_EVENT_OUTPUT;
+ }
+ if (handledEvents != events) {
+ LOG(FATAL) << "Mismatch: handledEvents=" << handledEvents << ", events=" << events;
+ }
+ return KEEP_CALLBACK;
+}
+
+void InputConsumerNoResampling::processOutboundEvents() {
+ while (!mOutboundQueue.empty()) {
+ const InputMessage& outboundMsg = mOutboundQueue.front();
+
+ const status_t result = mChannel->sendMessage(&outboundMsg);
+ if (result == OK) {
+ if (outboundMsg.header.type == InputMessage::Type::FINISHED) {
+ ATRACE_ASYNC_END("InputConsumer processing", /*cookie=*/outboundMsg.header.seq);
+ }
+ // Successful send. Erase the entry and keep trying to send more
+ mOutboundQueue.pop();
+ continue;
+ }
+
+ // Publisher is busy, try again later. Keep this entry (do not erase)
+ if (result == WOULD_BLOCK) {
+ setFdEvents(ALOOPER_EVENT_INPUT | ALOOPER_EVENT_OUTPUT);
+ return; // try again later
+ }
+
+ // Some other error. Give up
+ LOG(FATAL) << "Failed to send outbound event on channel '" << mChannel->getName()
+ << "'. status=" << statusToString(result) << "(" << result << ")";
+ }
+
+ // The queue is now empty. Tell looper there's no more output to expect.
+ setFdEvents(ALOOPER_EVENT_INPUT);
+}
+
+void InputConsumerNoResampling::finishInputEvent(uint32_t seq, bool handled) {
+ ensureCalledOnLooperThread(__func__);
+ mOutboundQueue.push(createFinishedMessage(seq, handled, popConsumeTime(seq)));
+ // also produce finish events for all batches for this seq (if any)
+ const auto it = mBatchedSequenceNumbers.find(seq);
+ if (it != mBatchedSequenceNumbers.end()) {
+ for (uint32_t subSeq : it->second) {
+ mOutboundQueue.push(createFinishedMessage(subSeq, handled, popConsumeTime(subSeq)));
+ }
+ mBatchedSequenceNumbers.erase(it);
+ }
+ processOutboundEvents();
+}
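For illustration only (not part of this change), a callbacks implementation would normally ack from the same looper thread once the event has been handled; 'MyCallbacks', 'mConsumer' and 'dispatchToApp' below are hypothetical:

    void MyCallbacks::onMotionEvent(std::unique_ptr<MotionEvent> event, uint32_t seq) {
        const bool handled = dispatchToApp(*event);   // app-side handling, assumed
        mConsumer->finishInputEvent(seq, handled);    // also acks samples batched under 'seq'
    }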
+
+bool InputConsumerNoResampling::probablyHasInput() const {
+ // Ideally, this would only be allowed to run on the looper thread, and in production, it will.
+ // However, for testing, it's convenient to call this while the looper thread is blocked, so
+ // we do not call ensureCalledOnLooperThread here.
+ return (!mBatches.empty()) || mChannel->probablyHasInput();
+}
+
+void InputConsumerNoResampling::reportTimeline(int32_t inputEventId, nsecs_t gpuCompletedTime,
+ nsecs_t presentTime) {
+ ensureCalledOnLooperThread(__func__);
+ mOutboundQueue.push(createTimelineMessage(inputEventId, gpuCompletedTime, presentTime));
+ processOutboundEvents();
+}
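As a sketch (not part of this change), a client that tracks frame timing could report the timeline once the frame containing the event has been presented; 'gpuCompletedTime' and 'presentTime' are assumed to come from the client's frame machinery:

    // After the frame that consumed this event is presented:
    consumer.reportTimeline(event->getId(), gpuCompletedTime, presentTime);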
+
+nsecs_t InputConsumerNoResampling::popConsumeTime(uint32_t seq) {
+ auto it = mConsumeTimes.find(seq);
+ // Consume time will be missing if either 'finishInputEvent' is called twice, or if it was
+ // called for the wrong (synthetic?) input event. Either way, it is a bug that should be fixed.
+ LOG_ALWAYS_FATAL_IF(it == mConsumeTimes.end(), "Could not find consume time for seq=%" PRIu32,
+ seq);
+ nsecs_t consumeTime = it->second;
+ mConsumeTimes.erase(it);
+ return consumeTime;
+}
+
+void InputConsumerNoResampling::setFdEvents(int events) {
+ if (mFdEvents != events) {
+ mFdEvents = events;
+ if (events != 0) {
+ mLooper->addFd(mChannel->getFd(), 0, events, mCallback, nullptr);
+ } else {
+ mLooper->removeFd(mChannel->getFd());
+ }
+ }
+}
+
+void InputConsumerNoResampling::handleMessages(std::vector<InputMessage>&& messages) {
+ // TODO(b/297226446) : add resampling
+ for (const InputMessage& msg : messages) {
+ if (msg.header.type == InputMessage::Type::MOTION) {
+ const int32_t action = msg.body.motion.action;
+ const DeviceId deviceId = msg.body.motion.deviceId;
+ const int32_t source = msg.body.motion.source;
+ const bool batchableEvent = (action == AMOTION_EVENT_ACTION_MOVE ||
+ action == AMOTION_EVENT_ACTION_HOVER_MOVE) &&
+ (isFromSource(source, AINPUT_SOURCE_CLASS_POINTER) ||
+ isFromSource(source, AINPUT_SOURCE_CLASS_JOYSTICK));
+ if (batchableEvent) {
+ // add it to batch
+ mBatches[deviceId].emplace(msg);
+ } else {
+ // consume all pending batches for this event immediately
+ // TODO(b/329776327): figure out if this could be smarter by limiting the
+ // consumption only to the current device.
+ consumeBatchedInputEvents(std::nullopt);
+ handleMessage(msg);
+ }
+ } else {
+ // Non-motion events shouldn't force the consumption of pending batched events
+ handleMessage(msg);
+ }
+ }
+ // At the end of this, if we still have pending batches, notify the receiver about it.
+
+ // We need to carefully notify the InputConsumerCallbacks about the pending batch. The receiver
+ // could choose to consume all events when notified about the batch. That means that the
+ // "mBatches" variable could change when 'InputConsumerCallbacks::onBatchedInputEventPending' is
+ // invoked. We also can't notify the InputConsumerCallbacks in a while loop until mBatches is
+ // empty, because the receiver could choose to not consume the batch immediately.
+ std::set<int32_t> pendingBatchSources;
+ for (const auto& [_, pendingMessages] : mBatches) {
+ // Assume that all messages for a given device have the same source.
+ pendingBatchSources.insert(pendingMessages.front().body.motion.source);
+ }
+ for (const int32_t source : pendingBatchSources) {
+ const bool sourceStillRemaining =
+ std::any_of(mBatches.begin(), mBatches.end(), [=](const auto& pair) {
+ return pair.second.front().body.motion.source == source;
+ });
+ if (sourceStillRemaining) {
+ mCallbacks.onBatchedInputEventPending(source);
+ }
+ }
+}
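To illustrate the notification contract described above (sketch only, not part of this change): a typical receiver reacts to onBatchedInputEventPending by scheduling a frame and draining the batch at that frame's time. 'scheduleFrame' and 'mConsumer' are hypothetical:

    void MyCallbacks::onBatchedInputEventPending(int32_t source) {
        scheduleFrame([this](nsecs_t frameTime) {       // e.g. a vsync/choreographer hook
            mConsumer->consumeBatchedInputEvents(frameTime);
        });
    }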
+
+std::vector<InputMessage> InputConsumerNoResampling::readAllMessages() {
+ std::vector<InputMessage> messages;
+ while (true) {
+ InputMessage msg;
+ status_t result = mChannel->receiveMessage(&msg);
+ switch (result) {
+ case OK: {
+ const auto [_, inserted] =
+ mConsumeTimes.emplace(msg.header.seq, systemTime(SYSTEM_TIME_MONOTONIC));
+ LOG_ALWAYS_FATAL_IF(!inserted, "Already have a consume time for seq=%" PRIu32,
+ msg.header.seq);
+
+ // Trace the event processing timeline - event was just read from the socket
+ // TODO(b/329777420): distinguish between multiple instances of InputConsumer
+ // in the same process.
+ ATRACE_ASYNC_BEGIN("InputConsumer processing", /*cookie=*/msg.header.seq);
+ messages.push_back(msg);
+ break;
+ }
+ case WOULD_BLOCK: {
+ return messages;
+ }
+ case DEAD_OBJECT: {
+ LOG(FATAL) << "Got a dead object for " << mChannel->getName();
+ break;
+ }
+ case BAD_VALUE: {
+ LOG(FATAL) << "Got a bad value for " << mChannel->getName();
+ break;
+ }
+ default: {
+ LOG(FATAL) << "Unexpected error: " << result;
+ break;
+ }
+ }
+ }
+}
+
+void InputConsumerNoResampling::handleMessage(const InputMessage& msg) const {
+ switch (msg.header.type) {
+ case InputMessage::Type::KEY: {
+ std::unique_ptr<KeyEvent> keyEvent = createKeyEvent(msg);
+ mCallbacks.onKeyEvent(std::move(keyEvent), msg.header.seq);
+ break;
+ }
+
+ case InputMessage::Type::MOTION: {
+ std::unique_ptr<MotionEvent> motionEvent = createMotionEvent(msg);
+ mCallbacks.onMotionEvent(std::move(motionEvent), msg.header.seq);
+ break;
+ }
+
+ case InputMessage::Type::FINISHED:
+ case InputMessage::Type::TIMELINE: {
+ LOG(FATAL) << "Consumed a " << ftl::enum_string(msg.header.type)
+ << " message, which should never be seen by InputConsumer on "
+ << mChannel->getName();
+ break;
+ }
+
+ case InputMessage::Type::FOCUS: {
+ std::unique_ptr<FocusEvent> focusEvent = createFocusEvent(msg);
+ mCallbacks.onFocusEvent(std::move(focusEvent), msg.header.seq);
+ break;
+ }
+
+ case InputMessage::Type::CAPTURE: {
+ std::unique_ptr<CaptureEvent> captureEvent = createCaptureEvent(msg);
+ mCallbacks.onCaptureEvent(std::move(captureEvent), msg.header.seq);
+ break;
+ }
+
+ case InputMessage::Type::DRAG: {
+ std::unique_ptr<DragEvent> dragEvent = createDragEvent(msg);
+ mCallbacks.onDragEvent(std::move(dragEvent), msg.header.seq);
+ break;
+ }
+
+ case InputMessage::Type::TOUCH_MODE: {
+ std::unique_ptr<TouchModeEvent> touchModeEvent = createTouchModeEvent(msg);
+ mCallbacks.onTouchModeEvent(std::move(touchModeEvent), msg.header.seq);
+ break;
+ }
+ }
+}
+
+bool InputConsumerNoResampling::consumeBatchedInputEvents(
+ std::optional<nsecs_t> requestedFrameTime) {
+ ensureCalledOnLooperThread(__func__);
+ // If the caller did not request a frameTime, consume all events; that is equivalent to
+ // having an infinite frameTime.
+ const nsecs_t frameTime = requestedFrameTime.value_or(std::numeric_limits<nsecs_t>::max());
+ bool producedEvents = false;
+ for (auto& [deviceId, messages] : mBatches) {
+ std::unique_ptr<MotionEvent> motion;
+ std::optional<uint32_t> firstSeqForBatch;
+ std::vector<uint32_t> sequences;
+ while (!messages.empty()) {
+ const InputMessage& msg = messages.front();
+ if (msg.body.motion.eventTime > frameTime) {
+ break;
+ }
+ if (motion == nullptr) {
+ motion = createMotionEvent(msg);
+ firstSeqForBatch = msg.header.seq;
+ const auto [_, inserted] = mBatchedSequenceNumbers.insert({*firstSeqForBatch, {}});
+ if (!inserted) {
+ LOG(FATAL) << "The sequence " << msg.header.seq << " was already present!";
+ }
+ } else {
+ addSample(*motion, msg);
+ mBatchedSequenceNumbers[*firstSeqForBatch].push_back(msg.header.seq);
+ }
+ messages.pop();
+ }
+ if (motion != nullptr) {
+ LOG_ALWAYS_FATAL_IF(!firstSeqForBatch.has_value());
+ mCallbacks.onMotionEvent(std::move(motion), *firstSeqForBatch);
+ producedEvents = true;
+ } else {
+ // This is OK; it just means that the frameTime is too old (all pending samples for
+ // this device are in the future of the frameTime), so nothing is consumed for it now.
+ // A warning could be printed here, but if multiple devices are active this can be
+ // normal and safe to ignore. If none of the devices produced any consumption, this
+ // function returns "false" anyway, so the decision is left to the caller.
+ }
+ }
+ std::erase_if(mBatches, [](const auto& pair) { return pair.second.empty(); });
+ return producedEvents;
+}
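A worked example of the frameTime cutoff (illustrative timestamps): with pending MOVE samples at t = 8, 16 and 24 for one device,

    // consumeBatchedInputEvents(16) delivers one MotionEvent carrying the t=8 and t=16
    // samples (eventTime <= frameTime); the t=24 sample stays queued for the next frame.
    // consumeBatchedInputEvents(std::nullopt) consumes everything that is still pending.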
+
+void InputConsumerNoResampling::ensureCalledOnLooperThread(const char* func) const {
+ sp<Looper> callingThreadLooper = Looper::getForThread();
+ if (callingThreadLooper != mLooper) {
+ LOG(FATAL) << "The function " << func << " can only be called on the looper thread";
+ }
+}
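A threading note with a sketch (not part of this change): calls such as finishInputEvent() abort if made off the looper thread, so work originating elsewhere is usually posted to the looper first; 'ackHandler' and 'ACK_EVENT' are hypothetical:

    // On some other thread:
    looper->sendMessage(ackHandler, Message(/*what=*/ACK_EVENT));
    // ackHandler's handleMessage() then runs on the looper thread, where it may call
    // consumer.finishInputEvent(seq, handled) safely.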
+
+std::string InputConsumerNoResampling::dump() const {
+ ensureCalledOnLooperThread(__func__);
+ std::string out;
+ if (mOutboundQueue.empty()) {
+ out += "mOutboundQueue: <empty>\n";
+ } else {
+ out += "mOutboundQueue:\n";
+ // Make a copy of mOutboundQueue for printing destructively. Unfortunately std::queue
+ // doesn't provide a good way to iterate over the entire container.
+ std::queue<InputMessage> tmpQueue = mOutboundQueue;
+ while (!tmpQueue.empty()) {
+ out += std::string(" ") + outboundMessageToString(tmpQueue.front()) + "\n";
+ tmpQueue.pop();
+ }
+ }
+
+ if (mBatches.empty()) {
+ out += "mBatches: <empty>\n";
+ } else {
+ out += "mBatches:\n";
+ for (const auto& [deviceId, messages] : mBatches) {
+ out += " Device id ";
+ out += std::to_string(deviceId);
+ out += ":\n";
+ // Make a copy of the batched messages for destructive printing. Unfortunately std::queue
+ // doesn't provide a good way to iterate over the entire container.
+ std::queue<InputMessage> tmpQueue = messages;
+ while (!tmpQueue.empty()) {
+ LOG_ALWAYS_FATAL_IF(tmpQueue.front().header.type != InputMessage::Type::MOTION);
+ std::unique_ptr<MotionEvent> motion = createMotionEvent(tmpQueue.front());
+ out += std::string(" ") + streamableToString(*motion) + "\n";
+ tmpQueue.pop();
+ }
+ }
+ }
+
+ return out;
+}
+
+} // namespace android
diff --git a/libs/input/InputDevice.cpp b/libs/input/InputDevice.cpp
index c348833747..9333ab83a6 100644
--- a/libs/input/InputDevice.cpp
+++ b/libs/input/InputDevice.cpp
@@ -23,7 +23,6 @@
#include <android-base/properties.h>
#include <android-base/stringprintf.h>
#include <ftl/enum.h>
-#include <gui/constants.h>
#include <input/InputDevice.h>
#include <input/InputEventLabels.h>
@@ -170,7 +169,7 @@ std::string InputDeviceIdentifier::getCanonicalName() const {
// --- InputDeviceInfo ---
InputDeviceInfo::InputDeviceInfo() {
- initialize(-1, 0, -1, InputDeviceIdentifier(), "", false, false, ADISPLAY_ID_NONE);
+ initialize(-1, 0, -1, InputDeviceIdentifier(), "", false, false, ui::LogicalDisplayId::INVALID);
}
InputDeviceInfo::InputDeviceInfo(const InputDeviceInfo& other)
@@ -187,6 +186,7 @@ InputDeviceInfo::InputDeviceInfo(const InputDeviceInfo& other)
mKeyCharacterMap(other.mKeyCharacterMap),
mUsiVersion(other.mUsiVersion),
mAssociatedDisplayId(other.mAssociatedDisplayId),
+ mEnabled(other.mEnabled),
mHasVibrator(other.mHasVibrator),
mHasBattery(other.mHasBattery),
mHasButtonUnderPad(other.mHasButtonUnderPad),
@@ -201,8 +201,9 @@ InputDeviceInfo::~InputDeviceInfo() {
void InputDeviceInfo::initialize(int32_t id, int32_t generation, int32_t controllerNumber,
const InputDeviceIdentifier& identifier, const std::string& alias,
- bool isExternal, bool hasMic, int32_t associatedDisplayId,
- InputDeviceViewBehavior viewBehavior) {
+ bool isExternal, bool hasMic,
+ ui::LogicalDisplayId associatedDisplayId,
+ InputDeviceViewBehavior viewBehavior, bool enabled) {
mId = id;
mGeneration = generation;
mControllerNumber = controllerNumber;
@@ -213,6 +214,7 @@ void InputDeviceInfo::initialize(int32_t id, int32_t generation, int32_t control
mSources = 0;
mKeyboardType = AINPUT_KEYBOARD_TYPE_NONE;
mAssociatedDisplayId = associatedDisplayId;
+ mEnabled = enabled;
mHasVibrator = false;
mHasBattery = false;
mHasButtonUnderPad = false;
@@ -271,10 +273,7 @@ void InputDeviceInfo::addLightInfo(const InputDeviceLightInfo& info) {
}
void InputDeviceInfo::setKeyboardType(int32_t keyboardType) {
- static_assert(AINPUT_KEYBOARD_TYPE_NONE < AINPUT_KEYBOARD_TYPE_NON_ALPHABETIC);
- static_assert(AINPUT_KEYBOARD_TYPE_NON_ALPHABETIC < AINPUT_KEYBOARD_TYPE_ALPHABETIC);
- // There can be multiple subdevices with different keyboard types, set it to the highest type
- mKeyboardType = std::max(mKeyboardType, keyboardType);
+ mKeyboardType = keyboardType;
}
void InputDeviceInfo::setKeyboardLayoutInfo(KeyboardLayoutInfo layoutInfo) {
diff --git a/libs/input/InputTransport.cpp b/libs/input/InputTransport.cpp
index e49f4eb6f6..47b422857e 100644
--- a/libs/input/InputTransport.cpp
+++ b/libs/input/InputTransport.cpp
@@ -26,10 +26,13 @@
#include <com_android_input_flags.h>
#include <input/InputTransport.h>
+#include <input/PrintTools.h>
#include <input/TraceTools.h>
namespace input_flags = com::android::input::flags;
+namespace android {
+
namespace {
/**
@@ -48,14 +51,6 @@ const bool DEBUG_CHANNEL_MESSAGES =
const bool DEBUG_CHANNEL_LIFECYCLE =
__android_log_is_loggable(ANDROID_LOG_DEBUG, LOG_TAG "Lifecycle", ANDROID_LOG_INFO);
-/**
- * Log debug messages relating to the consumer end of the transport channel.
- * Enable this via "adb shell setprop log.tag.InputTransportConsumer DEBUG" (requires restart)
- */
-
-const bool DEBUG_TRANSPORT_CONSUMER =
- __android_log_is_loggable(ANDROID_LOG_DEBUG, LOG_TAG "Consumer", ANDROID_LOG_INFO);
-
const bool IS_DEBUGGABLE_BUILD =
#if defined(__ANDROID__)
android::base::GetBoolProperty("ro.debuggable", false);
@@ -78,23 +73,6 @@ bool debugTransportPublisher() {
return __android_log_is_loggable(ANDROID_LOG_DEBUG, LOG_TAG "Publisher", ANDROID_LOG_INFO);
}
-/**
- * Log debug messages about touch event resampling.
- *
- * Enable this via "adb shell setprop log.tag.InputTransportResampling DEBUG".
- * This requires a restart on non-debuggable (e.g. user) builds, but should take effect immediately
- * on debuggable builds (e.g. userdebug).
- */
-bool debugResampling() {
- if (!IS_DEBUGGABLE_BUILD) {
- static const bool DEBUG_TRANSPORT_RESAMPLING =
- __android_log_is_loggable(ANDROID_LOG_DEBUG, LOG_TAG "Resampling",
- ANDROID_LOG_INFO);
- return DEBUG_TRANSPORT_RESAMPLING;
- }
- return __android_log_is_loggable(ANDROID_LOG_DEBUG, LOG_TAG "Resampling", ANDROID_LOG_INFO);
-}
-
android::base::unique_fd dupChannelFd(int fd) {
android::base::unique_fd newFd(::dup(fd));
if (!newFd.ok()) {
@@ -110,78 +88,25 @@ android::base::unique_fd dupChannelFd(int fd) {
return newFd;
}
-} // namespace
-
-using android::base::Result;
-using android::base::StringPrintf;
-
-namespace android {
-
// Socket buffer size. The default is typically about 128KB, which is much larger than
// we really need. So we make it smaller. It just needs to be big enough to hold
// a few dozen large multi-finger motion events in the case where an application gets
// behind processing touches.
-static const size_t SOCKET_BUFFER_SIZE = 32 * 1024;
-
-// Nanoseconds per milliseconds.
-static const nsecs_t NANOS_PER_MS = 1000000;
-
-// Latency added during resampling. A few milliseconds doesn't hurt much but
-// reduces the impact of mispredicted touch positions.
-const std::chrono::duration RESAMPLE_LATENCY = 5ms;
-
-// Minimum time difference between consecutive samples before attempting to resample.
-static const nsecs_t RESAMPLE_MIN_DELTA = 2 * NANOS_PER_MS;
-
-// Maximum time difference between consecutive samples before attempting to resample
-// by extrapolation.
-static const nsecs_t RESAMPLE_MAX_DELTA = 20 * NANOS_PER_MS;
-
-// Maximum time to predict forward from the last known state, to avoid predicting too
-// far into the future. This time is further bounded by 50% of the last time delta.
-static const nsecs_t RESAMPLE_MAX_PREDICTION = 8 * NANOS_PER_MS;
-
-/**
- * System property for enabling / disabling touch resampling.
- * Resampling extrapolates / interpolates the reported touch event coordinates to better
- * align them to the VSYNC signal, thus resulting in smoother scrolling performance.
- * Resampling is not needed (and should be disabled) on hardware that already
- * has touch events triggered by VSYNC.
- * Set to "1" to enable resampling (default).
- * Set to "0" to disable resampling.
- * Resampling is enabled by default.
- */
-static const char* PROPERTY_RESAMPLING_ENABLED = "ro.input.resampling";
+constexpr size_t SOCKET_BUFFER_SIZE = 32 * 1024;
/**
* Crash if the events that are getting sent to the InputPublisher are inconsistent.
* Enable this via "adb shell setprop log.tag.InputTransportVerifyEvents DEBUG"
*/
-static bool verifyEvents() {
+bool verifyEvents() {
return input_flags::enable_outbound_event_verification() ||
__android_log_is_loggable(ANDROID_LOG_DEBUG, LOG_TAG "VerifyEvents", ANDROID_LOG_INFO);
}
-template<typename T>
-inline static T min(const T& a, const T& b) {
- return a < b ? a : b;
-}
-
-inline static float lerp(float a, float b, float alpha) {
- return a + alpha * (b - a);
-}
-
-inline static bool isPointerEvent(int32_t source) {
- return (source & AINPUT_SOURCE_CLASS_POINTER) == AINPUT_SOURCE_CLASS_POINTER;
-}
-
-inline static const char* toString(bool value) {
- return value ? "true" : "false";
-}
+} // namespace
-static bool shouldResampleTool(ToolType toolType) {
- return toolType == ToolType::FINGER || toolType == ToolType::UNKNOWN;
-}
+using android::base::Result;
+using android::base::StringPrintf;
// --- InputMessage ---
@@ -462,9 +387,9 @@ status_t InputChannel::openInputChannelPair(const std::string& name,
status_t InputChannel::sendMessage(const InputMessage* msg) {
ATRACE_NAME_IF(ATRACE_ENABLED(),
- StringPrintf("sendMessage(inputChannel=%s, seq=0x%" PRIx32 ", type=0x%" PRIx32
- ")",
- name.c_str(), msg->header.seq, msg->header.type));
+ StringPrintf("sendMessage(inputChannel=%s, seq=0x%" PRIx32 ", type=%s)",
+ name.c_str(), msg->header.seq,
+ ftl::enum_string(msg->header.type).c_str()));
const size_t msgLength = msg->size();
InputMessage cleanMsg;
msg->getSanitizedCopy(&cleanMsg);
@@ -533,9 +458,10 @@ status_t InputChannel::receiveMessage(InputMessage* msg) {
ftl::enum_string(msg->header.type).c_str());
if (ATRACE_ENABLED()) {
// Add an additional trace point to include data about the received message.
- std::string message = StringPrintf("receiveMessage(inputChannel=%s, seq=0x%" PRIx32
- ", type=0x%" PRIx32 ")",
- name.c_str(), msg->header.seq, msg->header.type);
+ std::string message =
+ StringPrintf("receiveMessage(inputChannel=%s, seq=0x%" PRIx32 ", type=%s)",
+ name.c_str(), msg->header.seq,
+ ftl::enum_string(msg->header.type).c_str());
ATRACE_NAME(message.c_str());
}
return OK;
@@ -604,7 +530,7 @@ InputPublisher::~InputPublisher() {
}
status_t InputPublisher::publishKeyEvent(uint32_t seq, int32_t eventId, int32_t deviceId,
- int32_t source, int32_t displayId,
+ int32_t source, ui::LogicalDisplayId displayId,
std::array<uint8_t, 32> hmac, int32_t action,
int32_t flags, int32_t keyCode, int32_t scanCode,
int32_t metaState, int32_t repeatCount, nsecs_t downTime,
@@ -632,7 +558,7 @@ status_t InputPublisher::publishKeyEvent(uint32_t seq, int32_t eventId, int32_t
msg.body.key.eventId = eventId;
msg.body.key.deviceId = deviceId;
msg.body.key.source = source;
- msg.body.key.displayId = displayId;
+ msg.body.key.displayId = displayId.val();
msg.body.key.hmac = std::move(hmac);
msg.body.key.action = action;
msg.body.key.flags = flags;
@@ -646,11 +572,11 @@ status_t InputPublisher::publishKeyEvent(uint32_t seq, int32_t eventId, int32_t
}
status_t InputPublisher::publishMotionEvent(
- uint32_t seq, int32_t eventId, int32_t deviceId, int32_t source, int32_t displayId,
- std::array<uint8_t, 32> hmac, int32_t action, int32_t actionButton, int32_t flags,
- int32_t edgeFlags, int32_t metaState, int32_t buttonState,
- MotionClassification classification, const ui::Transform& transform, float xPrecision,
- float yPrecision, float xCursorPosition, float yCursorPosition,
+ uint32_t seq, int32_t eventId, int32_t deviceId, int32_t source,
+ ui::LogicalDisplayId displayId, std::array<uint8_t, 32> hmac, int32_t action,
+ int32_t actionButton, int32_t flags, int32_t edgeFlags, int32_t metaState,
+ int32_t buttonState, MotionClassification classification, const ui::Transform& transform,
+ float xPrecision, float yPrecision, float xCursorPosition, float yCursorPosition,
const ui::Transform& rawTransform, nsecs_t downTime, nsecs_t eventTime,
uint32_t pointerCount, const PointerProperties* pointerProperties,
const PointerCoords* pointerCoords) {
@@ -670,13 +596,13 @@ status_t InputPublisher::publishMotionEvent(
std::string transformString;
transform.dump(transformString, "transform", " ");
ALOGD("channel '%s' publisher ~ %s: seq=%u, id=%d, deviceId=%d, source=%s, "
- "displayId=%" PRId32 ", "
+ "displayId=%s, "
"action=%s, actionButton=0x%08x, flags=0x%x, edgeFlags=0x%x, "
"metaState=0x%x, buttonState=0x%x, classification=%s,"
"xPrecision=%f, yPrecision=%f, downTime=%" PRId64 ", eventTime=%" PRId64 ", "
"pointerCount=%" PRIu32 "\n%s",
mChannel->getName().c_str(), __func__, seq, eventId, deviceId,
- inputEventSourceToString(source).c_str(), displayId,
+ inputEventSourceToString(source).c_str(), displayId.toString().c_str(),
MotionEvent::actionToString(action).c_str(), actionButton, flags, edgeFlags,
metaState, buttonState, motionClassificationToString(classification), xPrecision,
yPrecision, downTime, eventTime, pointerCount, transformString.c_str());
@@ -699,7 +625,7 @@ status_t InputPublisher::publishMotionEvent(
msg.body.motion.eventId = eventId;
msg.body.motion.deviceId = deviceId;
msg.body.motion.source = source;
- msg.body.motion.displayId = displayId;
+ msg.body.motion.displayId = displayId.val();
msg.body.motion.hmac = std::move(hmac);
msg.body.motion.action = action;
msg.body.motion.actionButton = actionButton;
@@ -838,819 +764,4 @@ android::base::Result<InputPublisher::ConsumerResponse> InputPublisher::receiveC
return android::base::Error(UNKNOWN_ERROR);
}
-// --- InputConsumer ---
-
-InputConsumer::InputConsumer(const std::shared_ptr<InputChannel>& channel)
- : InputConsumer(channel, isTouchResamplingEnabled()) {}
-
-InputConsumer::InputConsumer(const std::shared_ptr<InputChannel>& channel,
- bool enableTouchResampling)
- : mResampleTouch(enableTouchResampling), mChannel(channel), mMsgDeferred(false) {}
-
-InputConsumer::~InputConsumer() {
-}
-
-bool InputConsumer::isTouchResamplingEnabled() {
- return property_get_bool(PROPERTY_RESAMPLING_ENABLED, true);
-}
-
-status_t InputConsumer::consume(InputEventFactoryInterface* factory, bool consumeBatches,
- nsecs_t frameTime, uint32_t* outSeq, InputEvent** outEvent) {
- ALOGD_IF(DEBUG_TRANSPORT_CONSUMER,
- "channel '%s' consumer ~ consume: consumeBatches=%s, frameTime=%" PRId64,
- mChannel->getName().c_str(), toString(consumeBatches), frameTime);
-
- *outSeq = 0;
- *outEvent = nullptr;
-
- // Fetch the next input message.
- // Loop until an event can be returned or no additional events are received.
- while (!*outEvent) {
- if (mMsgDeferred) {
- // mMsg contains a valid input message from the previous call to consume
- // that has not yet been processed.
- mMsgDeferred = false;
- } else {
- // Receive a fresh message.
- status_t result = mChannel->receiveMessage(&mMsg);
- if (result == OK) {
- const auto [_, inserted] =
- mConsumeTimes.emplace(mMsg.header.seq, systemTime(SYSTEM_TIME_MONOTONIC));
- LOG_ALWAYS_FATAL_IF(!inserted, "Already have a consume time for seq=%" PRIu32,
- mMsg.header.seq);
-
- // Trace the event processing timeline - event was just read from the socket
- ATRACE_ASYNC_BEGIN("InputConsumer processing", /*cookie=*/mMsg.header.seq);
- }
- if (result) {
- // Consume the next batched event unless batches are being held for later.
- if (consumeBatches || result != WOULD_BLOCK) {
- result = consumeBatch(factory, frameTime, outSeq, outEvent);
- if (*outEvent) {
- ALOGD_IF(DEBUG_TRANSPORT_CONSUMER,
- "channel '%s' consumer ~ consumed batch event, seq=%u",
- mChannel->getName().c_str(), *outSeq);
- break;
- }
- }
- return result;
- }
- }
-
- switch (mMsg.header.type) {
- case InputMessage::Type::KEY: {
- KeyEvent* keyEvent = factory->createKeyEvent();
- if (!keyEvent) return NO_MEMORY;
-
- initializeKeyEvent(keyEvent, &mMsg);
- *outSeq = mMsg.header.seq;
- *outEvent = keyEvent;
- ALOGD_IF(DEBUG_TRANSPORT_CONSUMER,
- "channel '%s' consumer ~ consumed key event, seq=%u",
- mChannel->getName().c_str(), *outSeq);
- break;
- }
-
- case InputMessage::Type::MOTION: {
- ssize_t batchIndex = findBatch(mMsg.body.motion.deviceId, mMsg.body.motion.source);
- if (batchIndex >= 0) {
- Batch& batch = mBatches[batchIndex];
- if (canAddSample(batch, &mMsg)) {
- batch.samples.push_back(mMsg);
- ALOGD_IF(DEBUG_TRANSPORT_CONSUMER,
- "channel '%s' consumer ~ appended to batch event",
- mChannel->getName().c_str());
- break;
- } else if (isPointerEvent(mMsg.body.motion.source) &&
- mMsg.body.motion.action == AMOTION_EVENT_ACTION_CANCEL) {
- // No need to process events that we are going to cancel anyways
- const size_t count = batch.samples.size();
- for (size_t i = 0; i < count; i++) {
- const InputMessage& msg = batch.samples[i];
- sendFinishedSignal(msg.header.seq, false);
- }
- batch.samples.erase(batch.samples.begin(), batch.samples.begin() + count);
- mBatches.erase(mBatches.begin() + batchIndex);
- } else {
- // We cannot append to the batch in progress, so we need to consume
- // the previous batch right now and defer the new message until later.
- mMsgDeferred = true;
- status_t result = consumeSamples(factory, batch, batch.samples.size(),
- outSeq, outEvent);
- mBatches.erase(mBatches.begin() + batchIndex);
- if (result) {
- return result;
- }
- ALOGD_IF(DEBUG_TRANSPORT_CONSUMER,
- "channel '%s' consumer ~ consumed batch event and "
- "deferred current event, seq=%u",
- mChannel->getName().c_str(), *outSeq);
- break;
- }
- }
-
- // Start a new batch if needed.
- if (mMsg.body.motion.action == AMOTION_EVENT_ACTION_MOVE ||
- mMsg.body.motion.action == AMOTION_EVENT_ACTION_HOVER_MOVE) {
- Batch batch;
- batch.samples.push_back(mMsg);
- mBatches.push_back(batch);
- ALOGD_IF(DEBUG_TRANSPORT_CONSUMER,
- "channel '%s' consumer ~ started batch event",
- mChannel->getName().c_str());
- break;
- }
-
- MotionEvent* motionEvent = factory->createMotionEvent();
- if (!motionEvent) return NO_MEMORY;
-
- updateTouchState(mMsg);
- initializeMotionEvent(motionEvent, &mMsg);
- *outSeq = mMsg.header.seq;
- *outEvent = motionEvent;
-
- ALOGD_IF(DEBUG_TRANSPORT_CONSUMER,
- "channel '%s' consumer ~ consumed motion event, seq=%u",
- mChannel->getName().c_str(), *outSeq);
- break;
- }
-
- case InputMessage::Type::FINISHED:
- case InputMessage::Type::TIMELINE: {
- LOG_ALWAYS_FATAL("Consumed a %s message, which should never be seen by "
- "InputConsumer!",
- ftl::enum_string(mMsg.header.type).c_str());
- break;
- }
-
- case InputMessage::Type::FOCUS: {
- FocusEvent* focusEvent = factory->createFocusEvent();
- if (!focusEvent) return NO_MEMORY;
-
- initializeFocusEvent(focusEvent, &mMsg);
- *outSeq = mMsg.header.seq;
- *outEvent = focusEvent;
- break;
- }
-
- case InputMessage::Type::CAPTURE: {
- CaptureEvent* captureEvent = factory->createCaptureEvent();
- if (!captureEvent) return NO_MEMORY;
-
- initializeCaptureEvent(captureEvent, &mMsg);
- *outSeq = mMsg.header.seq;
- *outEvent = captureEvent;
- break;
- }
-
- case InputMessage::Type::DRAG: {
- DragEvent* dragEvent = factory->createDragEvent();
- if (!dragEvent) return NO_MEMORY;
-
- initializeDragEvent(dragEvent, &mMsg);
- *outSeq = mMsg.header.seq;
- *outEvent = dragEvent;
- break;
- }
-
- case InputMessage::Type::TOUCH_MODE: {
- TouchModeEvent* touchModeEvent = factory->createTouchModeEvent();
- if (!touchModeEvent) return NO_MEMORY;
-
- initializeTouchModeEvent(touchModeEvent, &mMsg);
- *outSeq = mMsg.header.seq;
- *outEvent = touchModeEvent;
- break;
- }
- }
- }
- return OK;
-}
-
-status_t InputConsumer::consumeBatch(InputEventFactoryInterface* factory,
- nsecs_t frameTime, uint32_t* outSeq, InputEvent** outEvent) {
- status_t result;
- for (size_t i = mBatches.size(); i > 0; ) {
- i--;
- Batch& batch = mBatches[i];
- if (frameTime < 0) {
- result = consumeSamples(factory, batch, batch.samples.size(), outSeq, outEvent);
- mBatches.erase(mBatches.begin() + i);
- return result;
- }
-
- nsecs_t sampleTime = frameTime;
- if (mResampleTouch) {
- sampleTime -= std::chrono::nanoseconds(RESAMPLE_LATENCY).count();
- }
- ssize_t split = findSampleNoLaterThan(batch, sampleTime);
- if (split < 0) {
- continue;
- }
-
- result = consumeSamples(factory, batch, split + 1, outSeq, outEvent);
- const InputMessage* next;
- if (batch.samples.empty()) {
- mBatches.erase(mBatches.begin() + i);
- next = nullptr;
- } else {
- next = &batch.samples[0];
- }
- if (!result && mResampleTouch) {
- resampleTouchState(sampleTime, static_cast<MotionEvent*>(*outEvent), next);
- }
- return result;
- }
-
- return WOULD_BLOCK;
-}
-
-status_t InputConsumer::consumeSamples(InputEventFactoryInterface* factory,
- Batch& batch, size_t count, uint32_t* outSeq, InputEvent** outEvent) {
- MotionEvent* motionEvent = factory->createMotionEvent();
- if (! motionEvent) return NO_MEMORY;
-
- uint32_t chain = 0;
- for (size_t i = 0; i < count; i++) {
- InputMessage& msg = batch.samples[i];
- updateTouchState(msg);
- if (i) {
- SeqChain seqChain;
- seqChain.seq = msg.header.seq;
- seqChain.chain = chain;
- mSeqChains.push_back(seqChain);
- addSample(motionEvent, &msg);
- } else {
- initializeMotionEvent(motionEvent, &msg);
- }
- chain = msg.header.seq;
- }
- batch.samples.erase(batch.samples.begin(), batch.samples.begin() + count);
-
- *outSeq = chain;
- *outEvent = motionEvent;
- return OK;
-}
-
-void InputConsumer::updateTouchState(InputMessage& msg) {
- if (!mResampleTouch || !isPointerEvent(msg.body.motion.source)) {
- return;
- }
-
- int32_t deviceId = msg.body.motion.deviceId;
- int32_t source = msg.body.motion.source;
-
- // Update the touch state history to incorporate the new input message.
- // If the message is in the past relative to the most recently produced resampled
- // touch, then use the resampled time and coordinates instead.
- switch (msg.body.motion.action & AMOTION_EVENT_ACTION_MASK) {
- case AMOTION_EVENT_ACTION_DOWN: {
- ssize_t index = findTouchState(deviceId, source);
- if (index < 0) {
- mTouchStates.push_back({});
- index = mTouchStates.size() - 1;
- }
- TouchState& touchState = mTouchStates[index];
- touchState.initialize(deviceId, source);
- touchState.addHistory(msg);
- break;
- }
-
- case AMOTION_EVENT_ACTION_MOVE: {
- ssize_t index = findTouchState(deviceId, source);
- if (index >= 0) {
- TouchState& touchState = mTouchStates[index];
- touchState.addHistory(msg);
- rewriteMessage(touchState, msg);
- }
- break;
- }
-
- case AMOTION_EVENT_ACTION_POINTER_DOWN: {
- ssize_t index = findTouchState(deviceId, source);
- if (index >= 0) {
- TouchState& touchState = mTouchStates[index];
- touchState.lastResample.idBits.clearBit(msg.body.motion.getActionId());
- rewriteMessage(touchState, msg);
- }
- break;
- }
-
- case AMOTION_EVENT_ACTION_POINTER_UP: {
- ssize_t index = findTouchState(deviceId, source);
- if (index >= 0) {
- TouchState& touchState = mTouchStates[index];
- rewriteMessage(touchState, msg);
- touchState.lastResample.idBits.clearBit(msg.body.motion.getActionId());
- }
- break;
- }
-
- case AMOTION_EVENT_ACTION_SCROLL: {
- ssize_t index = findTouchState(deviceId, source);
- if (index >= 0) {
- TouchState& touchState = mTouchStates[index];
- rewriteMessage(touchState, msg);
- }
- break;
- }
-
- case AMOTION_EVENT_ACTION_UP:
- case AMOTION_EVENT_ACTION_CANCEL: {
- ssize_t index = findTouchState(deviceId, source);
- if (index >= 0) {
- TouchState& touchState = mTouchStates[index];
- rewriteMessage(touchState, msg);
- mTouchStates.erase(mTouchStates.begin() + index);
- }
- break;
- }
- }
-}
-
-/**
- * Replace the coordinates in msg with the coordinates in lastResample, if necessary.
- *
- * If lastResample is no longer valid for a specific pointer (i.e. the lastResample time
- * is in the past relative to msg and the past two events do not contain identical coordinates),
- * then invalidate the lastResample data for that pointer.
- * If the two past events have identical coordinates, then lastResample data for that pointer will
- * remain valid, and will be used to replace these coordinates. Thus, if a certain coordinate x0 is
- * resampled to the new value x1, then x1 will always be used to replace x0 until some new value
- * not equal to x0 is received.
- */
-void InputConsumer::rewriteMessage(TouchState& state, InputMessage& msg) {
- nsecs_t eventTime = msg.body.motion.eventTime;
- for (uint32_t i = 0; i < msg.body.motion.pointerCount; i++) {
- uint32_t id = msg.body.motion.pointers[i].properties.id;
- if (state.lastResample.idBits.hasBit(id)) {
- if (eventTime < state.lastResample.eventTime ||
- state.recentCoordinatesAreIdentical(id)) {
- PointerCoords& msgCoords = msg.body.motion.pointers[i].coords;
- const PointerCoords& resampleCoords = state.lastResample.getPointerById(id);
- ALOGD_IF(debugResampling(), "[%d] - rewrite (%0.3f, %0.3f), old (%0.3f, %0.3f)", id,
- resampleCoords.getX(), resampleCoords.getY(), msgCoords.getX(),
- msgCoords.getY());
- msgCoords.setAxisValue(AMOTION_EVENT_AXIS_X, resampleCoords.getX());
- msgCoords.setAxisValue(AMOTION_EVENT_AXIS_Y, resampleCoords.getY());
- msgCoords.isResampled = true;
- } else {
- state.lastResample.idBits.clearBit(id);
- }
- }
- }
-}
-
-void InputConsumer::resampleTouchState(nsecs_t sampleTime, MotionEvent* event,
- const InputMessage* next) {
- if (!mResampleTouch
- || !(isPointerEvent(event->getSource()))
- || event->getAction() != AMOTION_EVENT_ACTION_MOVE) {
- return;
- }
-
- ssize_t index = findTouchState(event->getDeviceId(), event->getSource());
- if (index < 0) {
- ALOGD_IF(debugResampling(), "Not resampled, no touch state for device.");
- return;
- }
-
- TouchState& touchState = mTouchStates[index];
- if (touchState.historySize < 1) {
- ALOGD_IF(debugResampling(), "Not resampled, no history for device.");
- return;
- }
-
- // Ensure that the current sample has all of the pointers that need to be reported.
- const History* current = touchState.getHistory(0);
- size_t pointerCount = event->getPointerCount();
- for (size_t i = 0; i < pointerCount; i++) {
- uint32_t id = event->getPointerId(i);
- if (!current->idBits.hasBit(id)) {
- ALOGD_IF(debugResampling(), "Not resampled, missing id %d", id);
- return;
- }
- }
-
- // Find the data to use for resampling.
- const History* other;
- History future;
- float alpha;
- if (next) {
- // Interpolate between current sample and future sample.
- // So current->eventTime <= sampleTime <= future.eventTime.
- future.initializeFrom(*next);
- other = &future;
- nsecs_t delta = future.eventTime - current->eventTime;
- if (delta < RESAMPLE_MIN_DELTA) {
- ALOGD_IF(debugResampling(), "Not resampled, delta time is too small: %" PRId64 " ns.",
- delta);
- return;
- }
- alpha = float(sampleTime - current->eventTime) / delta;
- } else if (touchState.historySize >= 2) {
- // Extrapolate future sample using current sample and past sample.
- // So other->eventTime <= current->eventTime <= sampleTime.
- other = touchState.getHistory(1);
- nsecs_t delta = current->eventTime - other->eventTime;
- if (delta < RESAMPLE_MIN_DELTA) {
- ALOGD_IF(debugResampling(), "Not resampled, delta time is too small: %" PRId64 " ns.",
- delta);
- return;
- } else if (delta > RESAMPLE_MAX_DELTA) {
- ALOGD_IF(debugResampling(), "Not resampled, delta time is too large: %" PRId64 " ns.",
- delta);
- return;
- }
- nsecs_t maxPredict = current->eventTime + min(delta / 2, RESAMPLE_MAX_PREDICTION);
- if (sampleTime > maxPredict) {
- ALOGD_IF(debugResampling(),
- "Sample time is too far in the future, adjusting prediction "
- "from %" PRId64 " to %" PRId64 " ns.",
- sampleTime - current->eventTime, maxPredict - current->eventTime);
- sampleTime = maxPredict;
- }
- alpha = float(current->eventTime - sampleTime) / delta;
- } else {
- ALOGD_IF(debugResampling(), "Not resampled, insufficient data.");
- return;
- }
-
- if (current->eventTime == sampleTime) {
- // Prevents having 2 events with identical times and coordinates.
- return;
- }
-
- // Resample touch coordinates.
- History oldLastResample;
- oldLastResample.initializeFrom(touchState.lastResample);
- touchState.lastResample.eventTime = sampleTime;
- touchState.lastResample.idBits.clear();
- for (size_t i = 0; i < pointerCount; i++) {
- uint32_t id = event->getPointerId(i);
- touchState.lastResample.idToIndex[id] = i;
- touchState.lastResample.idBits.markBit(id);
- if (oldLastResample.hasPointerId(id) && touchState.recentCoordinatesAreIdentical(id)) {
- // We maintain the previously resampled value for this pointer (stored in
- // oldLastResample) when the coordinates for this pointer haven't changed since then.
- // This way we don't introduce artificial jitter when pointers haven't actually moved.
- // The isResampled flag isn't cleared as the values don't reflect what the device is
- // actually reporting.
-
- // We know here that the coordinates for the pointer haven't changed because we
- // would've cleared the resampled bit in rewriteMessage if they had. We can't modify
- // lastResample in place because the mapping from pointer ID to index may have changed.
- touchState.lastResample.pointers[i] = oldLastResample.getPointerById(id);
- continue;
- }
-
- PointerCoords& resampledCoords = touchState.lastResample.pointers[i];
- const PointerCoords& currentCoords = current->getPointerById(id);
- resampledCoords = currentCoords;
- resampledCoords.isResampled = true;
- if (other->idBits.hasBit(id) && shouldResampleTool(event->getToolType(i))) {
- const PointerCoords& otherCoords = other->getPointerById(id);
- resampledCoords.setAxisValue(AMOTION_EVENT_AXIS_X,
- lerp(currentCoords.getX(), otherCoords.getX(), alpha));
- resampledCoords.setAxisValue(AMOTION_EVENT_AXIS_Y,
- lerp(currentCoords.getY(), otherCoords.getY(), alpha));
- ALOGD_IF(debugResampling(),
- "[%d] - out (%0.3f, %0.3f), cur (%0.3f, %0.3f), "
- "other (%0.3f, %0.3f), alpha %0.3f",
- id, resampledCoords.getX(), resampledCoords.getY(), currentCoords.getX(),
- currentCoords.getY(), otherCoords.getX(), otherCoords.getY(), alpha);
- } else {
- ALOGD_IF(debugResampling(), "[%d] - out (%0.3f, %0.3f), cur (%0.3f, %0.3f)", id,
- resampledCoords.getX(), resampledCoords.getY(), currentCoords.getX(),
- currentCoords.getY());
- }
- }
-
- event->addSample(sampleTime, touchState.lastResample.pointers);
-}
-
-status_t InputConsumer::sendFinishedSignal(uint32_t seq, bool handled) {
- ALOGD_IF(DEBUG_TRANSPORT_CONSUMER,
- "channel '%s' consumer ~ sendFinishedSignal: seq=%u, handled=%s",
- mChannel->getName().c_str(), seq, toString(handled));
-
- if (!seq) {
- ALOGE("Attempted to send a finished signal with sequence number 0.");
- return BAD_VALUE;
- }
-
- // Send finished signals for the batch sequence chain first.
- size_t seqChainCount = mSeqChains.size();
- if (seqChainCount) {
- uint32_t currentSeq = seq;
- uint32_t chainSeqs[seqChainCount];
- size_t chainIndex = 0;
- for (size_t i = seqChainCount; i > 0; ) {
- i--;
- const SeqChain& seqChain = mSeqChains[i];
- if (seqChain.seq == currentSeq) {
- currentSeq = seqChain.chain;
- chainSeqs[chainIndex++] = currentSeq;
- mSeqChains.erase(mSeqChains.begin() + i);
- }
- }
- status_t status = OK;
- while (!status && chainIndex > 0) {
- chainIndex--;
- status = sendUnchainedFinishedSignal(chainSeqs[chainIndex], handled);
- }
- if (status) {
- // An error occurred so at least one signal was not sent, reconstruct the chain.
- for (;;) {
- SeqChain seqChain;
- seqChain.seq = chainIndex != 0 ? chainSeqs[chainIndex - 1] : seq;
- seqChain.chain = chainSeqs[chainIndex];
- mSeqChains.push_back(seqChain);
- if (!chainIndex) break;
- chainIndex--;
- }
- return status;
- }
- }
-
- // Send finished signal for the last message in the batch.
- return sendUnchainedFinishedSignal(seq, handled);
-}
-
-status_t InputConsumer::sendTimeline(int32_t inputEventId,
- std::array<nsecs_t, GraphicsTimeline::SIZE> graphicsTimeline) {
- ALOGD_IF(DEBUG_TRANSPORT_CONSUMER,
- "channel '%s' consumer ~ sendTimeline: inputEventId=%" PRId32
- ", gpuCompletedTime=%" PRId64 ", presentTime=%" PRId64,
- mChannel->getName().c_str(), inputEventId,
- graphicsTimeline[GraphicsTimeline::GPU_COMPLETED_TIME],
- graphicsTimeline[GraphicsTimeline::PRESENT_TIME]);
-
- InputMessage msg;
- msg.header.type = InputMessage::Type::TIMELINE;
- msg.header.seq = 0;
- msg.body.timeline.eventId = inputEventId;
- msg.body.timeline.graphicsTimeline = std::move(graphicsTimeline);
- return mChannel->sendMessage(&msg);
-}
-
-nsecs_t InputConsumer::getConsumeTime(uint32_t seq) const {
- auto it = mConsumeTimes.find(seq);
- // Consume time will be missing if either 'finishInputEvent' is called twice, or if it was
- // called for the wrong (synthetic?) input event. Either way, it is a bug that should be fixed.
- LOG_ALWAYS_FATAL_IF(it == mConsumeTimes.end(), "Could not find consume time for seq=%" PRIu32,
- seq);
- return it->second;
-}
-
-void InputConsumer::popConsumeTime(uint32_t seq) {
- mConsumeTimes.erase(seq);
-}
-
-status_t InputConsumer::sendUnchainedFinishedSignal(uint32_t seq, bool handled) {
- InputMessage msg;
- msg.header.type = InputMessage::Type::FINISHED;
- msg.header.seq = seq;
- msg.body.finished.handled = handled;
- msg.body.finished.consumeTime = getConsumeTime(seq);
- status_t result = mChannel->sendMessage(&msg);
- if (result == OK) {
- // Remove the consume time if the socket write succeeded. We will not need to ack this
- // message anymore. If the socket write did not succeed, we will try again and will still
- // need consume time.
- popConsumeTime(seq);
-
- // Trace the event processing timeline - event was just finished
- ATRACE_ASYNC_END("InputConsumer processing", /*cookie=*/seq);
- }
- return result;
-}
-
-bool InputConsumer::hasPendingBatch() const {
- return !mBatches.empty();
-}
-
-int32_t InputConsumer::getPendingBatchSource() const {
- if (mBatches.empty()) {
- return AINPUT_SOURCE_CLASS_NONE;
- }
-
- const Batch& batch = mBatches[0];
- const InputMessage& head = batch.samples[0];
- return head.body.motion.source;
-}
-
-bool InputConsumer::probablyHasInput() const {
- return hasPendingBatch() || mChannel->probablyHasInput();
-}
-
-ssize_t InputConsumer::findBatch(int32_t deviceId, int32_t source) const {
- for (size_t i = 0; i < mBatches.size(); i++) {
- const Batch& batch = mBatches[i];
- const InputMessage& head = batch.samples[0];
- if (head.body.motion.deviceId == deviceId && head.body.motion.source == source) {
- return i;
- }
- }
- return -1;
-}
-
-ssize_t InputConsumer::findTouchState(int32_t deviceId, int32_t source) const {
- for (size_t i = 0; i < mTouchStates.size(); i++) {
- const TouchState& touchState = mTouchStates[i];
- if (touchState.deviceId == deviceId && touchState.source == source) {
- return i;
- }
- }
- return -1;
-}
-
-void InputConsumer::initializeKeyEvent(KeyEvent* event, const InputMessage* msg) {
- event->initialize(msg->body.key.eventId, msg->body.key.deviceId, msg->body.key.source,
- msg->body.key.displayId, msg->body.key.hmac, msg->body.key.action,
- msg->body.key.flags, msg->body.key.keyCode, msg->body.key.scanCode,
- msg->body.key.metaState, msg->body.key.repeatCount, msg->body.key.downTime,
- msg->body.key.eventTime);
-}
-
-void InputConsumer::initializeFocusEvent(FocusEvent* event, const InputMessage* msg) {
- event->initialize(msg->body.focus.eventId, msg->body.focus.hasFocus);
-}
-
-void InputConsumer::initializeCaptureEvent(CaptureEvent* event, const InputMessage* msg) {
- event->initialize(msg->body.capture.eventId, msg->body.capture.pointerCaptureEnabled);
-}
-
-void InputConsumer::initializeDragEvent(DragEvent* event, const InputMessage* msg) {
- event->initialize(msg->body.drag.eventId, msg->body.drag.x, msg->body.drag.y,
- msg->body.drag.isExiting);
-}
-
-void InputConsumer::initializeMotionEvent(MotionEvent* event, const InputMessage* msg) {
- uint32_t pointerCount = msg->body.motion.pointerCount;
- PointerProperties pointerProperties[pointerCount];
- PointerCoords pointerCoords[pointerCount];
- for (uint32_t i = 0; i < pointerCount; i++) {
- pointerProperties[i] = msg->body.motion.pointers[i].properties;
- pointerCoords[i] = msg->body.motion.pointers[i].coords;
- }
-
- ui::Transform transform;
- transform.set({msg->body.motion.dsdx, msg->body.motion.dtdx, msg->body.motion.tx,
- msg->body.motion.dtdy, msg->body.motion.dsdy, msg->body.motion.ty, 0, 0, 1});
- ui::Transform displayTransform;
- displayTransform.set({msg->body.motion.dsdxRaw, msg->body.motion.dtdxRaw,
- msg->body.motion.txRaw, msg->body.motion.dtdyRaw,
- msg->body.motion.dsdyRaw, msg->body.motion.tyRaw, 0, 0, 1});
- event->initialize(msg->body.motion.eventId, msg->body.motion.deviceId, msg->body.motion.source,
- msg->body.motion.displayId, msg->body.motion.hmac, msg->body.motion.action,
- msg->body.motion.actionButton, msg->body.motion.flags,
- msg->body.motion.edgeFlags, msg->body.motion.metaState,
- msg->body.motion.buttonState, msg->body.motion.classification, transform,
- msg->body.motion.xPrecision, msg->body.motion.yPrecision,
- msg->body.motion.xCursorPosition, msg->body.motion.yCursorPosition,
- displayTransform, msg->body.motion.downTime, msg->body.motion.eventTime,
- pointerCount, pointerProperties, pointerCoords);
-}
-
-void InputConsumer::initializeTouchModeEvent(TouchModeEvent* event, const InputMessage* msg) {
- event->initialize(msg->body.touchMode.eventId, msg->body.touchMode.isInTouchMode);
-}
-
-void InputConsumer::addSample(MotionEvent* event, const InputMessage* msg) {
- uint32_t pointerCount = msg->body.motion.pointerCount;
- PointerCoords pointerCoords[pointerCount];
- for (uint32_t i = 0; i < pointerCount; i++) {
- pointerCoords[i] = msg->body.motion.pointers[i].coords;
- }
-
- event->setMetaState(event->getMetaState() | msg->body.motion.metaState);
- event->addSample(msg->body.motion.eventTime, pointerCoords);
-}
-
-bool InputConsumer::canAddSample(const Batch& batch, const InputMessage *msg) {
- const InputMessage& head = batch.samples[0];
- uint32_t pointerCount = msg->body.motion.pointerCount;
- if (head.body.motion.pointerCount != pointerCount
- || head.body.motion.action != msg->body.motion.action) {
- return false;
- }
- for (size_t i = 0; i < pointerCount; i++) {
- if (head.body.motion.pointers[i].properties
- != msg->body.motion.pointers[i].properties) {
- return false;
- }
- }
- return true;
-}
-
-ssize_t InputConsumer::findSampleNoLaterThan(const Batch& batch, nsecs_t time) {
- size_t numSamples = batch.samples.size();
- size_t index = 0;
- while (index < numSamples && batch.samples[index].body.motion.eventTime <= time) {
- index += 1;
- }
- return ssize_t(index) - 1;
-}
-
-std::string InputConsumer::dump() const {
- std::string out;
- out = out + "mResampleTouch = " + toString(mResampleTouch) + "\n";
- out = out + "mChannel = " + mChannel->getName() + "\n";
- out = out + "mMsgDeferred: " + toString(mMsgDeferred) + "\n";
- if (mMsgDeferred) {
- out = out + "mMsg : " + ftl::enum_string(mMsg.header.type) + "\n";
- }
- out += "Batches:\n";
- for (const Batch& batch : mBatches) {
- out += " Batch:\n";
- for (const InputMessage& msg : batch.samples) {
- out += android::base::StringPrintf(" Message %" PRIu32 ": %s ", msg.header.seq,
- ftl::enum_string(msg.header.type).c_str());
- switch (msg.header.type) {
- case InputMessage::Type::KEY: {
- out += android::base::StringPrintf("action=%s keycode=%" PRId32,
- KeyEvent::actionToString(
- msg.body.key.action),
- msg.body.key.keyCode);
- break;
- }
- case InputMessage::Type::MOTION: {
- out = out + "action=" + MotionEvent::actionToString(msg.body.motion.action);
- for (uint32_t i = 0; i < msg.body.motion.pointerCount; i++) {
- const float x = msg.body.motion.pointers[i].coords.getX();
- const float y = msg.body.motion.pointers[i].coords.getY();
- out += android::base::StringPrintf("\n Pointer %" PRIu32
- " : x=%.1f y=%.1f",
- i, x, y);
- }
- break;
- }
- case InputMessage::Type::FINISHED: {
- out += android::base::StringPrintf("handled=%s, consumeTime=%" PRId64,
- toString(msg.body.finished.handled),
- msg.body.finished.consumeTime);
- break;
- }
- case InputMessage::Type::FOCUS: {
- out += android::base::StringPrintf("hasFocus=%s",
- toString(msg.body.focus.hasFocus));
- break;
- }
- case InputMessage::Type::CAPTURE: {
- out += android::base::StringPrintf("hasCapture=%s",
- toString(msg.body.capture
- .pointerCaptureEnabled));
- break;
- }
- case InputMessage::Type::DRAG: {
- out += android::base::StringPrintf("x=%.1f y=%.1f, isExiting=%s",
- msg.body.drag.x, msg.body.drag.y,
- toString(msg.body.drag.isExiting));
- break;
- }
- case InputMessage::Type::TIMELINE: {
- const nsecs_t gpuCompletedTime =
- msg.body.timeline
- .graphicsTimeline[GraphicsTimeline::GPU_COMPLETED_TIME];
- const nsecs_t presentTime =
- msg.body.timeline.graphicsTimeline[GraphicsTimeline::PRESENT_TIME];
- out += android::base::StringPrintf("inputEventId=%" PRId32
- ", gpuCompletedTime=%" PRId64
- ", presentTime=%" PRId64,
- msg.body.timeline.eventId, gpuCompletedTime,
- presentTime);
- break;
- }
- case InputMessage::Type::TOUCH_MODE: {
- out += android::base::StringPrintf("isInTouchMode=%s",
- toString(msg.body.touchMode.isInTouchMode));
- break;
- }
- }
- out += "\n";
- }
- }
- if (mBatches.empty()) {
- out += " <empty>\n";
- }
- out += "mSeqChains:\n";
- for (const SeqChain& chain : mSeqChains) {
- out += android::base::StringPrintf(" chain: seq = %" PRIu32 " chain=%" PRIu32, chain.seq,
- chain.chain);
- }
- if (mSeqChains.empty()) {
- out += " <empty>\n";
- }
- out += "mConsumeTimes:\n";
- for (const auto& [seq, consumeTime] : mConsumeTimes) {
- out += android::base::StringPrintf(" seq = %" PRIu32 " consumeTime = %" PRId64, seq,
- consumeTime);
- }
- if (mConsumeTimes.empty()) {
- out += " <empty>\n";
- }
- return out;
-}
-
} // namespace android
diff --git a/libs/input/KeyCharacterMap.cpp b/libs/input/KeyCharacterMap.cpp
index e2feabcbbe..1cf5612d45 100644
--- a/libs/input/KeyCharacterMap.cpp
+++ b/libs/input/KeyCharacterMap.cpp
@@ -19,16 +19,13 @@
#include <stdlib.h>
#include <string.h>
-#ifdef __linux__
-#include <binder/Parcel.h>
-#endif
#include <android/keycodes.h>
#include <attestation/HmacKeyManager.h>
+#include <binder/Parcel.h>
#include <input/InputEventLabels.h>
#include <input/KeyCharacterMap.h>
#include <input/Keyboard.h>
-#include <gui/constants.h>
#include <utils/Errors.h>
#include <utils/Log.h>
#include <utils/Timers.h>
@@ -496,13 +493,14 @@ bool KeyCharacterMap::findKey(char16_t ch, int32_t* outKeyCode, int32_t* outMeta
return false;
}
-void KeyCharacterMap::addKey(Vector<KeyEvent>& outEvents,
- int32_t deviceId, int32_t keyCode, int32_t metaState, bool down, nsecs_t time) {
+void KeyCharacterMap::addKey(Vector<KeyEvent>& outEvents, int32_t deviceId, int32_t keyCode,
+ int32_t metaState, bool down, nsecs_t time) {
outEvents.push();
KeyEvent& event = outEvents.editTop();
- event.initialize(InputEvent::nextId(), deviceId, AINPUT_SOURCE_KEYBOARD, ADISPLAY_ID_NONE,
- INVALID_HMAC, down ? AKEY_EVENT_ACTION_DOWN : AKEY_EVENT_ACTION_UP, 0, keyCode,
- 0, metaState, 0, time, time);
+ event.initialize(InputEvent::nextId(), deviceId, AINPUT_SOURCE_KEYBOARD,
+ ui::LogicalDisplayId::INVALID, INVALID_HMAC,
+ down ? AKEY_EVENT_ACTION_DOWN : AKEY_EVENT_ACTION_UP, 0, keyCode, 0, metaState,
+ 0, time, time);
}
void KeyCharacterMap::addMetaKeys(Vector<KeyEvent>& outEvents,
@@ -612,7 +610,6 @@ void KeyCharacterMap::addLockedMetaKey(Vector<KeyEvent>& outEvents,
}
}
-#ifdef __linux__
std::unique_ptr<KeyCharacterMap> KeyCharacterMap::readFromParcel(Parcel* parcel) {
if (parcel == nullptr) {
ALOGE("%s: Null parcel", __func__);
@@ -745,7 +742,6 @@ void KeyCharacterMap::writeToParcel(Parcel* parcel) const {
parcel->writeInt32(toAndroidKeyCode);
}
}
-#endif // __linux__
// --- KeyCharacterMap::Parser ---
diff --git a/libs/input/KeyboardClassifier.cpp b/libs/input/KeyboardClassifier.cpp
new file mode 100644
index 0000000000..0c2c7be582
--- /dev/null
+++ b/libs/input/KeyboardClassifier.cpp
@@ -0,0 +1,89 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "KeyboardClassifier"
+
+#include <android-base/logging.h>
+#include <com_android_input_flags.h>
+#include <ftl/flags.h>
+#include <input/KeyboardClassifier.h>
+
+#include "input_cxx_bridge.rs.h"
+
+namespace input_flags = com::android::input::flags;
+
+using android::input::RustInputDeviceIdentifier;
+
+namespace android {
+
+KeyboardClassifier::KeyboardClassifier() {
+ if (input_flags::enable_keyboard_classifier()) {
+ mRustClassifier = android::input::keyboardClassifier::create();
+ }
+}
+
+KeyboardType KeyboardClassifier::getKeyboardType(DeviceId deviceId) {
+ if (mRustClassifier) {
+ return static_cast<KeyboardType>(
+ android::input::keyboardClassifier::getKeyboardType(**mRustClassifier, deviceId));
+ } else {
+ auto it = mKeyboardTypeMap.find(deviceId);
+ if (it == mKeyboardTypeMap.end()) {
+ return KeyboardType::NONE;
+ }
+ return it->second;
+ }
+}
+
+// Copied from EventHub.h
+const uint32_t DEVICE_CLASS_KEYBOARD = android::os::IInputConstants::DEVICE_CLASS_KEYBOARD;
+const uint32_t DEVICE_CLASS_ALPHAKEY = android::os::IInputConstants::DEVICE_CLASS_ALPHAKEY;
+
+void KeyboardClassifier::notifyKeyboardChanged(DeviceId deviceId,
+ const InputDeviceIdentifier& identifier,
+ uint32_t deviceClasses) {
+ if (mRustClassifier) {
+ RustInputDeviceIdentifier rustIdentifier;
+ rustIdentifier.name = identifier.name;
+ rustIdentifier.location = identifier.location;
+ rustIdentifier.unique_id = identifier.uniqueId;
+ rustIdentifier.bus = identifier.bus;
+ rustIdentifier.vendor = identifier.vendor;
+ rustIdentifier.product = identifier.product;
+ rustIdentifier.version = identifier.version;
+ rustIdentifier.descriptor = identifier.descriptor;
+ android::input::keyboardClassifier::notifyKeyboardChanged(**mRustClassifier, deviceId,
+ rustIdentifier, deviceClasses);
+ } else {
+ bool isKeyboard = (deviceClasses & DEVICE_CLASS_KEYBOARD) != 0;
+ bool hasAlphabeticKey = (deviceClasses & DEVICE_CLASS_ALPHAKEY) != 0;
+ mKeyboardTypeMap.insert_or_assign(deviceId,
+ isKeyboard ? (hasAlphabeticKey
+ ? KeyboardType::ALPHABETIC
+ : KeyboardType::NON_ALPHABETIC)
+ : KeyboardType::NONE);
+ }
+}
+
+void KeyboardClassifier::processKey(DeviceId deviceId, int32_t evdevCode, uint32_t metaState) {
+ if (mRustClassifier &&
+ !android::input::keyboardClassifier::isFinalized(**mRustClassifier, deviceId)) {
+ android::input::keyboardClassifier::processKey(**mRustClassifier, deviceId, evdevCode,
+ metaState);
+ }
+}
+
+} // namespace android
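
Editor's note: the fallback path above (used when the keyboard-classifier flag is disabled) reduces to a simple mapping from device-class bits to a keyboard type. Below is a minimal standalone sketch of that mapping; the local constants mirror IInputConstants::DEVICE_CLASS_KEYBOARD and DEVICE_CLASS_ALPHAKEY rather than using the real AIDL-generated header.

#include <cstdint>
#include <cstdio>

enum class KeyboardType { NONE, NON_ALPHABETIC, ALPHABETIC };

// Local stand-ins mirroring IInputConstants::DEVICE_CLASS_KEYBOARD / DEVICE_CLASS_ALPHAKEY.
constexpr uint32_t kDeviceClassKeyboard = 0x00000001;
constexpr uint32_t kDeviceClassAlphaKey = 0x00000002;

// Same decision as the non-Rust branch of notifyKeyboardChanged().
KeyboardType classifyFallback(uint32_t deviceClasses) {
    const bool isKeyboard = (deviceClasses & kDeviceClassKeyboard) != 0;
    const bool hasAlphabeticKey = (deviceClasses & kDeviceClassAlphaKey) != 0;
    if (!isKeyboard) return KeyboardType::NONE;
    return hasAlphabeticKey ? KeyboardType::ALPHABETIC : KeyboardType::NON_ALPHABETIC;
}

int main() {
    // A full keyboard reports both bits; a button-only device reports only KEYBOARD.
    std::printf("full=%d buttons=%d\n",
                static_cast<int>(classifyFallback(kDeviceClassKeyboard | kDeviceClassAlphaKey)),
                static_cast<int>(classifyFallback(kDeviceClassKeyboard)));
    return 0;
}
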
diff --git a/libs/input/MotionPredictor.cpp b/libs/input/MotionPredictor.cpp
index c4e3ff6dee..5b61d3953f 100644
--- a/libs/input/MotionPredictor.cpp
+++ b/libs/input/MotionPredictor.cpp
@@ -18,21 +18,29 @@
#include <input/MotionPredictor.h>
+#include <algorithm>
+#include <array>
#include <cinttypes>
#include <cmath>
#include <cstddef>
#include <cstdint>
+#include <limits>
+#include <optional>
#include <string>
+#include <utility>
#include <vector>
#include <android-base/logging.h>
#include <android-base/strings.h>
#include <android/input.h>
+#include <com_android_input_flags.h>
#include <attestation/HmacKeyManager.h>
#include <ftl/enum.h>
#include <input/TfLiteMotionPredictor.h>
+namespace input_flags = com::android::input::flags;
+
namespace android {
namespace {
@@ -55,8 +63,73 @@ TfLiteMotionPredictorSample::Point convertPrediction(
return {.x = axisTo.x + x_delta, .y = axisTo.y + y_delta};
}
+float normalizeRange(float x, float min, float max) {
+ const float normalized = (x - min) / (max - min);
+ return std::min(1.0f, std::max(0.0f, normalized));
+}
+
} // namespace
+// --- JerkTracker ---
+
+JerkTracker::JerkTracker(bool normalizedDt) : mNormalizedDt(normalizedDt) {}
+
+void JerkTracker::pushSample(int64_t timestamp, float xPos, float yPos) {
+ mTimestamps.pushBack(timestamp);
+ const int numSamples = mTimestamps.size();
+
+ std::array<float, 4> newXDerivatives;
+ std::array<float, 4> newYDerivatives;
+
+ /**
+ * Diagram showing the calculation of higher order derivatives of sample x3
+ * collected at time=t3.
+ * Terms in parentheses are not stored (and not needed for calculations).
+ * t0 ----- t1 ----- t2 ----- t3
+ * (x0)-----(x1) ----- x2 ----- x3
+ * (x'0) --- x'1 --- x'2
+ * x''0 - x''1
+ * x'''0
+ *
+ * In this example:
+ * x'2 = (x3 - x2) / (t3 - t2)
+ * x''1 = (x'2 - x'1) / (t2 - t1)
+ * x'''0 = (x''1 - x''0) / (t1 - t0)
+ * Therefore, timestamp history is needed to calculate higher order derivatives,
+ * compared to just the last calculated derivative sample.
+ *
+ * If mNormalizedDt = true, then dt = 1 and the division is moot.
+ */
+ for (int i = 0; i < numSamples; ++i) {
+ if (i == 0) {
+ newXDerivatives[i] = xPos;
+ newYDerivatives[i] = yPos;
+ } else {
+ newXDerivatives[i] = newXDerivatives[i - 1] - mXDerivatives[i - 1];
+ newYDerivatives[i] = newYDerivatives[i - 1] - mYDerivatives[i - 1];
+ if (!mNormalizedDt) {
+ const float dt = mTimestamps[numSamples - i] - mTimestamps[numSamples - i - 1];
+ newXDerivatives[i] = newXDerivatives[i] / dt;
+ newYDerivatives[i] = newYDerivatives[i] / dt;
+ }
+ }
+ }
+
+ std::swap(newXDerivatives, mXDerivatives);
+ std::swap(newYDerivatives, mYDerivatives);
+}
+
+void JerkTracker::reset() {
+ mTimestamps.clear();
+}
+
+std::optional<float> JerkTracker::jerkMagnitude() const {
+ if (mTimestamps.size() == mTimestamps.capacity()) {
+ return std::hypot(mXDerivatives[3], mYDerivatives[3]);
+ }
+ return std::nullopt;
+}
+
// --- MotionPredictor ---
MotionPredictor::MotionPredictor(nsecs_t predictionTimestampOffsetNanos,
@@ -103,6 +176,7 @@ android::base::Result<void> MotionPredictor::record(const MotionEvent& event) {
if (action == AMOTION_EVENT_ACTION_UP || action == AMOTION_EVENT_ACTION_CANCEL) {
ALOGD_IF(isDebug(), "End of event stream");
mBuffers->reset();
+ mJerkTracker.reset();
mLastEvent.reset();
return {};
} else if (action != AMOTION_EVENT_ACTION_DOWN && action != AMOTION_EVENT_ACTION_MOVE) {
@@ -137,6 +211,9 @@ android::base::Result<void> MotionPredictor::record(const MotionEvent& event) {
0, i),
.orientation = event.getHistoricalOrientation(0, i),
});
+ mJerkTracker.pushSample(event.getHistoricalEventTime(i),
+ coords->getAxisValue(AMOTION_EVENT_AXIS_X),
+ coords->getAxisValue(AMOTION_EVENT_AXIS_Y));
}
if (!mLastEvent) {
@@ -184,6 +261,17 @@ std::unique_ptr<MotionEvent> MotionPredictor::predict(nsecs_t timestamp) {
int64_t predictionTime = mBuffers->lastTimestamp();
const int64_t futureTime = timestamp + mPredictionTimestampOffsetNanos;
+ const float jerkMagnitude = mJerkTracker.jerkMagnitude().value_or(0);
+ const float fractionKept =
+ 1 - normalizeRange(jerkMagnitude, mModel->config().lowJerk, mModel->config().highJerk);
+ // float to ensure proper division below.
+ const float predictionTimeWindow = futureTime - predictionTime;
+ const int maxNumPredictions = static_cast<int>(
+ std::ceil(predictionTimeWindow / mModel->config().predictionInterval * fractionKept));
+ ALOGD_IF(isDebug(),
+ "jerk (d^3p/normalizedDt^3): %f, fraction of prediction window pruned: %f, max number "
+ "of predictions: %d",
+ jerkMagnitude, 1 - fractionKept, maxNumPredictions);
for (size_t i = 0; i < static_cast<size_t>(predictedR.size()) && predictionTime <= futureTime;
++i) {
if (predictedR[i] < mModel->config().distanceNoiseFloor) {
@@ -197,7 +285,13 @@ std::unique_ptr<MotionEvent> MotionPredictor::predict(nsecs_t timestamp) {
// device starts to speed up, but avoids producing noisy predictions as it slows down.
break;
}
- // TODO(b/266747654): Stop predictions if confidence is < some threshold.
+ if (input_flags::enable_prediction_pruning_via_jerk_thresholding()) {
+ if (i >= static_cast<size_t>(maxNumPredictions)) {
+ break;
+ }
+ }
+ // TODO(b/266747654): Stop predictions if confidence is < some
+ // threshold. Currently predictions are pruned via jerk thresholding.
const TfLiteMotionPredictorSample::Point predictedPoint =
convertPrediction(axisFrom, axisTo, predictedR[i], predictedPhi[i]);
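
Editor's note: the jerk-based pruning added above is plain arithmetic over the model config. The jerk magnitude is normalized between lowJerk and highJerk, and the surviving fraction of the prediction window caps how many predicted samples are emitted. The sketch below walks through that calculation with hypothetical numbers; the real predictionInterval, lowJerk, and highJerk come from the model's XML config.

#include <algorithm>
#include <cmath>
#include <cstdint>
#include <cstdio>

int main() {
    // Hypothetical model config values (the real ones are parsed from the model XML:
    // prediction-interval, low-jerk, high-jerk).
    const int64_t predictionIntervalNs = 4'000'000;   // 4 ms between predicted samples
    const float lowJerk = 0.2f;
    const float highJerk = 1.0f;

    const float jerkMagnitude = 0.6f;                 // as returned by JerkTracker::jerkMagnitude()
    const int64_t predictionWindowNs = 20'000'000;    // futureTime - lastTimestamp, 20 ms

    // normalizeRange clamps (x - min) / (max - min) to [0, 1].
    const float normalized =
            std::min(1.0f, std::max(0.0f, (jerkMagnitude - lowJerk) / (highJerk - lowJerk)));
    const float fractionKept = 1 - normalized;        // 0.5 here: half the window survives

    const int maxNumPredictions = static_cast<int>(std::ceil(
            static_cast<float>(predictionWindowNs) / predictionIntervalNs * fractionKept));
    std::printf("fractionKept=%.2f maxNumPredictions=%d\n", fractionKept, maxNumPredictions);
    // Prints fractionKept=0.50 maxNumPredictions=3: only the first 3 of the 5 possible
    // predicted samples are kept when the jerk is halfway between the two thresholds.
    return 0;
}
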
diff --git a/libs/input/MotionPredictorMetricsManager.cpp b/libs/input/MotionPredictorMetricsManager.cpp
index 0412d08181..ccf018e56a 100644
--- a/libs/input/MotionPredictorMetricsManager.cpp
+++ b/libs/input/MotionPredictorMetricsManager.cpp
@@ -21,14 +21,13 @@
#include <algorithm>
#include <android-base/logging.h>
+#ifdef __ANDROID__
+#include <statslog_libinput.h>
+#endif // __ANDROID__
#include "Eigen/Core"
#include "Eigen/Geometry"
-#ifdef __ANDROID__
-#include <statslog_libinput.h>
-#endif
-
namespace android {
namespace {
@@ -48,22 +47,20 @@ inline constexpr float PATH_LENGTH_EPSILON = 0.001;
void MotionPredictorMetricsManager::defaultReportAtomFunction(
const MotionPredictorMetricsManager::AtomFields& atomFields) {
- // Call stats_write logging function only on Android targets (not supported on host).
#ifdef __ANDROID__
- android::stats::libinput::
- stats_write(android::stats::libinput::STYLUS_PREDICTION_METRICS_REPORTED,
- /*stylus_vendor_id=*/0,
- /*stylus_product_id=*/0,
- atomFields.deltaTimeBucketMilliseconds,
- atomFields.alongTrajectoryErrorMeanMillipixels,
- atomFields.alongTrajectoryErrorStdMillipixels,
- atomFields.offTrajectoryRmseMillipixels,
- atomFields.pressureRmseMilliunits,
- atomFields.highVelocityAlongTrajectoryRmse,
- atomFields.highVelocityOffTrajectoryRmse,
- atomFields.scaleInvariantAlongTrajectoryRmse,
- atomFields.scaleInvariantOffTrajectoryRmse);
-#endif
+ android::libinput::stats_write(android::libinput::STYLUS_PREDICTION_METRICS_REPORTED,
+ /*stylus_vendor_id=*/0,
+ /*stylus_product_id=*/0,
+ atomFields.deltaTimeBucketMilliseconds,
+ atomFields.alongTrajectoryErrorMeanMillipixels,
+ atomFields.alongTrajectoryErrorStdMillipixels,
+ atomFields.offTrajectoryRmseMillipixels,
+ atomFields.pressureRmseMilliunits,
+ atomFields.highVelocityAlongTrajectoryRmse,
+ atomFields.highVelocityOffTrajectoryRmse,
+ atomFields.scaleInvariantAlongTrajectoryRmse,
+ atomFields.scaleInvariantOffTrajectoryRmse);
+#endif // __ANDROID__
}
MotionPredictorMetricsManager::MotionPredictorMetricsManager(
@@ -113,7 +110,12 @@ void MotionPredictorMetricsManager::onRecord(const MotionEvent& inputEvent) {
// Adds new predictions to mRecentPredictions and maintains the invariant that elements are
// sorted in ascending order of targetTimestamp.
void MotionPredictorMetricsManager::onPredict(const MotionEvent& predictionEvent) {
- for (size_t i = 0; i < predictionEvent.getHistorySize() + 1; ++i) {
+ const size_t numPredictions = predictionEvent.getHistorySize() + 1;
+ if (numPredictions > mMaxNumPredictions) {
+ LOG(WARNING) << "numPredictions (" << numPredictions << ") > mMaxNumPredictions ("
+ << mMaxNumPredictions << "). Ignoring extra predictions in metrics.";
+ }
+ for (size_t i = 0; (i < numPredictions) && (i < mMaxNumPredictions); ++i) {
// Convert MotionEvent to PredictionPoint.
const PointerCoords* coords =
predictionEvent.getHistoricalRawPointerCoords(/*pointerIndex=*/0, i);
@@ -325,42 +327,44 @@ void MotionPredictorMetricsManager::computeAtomFields() {
mAtomFields[i].highVelocityOffTrajectoryRmse =
static_cast<int>(offTrajectoryRmse * 1000);
}
+ }
- // Scale-invariant errors: reported only for the last time bucket, where the values
- // represent an average across all time buckets.
- if (i + 1 == mMaxNumPredictions) {
- // Compute error averages.
- float alongTrajectoryRmseSum = 0;
- float offTrajectoryRmseSum = 0;
- for (size_t j = 0; j < mAggregatedMetrics.size(); ++j) {
- // If we have general errors (checked above), we should always also have
- // scale-invariant errors.
- LOG_ALWAYS_FATAL_IF(mAggregatedMetrics[j].scaleInvariantErrorsCount == 0,
- "mAggregatedMetrics[%zu].scaleInvariantErrorsCount is 0", j);
-
- LOG_ALWAYS_FATAL_IF(mAggregatedMetrics[j].scaleInvariantAlongTrajectorySse < 0,
- "mAggregatedMetrics[%zu].scaleInvariantAlongTrajectorySse = %f "
- "should not be negative",
- j, mAggregatedMetrics[j].scaleInvariantAlongTrajectorySse);
- alongTrajectoryRmseSum +=
- std::sqrt(mAggregatedMetrics[j].scaleInvariantAlongTrajectorySse /
- mAggregatedMetrics[j].scaleInvariantErrorsCount);
-
- LOG_ALWAYS_FATAL_IF(mAggregatedMetrics[j].scaleInvariantOffTrajectorySse < 0,
- "mAggregatedMetrics[%zu].scaleInvariantOffTrajectorySse = %f "
- "should not be negative",
- j, mAggregatedMetrics[j].scaleInvariantOffTrajectorySse);
- offTrajectoryRmseSum +=
- std::sqrt(mAggregatedMetrics[j].scaleInvariantOffTrajectorySse /
- mAggregatedMetrics[j].scaleInvariantErrorsCount);
+ // Scale-invariant errors: the average scale-invariant error across all time buckets
+ // is reported in the last time bucket.
+ {
+ // Compute error averages.
+ float alongTrajectoryRmseSum = 0;
+ float offTrajectoryRmseSum = 0;
+ int bucket_count = 0;
+ for (size_t j = 0; j < mAggregatedMetrics.size(); ++j) {
+ if (mAggregatedMetrics[j].scaleInvariantErrorsCount == 0) {
+ continue;
}
- const float averageAlongTrajectoryRmse =
- alongTrajectoryRmseSum / mAggregatedMetrics.size();
+ LOG_ALWAYS_FATAL_IF(mAggregatedMetrics[j].scaleInvariantAlongTrajectorySse < 0,
+ "mAggregatedMetrics[%zu].scaleInvariantAlongTrajectorySse = %f "
+ "should not be negative",
+ j, mAggregatedMetrics[j].scaleInvariantAlongTrajectorySse);
+ alongTrajectoryRmseSum +=
+ std::sqrt(mAggregatedMetrics[j].scaleInvariantAlongTrajectorySse /
+ mAggregatedMetrics[j].scaleInvariantErrorsCount);
+
+ LOG_ALWAYS_FATAL_IF(mAggregatedMetrics[j].scaleInvariantOffTrajectorySse < 0,
+ "mAggregatedMetrics[%zu].scaleInvariantOffTrajectorySse = %f "
+ "should not be negative",
+ j, mAggregatedMetrics[j].scaleInvariantOffTrajectorySse);
+ offTrajectoryRmseSum += std::sqrt(mAggregatedMetrics[j].scaleInvariantOffTrajectorySse /
+ mAggregatedMetrics[j].scaleInvariantErrorsCount);
+
+ ++bucket_count;
+ }
+
+ if (bucket_count > 0) {
+ const float averageAlongTrajectoryRmse = alongTrajectoryRmseSum / bucket_count;
mAtomFields.back().scaleInvariantAlongTrajectoryRmse =
static_cast<int>(averageAlongTrajectoryRmse * 1000);
- const float averageOffTrajectoryRmse = offTrajectoryRmseSum / mAggregatedMetrics.size();
+ const float averageOffTrajectoryRmse = offTrajectoryRmseSum / bucket_count;
mAtomFields.back().scaleInvariantOffTrajectoryRmse =
static_cast<int>(averageOffTrajectoryRmse * 1000);
}
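
Editor's note: the reworked scale-invariant reporting above averages the per-bucket RMSE only over buckets that actually accumulated scale-invariant errors, instead of dividing by the fixed number of buckets. A hedged sketch of that averaging, with made-up per-bucket sums and counts:

#include <cmath>
#include <cstdio>
#include <vector>

struct BucketMetrics {
    float scaleInvariantAlongTrajectorySse = 0;  // sum of squared errors for this bucket
    int scaleInvariantErrorsCount = 0;           // number of errors accumulated
};

int main() {
    // Hypothetical aggregated metrics: the last bucket never received errors.
    std::vector<BucketMetrics> buckets = {{8.0f, 2}, {18.0f, 2}, {0.0f, 0}};

    float rmseSum = 0;
    int bucketCount = 0;
    for (const BucketMetrics& b : buckets) {
        if (b.scaleInvariantErrorsCount == 0) continue;  // skip empty buckets
        rmseSum += std::sqrt(b.scaleInvariantAlongTrajectorySse / b.scaleInvariantErrorsCount);
        ++bucketCount;
    }
    if (bucketCount > 0) {
        // Average RMSE across the populated buckets, reported in millipixels (prints 2500).
        std::printf("scaleInvariantAlongTrajectoryRmse=%d\n",
                    static_cast<int>(rmseSum / bucketCount * 1000));
    }
    return 0;
}
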
diff --git a/libs/input/TfLiteMotionPredictor.cpp b/libs/input/TfLiteMotionPredictor.cpp
index d17476e216..b843a4bbf6 100644
--- a/libs/input/TfLiteMotionPredictor.cpp
+++ b/libs/input/TfLiteMotionPredictor.cpp
@@ -281,6 +281,8 @@ std::unique_ptr<TfLiteMotionPredictorModel> TfLiteMotionPredictorModel::create()
Config config{
.predictionInterval = parseXMLInt64(*configRoot, "prediction-interval"),
.distanceNoiseFloor = parseXMLFloat(*configRoot, "distance-noise-floor"),
+ .lowJerk = parseXMLFloat(*configRoot, "low-jerk"),
+ .highJerk = parseXMLFloat(*configRoot, "high-jerk"),
};
return std::unique_ptr<TfLiteMotionPredictorModel>(
diff --git a/libs/input/android/os/IInputConstants.aidl b/libs/input/android/os/IInputConstants.aidl
index 8f6f95b7d1..a77dfa59fe 100644
--- a/libs/input/android/os/IInputConstants.aidl
+++ b/libs/input/android/os/IInputConstants.aidl
@@ -49,12 +49,130 @@ interface IInputConstants
const int POLICY_FLAG_INJECTED_FROM_ACCESSIBILITY = 0x20000;
/**
+ * This flag indicates that the window that received this motion event is partly
+ * or wholly obscured by another visible window above it and the event directly passed through
+ * the obscured area.
+ *
+ * A security sensitive application can check this flag to identify situations in which
+ * a malicious application may have covered up part of its content for the purpose
+ * of misleading the user or hijacking touches. An appropriate response might be
+ * to drop the suspect touches or to take additional precautions to confirm the user's
+ * actual intent.
+ */
+ const int MOTION_EVENT_FLAG_WINDOW_IS_OBSCURED = 0x1;
+
+ /**
+ * This flag indicates that the window that received this motion event is partly
+ * or wholly obscured by another visible window above it and the event did not directly pass
+ * through the obscured area.
+ *
+ * A security sensitive application can check this flag to identify situations in which
+ * a malicious application may have covered up part of its content for the purpose
+ * of misleading the user or hijacking touches. An appropriate response might be
+ * to drop the suspect touches or to take additional precautions to confirm the user's
+ * actual intent.
+ *
+ * Unlike FLAG_WINDOW_IS_OBSCURED, this is only true if the window that received this event is
+ * obstructed in areas other than the touched location.
+ */
+ const int MOTION_EVENT_FLAG_WINDOW_IS_PARTIALLY_OBSCURED = 0x2;
+
+ /**
+ * This private flag is only set on {@link #ACTION_HOVER_MOVE} events and indicates that
+ * this event will be immediately followed by a {@link #ACTION_HOVER_EXIT}. It is used to
+ * prevent generating redundant {@link #ACTION_HOVER_ENTER} events.
+ * @hide
+ */
+ const int MOTION_EVENT_FLAG_HOVER_EXIT_PENDING = 0x4;
+
+ /**
+ * This flag indicates that the event has been generated by a gesture generator. It
+ * provides a hint to the GestureDetector to not apply any touch slop.
+ *
+ * @hide
+ */
+ const int MOTION_EVENT_FLAG_IS_GENERATED_GESTURE = 0x8;
+
+ /**
+ * This flag is only set for events with {@link #ACTION_POINTER_UP} and {@link #ACTION_CANCEL}.
+ * It indicates that the pointer going up was an unintentional user touch. When FLAG_CANCELED
+ * is set, the typical actions that occur in response to a pointer going up (such as click
+ * handlers, end of drawing) should be aborted. This flag is typically set when the user was
+ * accidentally touching the screen, such as by gripping the device, or placing the palm on the
+ * screen.
+ *
+ * @see #ACTION_POINTER_UP
+ * @see #ACTION_CANCEL
+ */
+ const int INPUT_EVENT_FLAG_CANCELED = 0x20;
+
+ /**
+ * This flag indicates that the event will not cause a focus change if it is directed to an
+ * unfocused window, even if it is an {@link #ACTION_DOWN}. This is typically used with pointer
+ * gestures to allow the user to direct gestures to an unfocused window without bringing the
+ * window into focus.
+ * @hide
+ */
+ const int MOTION_EVENT_FLAG_NO_FOCUS_CHANGE = 0x40;
+
+ /**
+ * This flag indicates that the event has a valid value for AXIS_ORIENTATION.
+ *
+ * This is a private flag that is not used in Java.
+ * @hide
+ */
+ const int MOTION_EVENT_PRIVATE_FLAG_SUPPORTS_ORIENTATION = 0x80;
+
+ /**
+ * This flag indicates that the pointers' AXIS_ORIENTATION can be used to precisely determine
+ * the direction in which the tool is pointing. The value of the orientation axis will be in
+ * the range [-pi, pi], which represents a full circle. This is usually supported by devices
+ * like styluses.
+ *
+ * Conversely, AXIS_ORIENTATION cannot be used to tell which direction the tool is pointing
+ * when this flag is not set. In this case, the axis value will have a range of [-pi/2, pi/2],
+ * which represents half a circle. This is usually the case for devices like touchscreens and
+ * touchpads, for which it is difficult to tell which direction along the major axis of the
+ * touch ellipse the finger is pointing.
+ *
+ * This is a private flag that is not used in Java.
+ * @hide
+ */
+ const int MOTION_EVENT_PRIVATE_FLAG_SUPPORTS_DIRECTIONAL_ORIENTATION = 0x100;
+
+ /**
* The input event was generated or modified by accessibility service.
* Shared by both KeyEvent and MotionEvent flags, so this value should not overlap with either
* set of flags, including in input/Input.h and in android/input.h.
*/
const int INPUT_EVENT_FLAG_IS_ACCESSIBILITY_EVENT = 0x800;
+ /**
+ * Private flag that indicates when the system has detected that this motion event
+ * may be inconsistent with respect to the sequence of previously delivered motion events,
+ * such as when a pointer move event is sent but the pointer is not down.
+ *
+ * @hide
+ * @see #isTainted
+ * @see #setTainted
+ */
+ const int INPUT_EVENT_FLAG_TAINTED = 0x80000000;
+
+ /**
+ * Private flag indicating that this event was synthesized by the system and should be delivered
+ * to the accessibility focused view first. When being dispatched, such an event is not handled
+ * by predecessors of the accessibility focused view; after the event reaches that view, the
+ * flag is cleared and normal event dispatch is performed. This ensures that the platform can
+ * click on any view that has accessibility focus, which is semantically equivalent to asking the
+ * view to perform a click accessibility action, but is more generic, since views that do not
+ * implement the click action correctly can still be activated.
+ *
+ * @hide
+ * @see #isTargetAccessibilityFocus()
+ * @see #setTargetAccessibilityFocus(boolean)
+ */
+ const int MOTION_EVENT_FLAG_TARGET_ACCESSIBILITY_FOCUS = 0x40000000;
+
/* The default pointer acceleration value. */
const int DEFAULT_POINTER_ACCELERATION = 3;
@@ -141,4 +259,157 @@ interface IInputConstants
* time to adjust to changes in direction.
*/
const int VELOCITY_TRACKER_STRATEGY_LEGACY = 9;
+
+
+ /*
+ * Input device class: Keyboard
+ * The input device is a keyboard or has buttons.
+ *
+ * @hide
+ */
+ const int DEVICE_CLASS_KEYBOARD = 0x00000001;
+
+ /*
+ * Input device class: Alphakey
+ * The input device is an alpha-numeric keyboard (not just a dial pad).
+ *
+ * @hide
+ */
+ const int DEVICE_CLASS_ALPHAKEY = 0x00000002;
+
+ /*
+ * Input device class: Touch
+ * The input device is a touchscreen or a touchpad (either single-touch or multi-touch).
+ *
+ * @hide
+ */
+ const int DEVICE_CLASS_TOUCH = 0x00000004;
+
+ /*
+ * Input device class: Cursor
+ * The input device is a cursor device such as a trackball or mouse.
+ *
+ * @hide
+ */
+ const int DEVICE_CLASS_CURSOR = 0x00000008;
+
+ /*
+ * Input device class: Multi-touch
+ * The input device is a multi-touch touchscreen or touchpad.
+ *
+ * @hide
+ */
+ const int DEVICE_CLASS_TOUCH_MT = 0x00000010;
+
+ /*
+ * Input device class: Dpad
+ * The input device is a directional pad (implies keyboard, has DPAD keys).
+ *
+ * @hide
+ */
+ const int DEVICE_CLASS_DPAD = 0x00000020;
+
+ /*
+ * Input device class: Gamepad
+ * The input device is a gamepad (implies keyboard, has BUTTON keys).
+ *
+ * @hide
+ */
+ const int DEVICE_CLASS_GAMEPAD = 0x00000040;
+
+ /*
+ * Input device class: Switch
+ * The input device has switches.
+ *
+ * @hide
+ */
+ const int DEVICE_CLASS_SWITCH = 0x00000080;
+
+ /*
+ * Input device class: Joystick
+ * The input device is a joystick (implies gamepad, has joystick absolute axes).
+ *
+ * @hide
+ */
+ const int DEVICE_CLASS_JOYSTICK = 0x00000100;
+
+ /*
+ * Input device class: Vibrator
+ * The input device has a vibrator (supports FF_RUMBLE).
+ *
+ * @hide
+ */
+ const int DEVICE_CLASS_VIBRATOR = 0x00000200;
+
+ /*
+ * Input device class: Mic
+ * The input device has a microphone.
+ *
+ * @hide
+ */
+ const int DEVICE_CLASS_MIC = 0x00000400;
+
+ /*
+ * Input device class: External Stylus
+ * The input device is an external stylus (has data we want to fuse with touch data).
+ *
+ * @hide
+ */
+ const int DEVICE_CLASS_EXTERNAL_STYLUS = 0x00000800;
+
+ /*
+ * Input device class: Rotary Encoder
+ * The input device has a rotary encoder.
+ *
+ * @hide
+ */
+ const int DEVICE_CLASS_ROTARY_ENCODER = 0x00001000;
+
+ /*
+ * Input device class: Sensor
+ * The input device has a sensor like accelerometer, gyro, etc.
+ *
+ * @hide
+ */
+ const int DEVICE_CLASS_SENSOR = 0x00002000;
+
+ /*
+ * Input device class: Battery
+ * The input device has a battery.
+ *
+ * @hide
+ */
+ const int DEVICE_CLASS_BATTERY = 0x00004000;
+
+ /*
+ * Input device class: Light
+ * The input device has sysfs controllable lights.
+ *
+ * @hide
+ */
+ const int DEVICE_CLASS_LIGHT = 0x00008000;
+
+ /*
+ * Input device class: Touchpad
+ * The input device is a touchpad, requiring an on-screen cursor.
+ *
+ * @hide
+ */
+ const int DEVICE_CLASS_TOUCHPAD = 0x00010000;
+
+ /*
+ * Input device class: Virtual
+ * The input device is virtual (not a real device, not part of UI configuration).
+ *
+ * @hide
+ */
+ const int DEVICE_CLASS_VIRTUAL = 0x20000000;
+
+ /*
+ * Input device class: External
+ * The input device is external (not built-in).
+ *
+ * @hide
+ */
+ const int DEVICE_CLASS_EXTERNAL = 0x40000000;
}
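
Editor's note: these constants are plain bit flags, so consumers distinguish the two obscured states with bitwise tests. A minimal sketch, using local values that mirror the MOTION_EVENT_FLAG_WINDOW_IS_OBSCURED and MOTION_EVENT_FLAG_WINDOW_IS_PARTIALLY_OBSCURED constants above:

#include <cstdint>
#include <cstdio>

// Local values mirroring the two obscured-window flags declared in IInputConstants.aidl.
constexpr uint32_t kFlagWindowIsObscured = 0x1;
constexpr uint32_t kFlagWindowIsPartiallyObscured = 0x2;

int main() {
    const uint32_t motionFlags = kFlagWindowIsPartiallyObscured;  // hypothetical event flags
    if (motionFlags & kFlagWindowIsObscured) {
        std::printf("event passed through an area obscured by another window\n");
    } else if (motionFlags & kFlagWindowIsPartiallyObscured) {
        std::printf("window is obscured elsewhere, but not at the touched location\n");
    }
    return 0;
}
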
diff --git a/libs/input/android/os/InputConfig.aidl b/libs/input/android/os/InputConfig.aidl
index 5d391551c2..da62e03821 100644
--- a/libs/input/android/os/InputConfig.aidl
+++ b/libs/input/android/os/InputConfig.aidl
@@ -157,4 +157,14 @@ enum InputConfig {
* like StatusBar and TaskBar.
*/
GLOBAL_STYLUS_BLOCKS_TOUCH = 1 << 17,
+
+ /**
+ * InputConfig used to indicate that this window is privacy sensitive. This may be used to
+ * redact input interactions from tracing or screen mirroring.
+ *
+ * This must be set on windows that use {@link WindowManager.LayoutParams#FLAG_SECURE},
+ * but it may also be set without setting FLAG_SECURE. The tracing configuration will
+ * determine how these sensitive events are eventually traced.
+ */
+ SENSITIVE_FOR_PRIVACY = 1 << 18,
}
diff --git a/libs/input/android/os/PointerIconType.aidl b/libs/input/android/os/PointerIconType.aidl
new file mode 100644
index 0000000000..f244c62ed6
--- /dev/null
+++ b/libs/input/android/os/PointerIconType.aidl
@@ -0,0 +1,56 @@
+/**
+ * Copyright (c) 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.os;
+
+/**
+ * Represents an icon that can be used as a mouse pointer.
+ * Please look at frameworks/base/core/java/android/view/PointerIcon.java for the detailed
+ * explanation of each constant.
+ * @hide
+ */
+@Backing(type="int")
+enum PointerIconType {
+ CUSTOM = -1,
+ TYPE_NULL = 0,
+ NOT_SPECIFIED = 1,
+ ARROW = 1000,
+ CONTEXT_MENU = 1001,
+ HAND = 1002,
+ HELP = 1003,
+ WAIT = 1004,
+ CELL = 1006,
+ CROSSHAIR = 1007,
+ TEXT = 1008,
+ VERTICAL_TEXT = 1009,
+ ALIAS = 1010,
+ COPY = 1011,
+ NO_DROP = 1012,
+ ALL_SCROLL = 1013,
+ HORIZONTAL_DOUBLE_ARROW = 1014,
+ VERTICAL_DOUBLE_ARROW = 1015,
+ TOP_RIGHT_DOUBLE_ARROW = 1016,
+ TOP_LEFT_DOUBLE_ARROW = 1017,
+ ZOOM_IN = 1018,
+ ZOOM_OUT = 1019,
+ GRAB = 1020,
+ GRABBING = 1021,
+ HANDWRITING = 1022,
+
+ SPOT_HOVER = 2000,
+ SPOT_TOUCH = 2001,
+ SPOT_ANCHOR = 2002,
+}
diff --git a/libs/input/input_flags.aconfig b/libs/input/input_flags.aconfig
index bdec5c33cd..a2192cbdc4 100644
--- a/libs/input/input_flags.aconfig
+++ b/libs/input/input_flags.aconfig
@@ -16,13 +16,6 @@ flag {
}
flag {
- name: "enable_pointer_choreographer"
- namespace: "input"
- description: "Set to true to enable PointerChoreographer: the new pipeline for showing pointer icons"
- bug: "293587049"
-}
-
-flag {
name: "enable_gestures_library_timer_provider"
namespace: "input"
description: "Set to true to enable timer support for the touchpad Gestures library"
@@ -87,6 +80,7 @@ flag {
flag {
name: "override_key_behavior_permission_apis"
+ is_exported: true
namespace: "input"
description: "enable override key behavior permission APIs"
bug: "309018874"
@@ -115,6 +109,7 @@ flag {
flag {
name: "input_device_view_behavior_api"
+ is_exported: true
namespace: "input"
description: "Controls the API to provide InputDevice view behavior."
bug: "246946631"
@@ -126,3 +121,39 @@ flag {
description: "Enable fling scrolling to be stopped by putting a finger on the touchpad again"
bug: "281106755"
}
+
+flag {
+ name: "enable_prediction_pruning_via_jerk_thresholding"
+ namespace: "input"
+ description: "Enable prediction pruning based on jerk thresholds."
+ bug: "266747654"
+ is_fixed_read_only: true
+}
+
+flag {
+ name: "device_associations"
+ namespace: "input"
+ description: "Binds InputDevice name and InputDevice description against display unique id."
+ bug: "324075859"
+}
+
+flag {
+ name: "enable_multi_device_same_window_stream"
+ namespace: "input"
+ description: "Allow multiple input devices to be active in the same window simultaneously"
+ bug: "330752824"
+}
+
+flag {
+ name: "hide_pointer_indicators_for_secure_windows"
+ namespace: "input"
+ description: "Hide touch and pointer indicators if a secure window is present on display"
+ bug: "325252005"
+}
+
+flag {
+ name: "enable_keyboard_classifier"
+ namespace: "input"
+ description: "Keyboard classifier that classifies all keyboards into alphabetic or non-alphabetic"
+ bug: "263559234"
+}
diff --git a/libs/input/rust/input.rs b/libs/input/rust/input.rs
index 705c959d04..564d94dbb4 100644
--- a/libs/input/rust/input.rs
+++ b/libs/input/rust/input.rs
@@ -16,13 +16,26 @@
//! Common definitions of the Android Input Framework in rust.
+use crate::ffi::RustInputDeviceIdentifier;
use bitflags::bitflags;
+use inputconstants::aidl::android::os::IInputConstants;
use std::fmt;
/// The InputDevice ID.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct DeviceId(pub i32);
+/// The InputDevice equivalent for Rust inputflinger
+#[derive(Debug)]
+pub struct InputDevice {
+ /// InputDevice ID
+ pub device_id: DeviceId,
+ /// InputDevice unique identifier
+ pub identifier: RustInputDeviceIdentifier,
+ /// InputDevice classes (equivalent to EventHub InputDeviceClass)
+ pub classes: DeviceClass,
+}
+
#[repr(u32)]
pub enum SourceClass {
None = input_bindgen::AINPUT_SOURCE_CLASS_NONE,
@@ -182,18 +195,29 @@ bitflags! {
/// MotionEvent flags.
#[derive(Debug)]
pub struct MotionFlags: u32 {
- /// FLAG_CANCELED
- const CANCELED = input_bindgen::AMOTION_EVENT_FLAG_CANCELED as u32;
/// FLAG_WINDOW_IS_OBSCURED
- const WINDOW_IS_OBSCURED = input_bindgen::AMOTION_EVENT_FLAG_WINDOW_IS_OBSCURED;
+ const WINDOW_IS_OBSCURED = IInputConstants::MOTION_EVENT_FLAG_WINDOW_IS_OBSCURED as u32;
/// FLAG_WINDOW_IS_PARTIALLY_OBSCURED
- const WINDOW_IS_PARTIALLY_OBSCURED =
- input_bindgen::AMOTION_EVENT_FLAG_WINDOW_IS_PARTIALLY_OBSCURED;
- /// FLAG_IS_ACCESSIBILITY_EVENT
- const IS_ACCESSIBILITY_EVENT =
- input_bindgen::AMOTION_EVENT_FLAG_IS_ACCESSIBILITY_EVENT;
+ const WINDOW_IS_PARTIALLY_OBSCURED = IInputConstants::MOTION_EVENT_FLAG_WINDOW_IS_PARTIALLY_OBSCURED as u32;
+ /// FLAG_HOVER_EXIT_PENDING
+ const HOVER_EXIT_PENDING = IInputConstants::MOTION_EVENT_FLAG_HOVER_EXIT_PENDING as u32;
+ /// FLAG_IS_GENERATED_GESTURE
+ const IS_GENERATED_GESTURE = IInputConstants::MOTION_EVENT_FLAG_IS_GENERATED_GESTURE as u32;
+ /// FLAG_CANCELED
+ const CANCELED = IInputConstants::INPUT_EVENT_FLAG_CANCELED as u32;
/// FLAG_NO_FOCUS_CHANGE
- const NO_FOCUS_CHANGE = input_bindgen::AMOTION_EVENT_FLAG_NO_FOCUS_CHANGE;
+ const NO_FOCUS_CHANGE = IInputConstants::MOTION_EVENT_FLAG_NO_FOCUS_CHANGE as u32;
+ /// PRIVATE_FLAG_SUPPORTS_ORIENTATION
+ const PRIVATE_SUPPORTS_ORIENTATION = IInputConstants::MOTION_EVENT_PRIVATE_FLAG_SUPPORTS_ORIENTATION as u32;
+ /// PRIVATE_FLAG_SUPPORTS_DIRECTIONAL_ORIENTATION
+ const PRIVATE_SUPPORTS_DIRECTIONAL_ORIENTATION =
+ IInputConstants::MOTION_EVENT_PRIVATE_FLAG_SUPPORTS_DIRECTIONAL_ORIENTATION as u32;
+ /// FLAG_IS_ACCESSIBILITY_EVENT
+ const IS_ACCESSIBILITY_EVENT = IInputConstants::INPUT_EVENT_FLAG_IS_ACCESSIBILITY_EVENT as u32;
+ /// FLAG_TAINTED
+ const TAINTED = IInputConstants::INPUT_EVENT_FLAG_TAINTED as u32;
+ /// FLAG_TARGET_ACCESSIBILITY_FOCUS
+ const TARGET_ACCESSIBILITY_FOCUS = IInputConstants::MOTION_EVENT_FLAG_TARGET_ACCESSIBILITY_FOCUS as u32;
}
}
@@ -205,6 +229,107 @@ impl Source {
}
}
+bitflags! {
+ /// Device class of the input device. These are duplicated from EventHub.h.
+ /// We need to make sure the two versions remain in sync when adding new classes.
+ #[derive(Debug, PartialEq)]
+ pub struct DeviceClass: u32 {
+ /// The input device is a keyboard or has buttons
+ const Keyboard = IInputConstants::DEVICE_CLASS_KEYBOARD as u32;
+ /// The input device is an alpha-numeric keyboard (not just a dial pad)
+ const AlphabeticKey = IInputConstants::DEVICE_CLASS_ALPHAKEY as u32;
+ /// The input device is a touchscreen or a touchpad (either single-touch or multi-touch)
+ const Touch = IInputConstants::DEVICE_CLASS_TOUCH as u32;
+ /// The input device is a cursor device such as a trackball or mouse.
+ const Cursor = IInputConstants::DEVICE_CLASS_CURSOR as u32;
+ /// The input device is a multi-touch touchscreen or touchpad.
+ const MultiTouch = IInputConstants::DEVICE_CLASS_TOUCH_MT as u32;
+ /// The input device is a directional pad (implies keyboard, has DPAD keys).
+ const Dpad = IInputConstants::DEVICE_CLASS_DPAD as u32;
+ /// The input device is a gamepad (implies keyboard, has BUTTON keys).
+ const Gamepad = IInputConstants::DEVICE_CLASS_GAMEPAD as u32;
+ /// The input device has switches.
+ const Switch = IInputConstants::DEVICE_CLASS_SWITCH as u32;
+ /// The input device is a joystick (implies gamepad, has joystick absolute axes).
+ const Joystick = IInputConstants::DEVICE_CLASS_JOYSTICK as u32;
+ /// The input device has a vibrator (supports FF_RUMBLE).
+ const Vibrator = IInputConstants::DEVICE_CLASS_VIBRATOR as u32;
+ /// The input device has a microphone.
+ const Mic = IInputConstants::DEVICE_CLASS_MIC as u32;
+ /// The input device is an external stylus (has data we want to fuse with touch data).
+ const ExternalStylus = IInputConstants::DEVICE_CLASS_EXTERNAL_STYLUS as u32;
+ /// The input device has a rotary encoder
+ const RotaryEncoder = IInputConstants::DEVICE_CLASS_ROTARY_ENCODER as u32;
+ /// The input device has a sensor like accelerometer, gyro, etc
+ const Sensor = IInputConstants::DEVICE_CLASS_SENSOR as u32;
+ /// The input device has a battery
+ const Battery = IInputConstants::DEVICE_CLASS_BATTERY as u32;
+ /// The input device has sysfs controllable lights
+ const Light = IInputConstants::DEVICE_CLASS_LIGHT as u32;
+ /// The input device is a touchpad, requiring an on-screen cursor.
+ const Touchpad = IInputConstants::DEVICE_CLASS_TOUCHPAD as u32;
+ /// The input device is virtual (not a real device, not part of UI configuration).
+ const Virtual = IInputConstants::DEVICE_CLASS_VIRTUAL as u32;
+ /// The input device is external (not built-in).
+ const External = IInputConstants::DEVICE_CLASS_EXTERNAL as u32;
+ }
+}
+
+bitflags! {
+ /// Modifier state flags
+ #[derive(Default, Debug, Copy, Clone, Eq, PartialEq)]
+ pub struct ModifierState: u32 {
+ /// No meta keys are pressed
+ const None = input_bindgen::AMETA_NONE;
+ /// This mask is used to check whether one of the ALT meta keys is pressed
+ const AltOn = input_bindgen::AMETA_ALT_ON;
+ /// This mask is used to check whether the left ALT meta key is pressed
+ const AltLeftOn = input_bindgen::AMETA_ALT_LEFT_ON;
+ /// This mask is used to check whether the right ALT meta key is pressed
+ const AltRightOn = input_bindgen::AMETA_ALT_RIGHT_ON;
+ /// This mask is used to check whether one of the SHIFT meta keys is pressed
+ const ShiftOn = input_bindgen::AMETA_SHIFT_ON;
+ /// This mask is used to check whether the left SHIFT meta key is pressed
+ const ShiftLeftOn = input_bindgen::AMETA_SHIFT_LEFT_ON;
+ /// This mask is used to check whether the right SHIFT meta key is pressed
+ const ShiftRightOn = input_bindgen::AMETA_SHIFT_RIGHT_ON;
+ /// This mask is used to check whether the SYM meta key is pressed
+ const SymOn = input_bindgen::AMETA_SYM_ON;
+ /// This mask is used to check whether the FUNCTION meta key is pressed
+ const FunctionOn = input_bindgen::AMETA_FUNCTION_ON;
+ /// This mask is used to check whether one of the CTRL meta keys is pressed
+ const CtrlOn = input_bindgen::AMETA_CTRL_ON;
+ /// This mask is used to check whether the left CTRL meta key is pressed
+ const CtrlLeftOn = input_bindgen::AMETA_CTRL_LEFT_ON;
+ /// This mask is used to check whether the right CTRL meta key is pressed
+ const CtrlRightOn = input_bindgen::AMETA_CTRL_RIGHT_ON;
+ /// This mask is used to check whether one of the META meta keys is pressed
+ const MetaOn = input_bindgen::AMETA_META_ON;
+ /// This mask is used to check whether the left META meta key is pressed
+ const MetaLeftOn = input_bindgen::AMETA_META_LEFT_ON;
+ /// This mask is used to check whether the right META meta key is pressed
+ const MetaRightOn = input_bindgen::AMETA_META_RIGHT_ON;
+ /// This mask is used to check whether the CAPS LOCK meta key is on
+ const CapsLockOn = input_bindgen::AMETA_CAPS_LOCK_ON;
+ /// This mask is used to check whether the NUM LOCK meta key is on
+ const NumLockOn = input_bindgen::AMETA_NUM_LOCK_ON;
+ /// This mask is used to check whether the SCROLL LOCK meta key is on
+ const ScrollLockOn = input_bindgen::AMETA_SCROLL_LOCK_ON;
+ }
+}
+
+/// A rust enum representation of a Keyboard type.
+#[repr(u32)]
+#[derive(Debug, Copy, Clone, Eq, PartialEq)]
+pub enum KeyboardType {
+ /// KEYBOARD_TYPE_NONE
+ None = input_bindgen::AINPUT_KEYBOARD_TYPE_NONE,
+ /// KEYBOARD_TYPE_NON_ALPHABETIC
+ NonAlphabetic = input_bindgen::AINPUT_KEYBOARD_TYPE_NON_ALPHABETIC,
+ /// KEYBOARD_TYPE_ALPHABETIC
+ Alphabetic = input_bindgen::AINPUT_KEYBOARD_TYPE_ALPHABETIC,
+}
+
#[cfg(test)]
mod tests {
use crate::input::SourceClass;
diff --git a/libs/input/rust/keyboard_classifier.rs b/libs/input/rust/keyboard_classifier.rs
new file mode 100644
index 0000000000..1063fac664
--- /dev/null
+++ b/libs/input/rust/keyboard_classifier.rs
@@ -0,0 +1,345 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//! Contains the KeyboardClassifier, that tries to identify whether an Input device is an
+//! alphabetic or non-alphabetic keyboard. It also tracks the KeyEvents produced by the device
+//! in order to verify/change the inferred keyboard type.
+//!
+//! Initial classification:
+//! - If DeviceClass includes Dpad, Touch, Cursor, MultiTouch, ExternalStylus, Touchpad,
+//! Gamepad, Switch, Joystick, RotaryEncoder => KeyboardType::NonAlphabetic
+//! - Otherwise if DeviceClass has Keyboard and not AlphabeticKey => KeyboardType::NonAlphabetic
+//! - Otherwise if DeviceClass has both Keyboard and AlphabeticKey => KeyboardType::Alphabetic
+//!
+//! On process keys:
+//! - If KeyboardType::NonAlphabetic and we receive an alphabetic key event, then change type to
+//! KeyboardType::Alphabetic. Once changed, no further changes. (i.e. verified = true)
+//! - TODO(b/263559234): If KeyboardType::Alphabetic and we don't receive any alphabetic key event
+//! across multiple device connections in a time period, then change type to
+//! KeyboardType::NonAlphabetic. Once changed, it can still change back to Alphabetic
+//! (i.e. verified = false).
+//!
+//! TODO(b/263559234): Data store implementation to store information about past classification
+
+use crate::input::{DeviceId, InputDevice, KeyboardType};
+use crate::{DeviceClass, ModifierState};
+use std::collections::HashMap;
+
+/// The KeyboardClassifier is used to classify a keyboard device into non-keyboard, alphabetic
+/// keyboard or non-alphabetic keyboard
+#[derive(Default)]
+pub struct KeyboardClassifier {
+ device_map: HashMap<DeviceId, KeyboardInfo>,
+}
+
+struct KeyboardInfo {
+ _device: InputDevice,
+ keyboard_type: KeyboardType,
+ is_finalized: bool,
+}
+
+impl KeyboardClassifier {
+ /// Create a new KeyboardClassifier
+ pub fn new() -> Self {
+ Default::default()
+ }
+
+ /// Adds keyboard to KeyboardClassifier
+ pub fn notify_keyboard_changed(&mut self, device: InputDevice) {
+ let (keyboard_type, is_finalized) = self.classify_keyboard(&device);
+ self.device_map.insert(
+ device.device_id,
+ KeyboardInfo { _device: device, keyboard_type, is_finalized },
+ );
+ }
+
+ /// Get keyboard type for a tracked keyboard in KeyboardClassifier
+ pub fn get_keyboard_type(&self, device_id: DeviceId) -> KeyboardType {
+ return if let Some(keyboard) = self.device_map.get(&device_id) {
+ keyboard.keyboard_type
+ } else {
+ KeyboardType::None
+ };
+ }
+
+ /// Tells if keyboard type classification is finalized. Once finalized, the classification can't
+ /// change until the device is reconnected again.
+ ///
+ /// Finalized devices are either "alphabetic" keyboards or keyboards in the blocklist or
+ /// allowlist that are explicitly categorized and won't change with future key events.
+ pub fn is_finalized(&self, device_id: DeviceId) -> bool {
+ return if let Some(keyboard) = self.device_map.get(&device_id) {
+ keyboard.is_finalized
+ } else {
+ false
+ };
+ }
+
+ /// Process a key event and change keyboard type if required.
+ /// - If any key event occurs, the keyboard type will change from None to NonAlphabetic
+ /// - If an alphabetic key occurs, the keyboard type will change to Alphabetic
+ pub fn process_key(
+ &mut self,
+ device_id: DeviceId,
+ evdev_code: i32,
+ modifier_state: ModifierState,
+ ) {
+ if let Some(keyboard) = self.device_map.get_mut(&device_id) {
+ // Ignore all key events with modifier state since they can be macro shortcuts used by
+ // some non-keyboard peripherals like TV remotes, game controllers, etc.
+ if modifier_state.bits() != 0 {
+ return;
+ }
+ if Self::is_alphabetic_key(&evdev_code) {
+ keyboard.keyboard_type = KeyboardType::Alphabetic;
+ keyboard.is_finalized = true;
+ }
+ }
+ }
+
+ fn classify_keyboard(&self, device: &InputDevice) -> (KeyboardType, bool) {
+ // This should never happen, but having the keyboard device class is necessary for a device
+ // to be classified as any type of keyboard.
+ if !device.classes.contains(DeviceClass::Keyboard) {
+ return (KeyboardType::None, true);
+ }
+ // Normal classification for internal and virtual keyboards
+ if !device.classes.contains(DeviceClass::External)
+ || device.classes.contains(DeviceClass::Virtual)
+ {
+ return if device.classes.contains(DeviceClass::AlphabeticKey) {
+ (KeyboardType::Alphabetic, true)
+ } else {
+ (KeyboardType::NonAlphabetic, true)
+ };
+ }
+ // Any composite device with multiple device classes should be categorized as a non-alphabetic
+ // keyboard initially.
+ if device.classes.contains(DeviceClass::Touch)
+ || device.classes.contains(DeviceClass::Cursor)
+ || device.classes.contains(DeviceClass::MultiTouch)
+ || device.classes.contains(DeviceClass::ExternalStylus)
+ || device.classes.contains(DeviceClass::Touchpad)
+ || device.classes.contains(DeviceClass::Dpad)
+ || device.classes.contains(DeviceClass::Gamepad)
+ || device.classes.contains(DeviceClass::Switch)
+ || device.classes.contains(DeviceClass::Joystick)
+ || device.classes.contains(DeviceClass::RotaryEncoder)
+ {
+ // If categorized as NonAlphabetic and no device class AlphabeticKey reported by the
+ // kernel, we no longer need to process key events to verify.
+ return (
+ KeyboardType::NonAlphabetic,
+ !device.classes.contains(DeviceClass::AlphabeticKey),
+ );
+ }
+ // Only devices with "Keyboard" and "AlphabeticKey" should be classified as full keyboard
+ if device.classes.contains(DeviceClass::AlphabeticKey) {
+ (KeyboardType::Alphabetic, true)
+ } else {
+ // If categorized as NonAlphabetic and no device class AlphabeticKey reported by the
+ // kernel, we no longer need to process key events to verify.
+ (KeyboardType::NonAlphabetic, true)
+ }
+ }
+
+ fn is_alphabetic_key(evdev_code: &i32) -> bool {
+ // Keyboard alphabetic row 1 (Q W E R T Y U I O P [ ])
+ (16..=27).contains(evdev_code)
+ // Keyboard alphabetic row 2 (A S D F G H J K L ; ' `)
+ || (30..=41).contains(evdev_code)
+ // Keyboard alphabetic row 3 (\ Z X C V B N M , . /)
+ || (43..=53).contains(evdev_code)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::input::{DeviceId, InputDevice, KeyboardType};
+ use crate::keyboard_classifier::KeyboardClassifier;
+ use crate::{DeviceClass, ModifierState, RustInputDeviceIdentifier};
+
+ static DEVICE_ID: DeviceId = DeviceId(1);
+ static KEY_A: i32 = 30;
+ static KEY_1: i32 = 2;
+
+ #[test]
+ fn classify_external_alphabetic_keyboard() {
+ let mut classifier = KeyboardClassifier::new();
+ classifier.notify_keyboard_changed(create_device(
+ DeviceClass::Keyboard | DeviceClass::AlphabeticKey | DeviceClass::External,
+ ));
+ assert_eq!(classifier.get_keyboard_type(DEVICE_ID), KeyboardType::Alphabetic);
+ assert!(classifier.is_finalized(DEVICE_ID));
+ }
+
+ #[test]
+ fn classify_external_non_alphabetic_keyboard() {
+ let mut classifier = KeyboardClassifier::new();
+ classifier
+ .notify_keyboard_changed(create_device(DeviceClass::Keyboard | DeviceClass::External));
+ assert_eq!(classifier.get_keyboard_type(DEVICE_ID), KeyboardType::NonAlphabetic);
+ assert!(classifier.is_finalized(DEVICE_ID));
+ }
+
+ #[test]
+ fn classify_mouse_pretending_as_keyboard() {
+ let mut classifier = KeyboardClassifier::new();
+ classifier.notify_keyboard_changed(create_device(
+ DeviceClass::Keyboard
+ | DeviceClass::Cursor
+ | DeviceClass::AlphabeticKey
+ | DeviceClass::External,
+ ));
+ assert_eq!(classifier.get_keyboard_type(DEVICE_ID), KeyboardType::NonAlphabetic);
+ assert!(!classifier.is_finalized(DEVICE_ID));
+ }
+
+ #[test]
+ fn classify_touchpad_pretending_as_keyboard() {
+ let mut classifier = KeyboardClassifier::new();
+ classifier.notify_keyboard_changed(create_device(
+ DeviceClass::Keyboard
+ | DeviceClass::Touchpad
+ | DeviceClass::AlphabeticKey
+ | DeviceClass::External,
+ ));
+ assert_eq!(classifier.get_keyboard_type(DEVICE_ID), KeyboardType::NonAlphabetic);
+ assert!(!classifier.is_finalized(DEVICE_ID));
+ }
+
+ #[test]
+ fn classify_stylus_pretending_as_keyboard() {
+ let mut classifier = KeyboardClassifier::new();
+ classifier.notify_keyboard_changed(create_device(
+ DeviceClass::Keyboard
+ | DeviceClass::ExternalStylus
+ | DeviceClass::AlphabeticKey
+ | DeviceClass::External,
+ ));
+ assert_eq!(classifier.get_keyboard_type(DEVICE_ID), KeyboardType::NonAlphabetic);
+ assert!(!classifier.is_finalized(DEVICE_ID));
+ }
+
+ #[test]
+ fn classify_dpad_pretending_as_keyboard() {
+ let mut classifier = KeyboardClassifier::new();
+ classifier.notify_keyboard_changed(create_device(
+ DeviceClass::Keyboard
+ | DeviceClass::Dpad
+ | DeviceClass::AlphabeticKey
+ | DeviceClass::External,
+ ));
+ assert_eq!(classifier.get_keyboard_type(DEVICE_ID), KeyboardType::NonAlphabetic);
+ assert!(!classifier.is_finalized(DEVICE_ID));
+ }
+
+ #[test]
+ fn classify_joystick_pretending_as_keyboard() {
+ let mut classifier = KeyboardClassifier::new();
+ classifier.notify_keyboard_changed(create_device(
+ DeviceClass::Keyboard
+ | DeviceClass::Joystick
+ | DeviceClass::AlphabeticKey
+ | DeviceClass::External,
+ ));
+ assert_eq!(classifier.get_keyboard_type(DEVICE_ID), KeyboardType::NonAlphabetic);
+ assert!(!classifier.is_finalized(DEVICE_ID));
+ }
+
+ #[test]
+ fn classify_gamepad_pretending_as_keyboard() {
+ let mut classifier = KeyboardClassifier::new();
+ classifier.notify_keyboard_changed(create_device(
+ DeviceClass::Keyboard
+ | DeviceClass::Gamepad
+ | DeviceClass::AlphabeticKey
+ | DeviceClass::External,
+ ));
+ assert_eq!(classifier.get_keyboard_type(DEVICE_ID), KeyboardType::NonAlphabetic);
+ assert!(!classifier.is_finalized(DEVICE_ID));
+ }
+
+ #[test]
+ fn reclassify_keyboard_on_alphabetic_key_event() {
+ let mut classifier = KeyboardClassifier::new();
+ classifier.notify_keyboard_changed(create_device(
+ DeviceClass::Keyboard
+ | DeviceClass::Dpad
+ | DeviceClass::AlphabeticKey
+ | DeviceClass::External,
+ ));
+ assert_eq!(classifier.get_keyboard_type(DEVICE_ID), KeyboardType::NonAlphabetic);
+ assert!(!classifier.is_finalized(DEVICE_ID));
+
+        // An alphabetic key press reclassifies the device as an alphabetic keyboard.
+ classifier.process_key(DEVICE_ID, KEY_A, ModifierState::None);
+ assert_eq!(classifier.get_keyboard_type(DEVICE_ID), KeyboardType::Alphabetic);
+ assert!(classifier.is_finalized(DEVICE_ID));
+ }
+
+ #[test]
+ fn dont_reclassify_keyboard_on_non_alphabetic_key_event() {
+ let mut classifier = KeyboardClassifier::new();
+ classifier.notify_keyboard_changed(create_device(
+ DeviceClass::Keyboard
+ | DeviceClass::Dpad
+ | DeviceClass::AlphabeticKey
+ | DeviceClass::External,
+ ));
+ assert_eq!(classifier.get_keyboard_type(DEVICE_ID), KeyboardType::NonAlphabetic);
+ assert!(!classifier.is_finalized(DEVICE_ID));
+
+        // A numeric key press does not change the classification.
+ classifier.process_key(DEVICE_ID, KEY_1, ModifierState::None);
+ assert_eq!(classifier.get_keyboard_type(DEVICE_ID), KeyboardType::NonAlphabetic);
+ assert!(!classifier.is_finalized(DEVICE_ID));
+ }
+
+ #[test]
+ fn dont_reclassify_keyboard_on_alphabetic_key_event_with_modifiers() {
+ let mut classifier = KeyboardClassifier::new();
+ classifier.notify_keyboard_changed(create_device(
+ DeviceClass::Keyboard
+ | DeviceClass::Dpad
+ | DeviceClass::AlphabeticKey
+ | DeviceClass::External,
+ ));
+ assert_eq!(classifier.get_keyboard_type(DEVICE_ID), KeyboardType::NonAlphabetic);
+ assert!(!classifier.is_finalized(DEVICE_ID));
+
+ classifier.process_key(DEVICE_ID, KEY_A, ModifierState::CtrlOn);
+ assert_eq!(classifier.get_keyboard_type(DEVICE_ID), KeyboardType::NonAlphabetic);
+ assert!(!classifier.is_finalized(DEVICE_ID));
+ }
+
+ fn create_device(classes: DeviceClass) -> InputDevice {
+ InputDevice {
+ device_id: DEVICE_ID,
+ identifier: RustInputDeviceIdentifier {
+ name: "test_device".to_string(),
+ location: "location".to_string(),
+ unique_id: "unique_id".to_string(),
+ bus: 123,
+ vendor: 234,
+ product: 345,
+ version: 567,
+ descriptor: "descriptor".to_string(),
+ },
+ classes,
+ }
+ }
+}
diff --git a/libs/input/rust/lib.rs b/libs/input/rust/lib.rs
index 01d959942c..5010475334 100644
--- a/libs/input/rust/lib.rs
+++ b/libs/input/rust/lib.rs
@@ -18,9 +18,13 @@
mod input;
mod input_verifier;
+mod keyboard_classifier;
-pub use input::{DeviceId, MotionAction, MotionFlags, Source};
+pub use input::{
+ DeviceClass, DeviceId, InputDevice, ModifierState, MotionAction, MotionFlags, Source,
+};
pub use input_verifier::InputVerifier;
+pub use keyboard_classifier::KeyboardClassifier;
#[cxx::bridge(namespace = "android::input")]
#[allow(unsafe_op_in_unsafe_fn)]
@@ -47,7 +51,8 @@ mod ffi {
/// }
/// ```
type InputVerifier;
- fn create(name: String) -> Box<InputVerifier>;
+ #[cxx_name = create]
+ fn create_input_verifier(name: String) -> Box<InputVerifier>;
fn process_movement(
verifier: &mut InputVerifier,
device_id: i32,
@@ -59,15 +64,53 @@ mod ffi {
fn reset_device(verifier: &mut InputVerifier, device_id: i32);
}
+ #[namespace = "android::input::keyboardClassifier"]
+ extern "Rust" {
+        /// Used to classify a keyboard as alphabetic or non-alphabetic
+ type KeyboardClassifier;
+ #[cxx_name = create]
+ fn create_keyboard_classifier() -> Box<KeyboardClassifier>;
+ #[cxx_name = notifyKeyboardChanged]
+ fn notify_keyboard_changed(
+ classifier: &mut KeyboardClassifier,
+ device_id: i32,
+ identifier: RustInputDeviceIdentifier,
+ device_classes: u32,
+ );
+ #[cxx_name = getKeyboardType]
+ fn get_keyboard_type(classifier: &mut KeyboardClassifier, device_id: i32) -> u32;
+ #[cxx_name = isFinalized]
+ fn is_finalized(classifier: &mut KeyboardClassifier, device_id: i32) -> bool;
+ #[cxx_name = processKey]
+ fn process_key(
+ classifier: &mut KeyboardClassifier,
+ device_id: i32,
+ evdev_code: i32,
+ modifier_state: u32,
+ );
+ }
+
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub struct RustPointerProperties {
pub id: i32,
}
+
+ #[derive(Debug)]
+ pub struct RustInputDeviceIdentifier {
+ pub name: String,
+ pub location: String,
+ pub unique_id: String,
+ pub bus: u16,
+ pub vendor: u16,
+ pub product: u16,
+ pub version: u16,
+ pub descriptor: String,
+ }
}
-use crate::ffi::RustPointerProperties;
+use crate::ffi::{RustInputDeviceIdentifier, RustPointerProperties};
-fn create(name: String) -> Box<InputVerifier> {
+fn create_input_verifier(name: String) -> Box<InputVerifier> {
Box::new(InputVerifier::new(&name, ffi::shouldLog("InputVerifierLogEvents")))
}
@@ -79,12 +122,20 @@ fn process_movement(
pointer_properties: &[RustPointerProperties],
flags: u32,
) -> String {
+ let motion_flags = MotionFlags::from_bits(flags);
+ if motion_flags.is_none() {
+        panic!(
+            "The conversion of flags 0x{:08x} failed; please check if some flags have not been \
+            added to MotionFlags.",
+ flags
+ );
+ }
let result = verifier.process_movement(
DeviceId(device_id),
Source::from_bits(source).unwrap(),
action,
pointer_properties,
- MotionFlags::from_bits(flags).unwrap(),
+ motion_flags.unwrap(),
);
match result {
Ok(()) => "".to_string(),
@@ -95,3 +146,53 @@ fn process_movement(
fn reset_device(verifier: &mut InputVerifier, device_id: i32) {
verifier.reset_device(DeviceId(device_id));
}
+
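+// The free functions below adapt KeyboardClassifier to the cxx bridge declared above, converting
+// raw integer device ids, device classes, and meta state into their typed Rust equivalents.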
+fn create_keyboard_classifier() -> Box<KeyboardClassifier> {
+ Box::new(KeyboardClassifier::new())
+}
+
+fn notify_keyboard_changed(
+ classifier: &mut KeyboardClassifier,
+ device_id: i32,
+ identifier: RustInputDeviceIdentifier,
+ device_classes: u32,
+) {
+ let classes = DeviceClass::from_bits(device_classes);
+ if classes.is_none() {
+        panic!(
+            "The conversion of device class 0x{:08x} failed; please check if some device classes \
+            have not been added to DeviceClass.",
+ device_classes
+ );
+ }
+ classifier.notify_keyboard_changed(InputDevice {
+ device_id: DeviceId(device_id),
+ identifier,
+ classes: classes.unwrap(),
+ });
+}
+
+fn get_keyboard_type(classifier: &mut KeyboardClassifier, device_id: i32) -> u32 {
+ classifier.get_keyboard_type(DeviceId(device_id)) as u32
+}
+
+fn is_finalized(classifier: &mut KeyboardClassifier, device_id: i32) -> bool {
+ classifier.is_finalized(DeviceId(device_id))
+}
+
+fn process_key(
+ classifier: &mut KeyboardClassifier,
+ device_id: i32,
+ evdev_code: i32,
+ meta_state: u32,
+) {
+ let modifier_state = ModifierState::from_bits(meta_state);
+ if modifier_state.is_none() {
+        panic!(
+            "The conversion of meta state 0x{:08x} failed; please check if some meta states \
+            have not been added to ModifierState.",
+ meta_state
+ );
+ }
+ classifier.process_key(DeviceId(device_id), evdev_code, modifier_state.unwrap());
+}
diff --git a/libs/input/tests/Android.bp b/libs/input/tests/Android.bp
index 93af4c2066..e9d799ed3f 100644
--- a/libs/input/tests/Android.bp
+++ b/libs/input/tests/Android.bp
@@ -19,6 +19,7 @@ cc_test {
"InputDevice_test.cpp",
"InputEvent_test.cpp",
"InputPublisherAndConsumer_test.cpp",
+ "InputPublisherAndConsumerNoResampling_test.cpp",
"InputVerifier_test.cpp",
"MotionPredictor_test.cpp",
"MotionPredictorMetricsManager_test.cpp",
@@ -35,6 +36,7 @@ cc_test {
"tensorflow_headers",
],
static_libs: [
+ "libflagtest",
"libgmock",
"libgui_window_info_static",
"libinput",
@@ -63,6 +65,7 @@ cc_test {
"libcutils",
"liblog",
"libPlatformProperties",
+ "libstatslog",
"libtinyxml2",
"libutils",
"server_configurable_flags",
@@ -76,19 +79,17 @@ cc_test {
},
test_suites: ["device-tests"],
target: {
- host: {
- sanitize: {
- address: true,
- },
- },
android: {
static_libs: [
- // Stats logging library and its dependencies.
"libstatslog_libinput",
- "libstatsbootstrap",
- "android.os.statsbootstrap_aidl-cpp",
+ "libstatssocket_lazy",
],
},
+ host: {
+ sanitize: {
+ address: true,
+ },
+ },
},
}
diff --git a/libs/input/tests/InputChannel_test.cpp b/libs/input/tests/InputChannel_test.cpp
index 60feb53dcc..02d4c07bfa 100644
--- a/libs/input/tests/InputChannel_test.cpp
+++ b/libs/input/tests/InputChannel_test.cpp
@@ -16,8 +16,6 @@
#include <array>
-#include "TestHelpers.h"
-
#include <unistd.h>
#include <time.h>
#include <errno.h>
diff --git a/libs/input/tests/InputEvent_test.cpp b/libs/input/tests/InputEvent_test.cpp
index a9655730fc..3717f49fef 100644
--- a/libs/input/tests/InputEvent_test.cpp
+++ b/libs/input/tests/InputEvent_test.cpp
@@ -21,15 +21,43 @@
#include <attestation/HmacKeyManager.h>
#include <binder/Parcel.h>
#include <gtest/gtest.h>
-#include <gui/constants.h>
#include <input/Input.h>
+#include <input/InputEventBuilders.h>
namespace android {
-// Default display id.
-static constexpr int32_t DISPLAY_ID = ADISPLAY_ID_DEFAULT;
+namespace {
-static constexpr float EPSILON = MotionEvent::ROUNDING_PRECISION;
+// Default display id.
+constexpr ui::LogicalDisplayId DISPLAY_ID = ui::LogicalDisplayId::DEFAULT;
+
+constexpr float EPSILON = MotionEvent::ROUNDING_PRECISION;
+
+constexpr auto POINTER_0_DOWN =
+ AMOTION_EVENT_ACTION_POINTER_DOWN | (0 << AMOTION_EVENT_ACTION_POINTER_INDEX_SHIFT);
+
+constexpr auto POINTER_1_DOWN =
+ AMOTION_EVENT_ACTION_POINTER_DOWN | (1 << AMOTION_EVENT_ACTION_POINTER_INDEX_SHIFT);
+
+constexpr auto POINTER_0_UP =
+ AMOTION_EVENT_ACTION_POINTER_UP | (0 << AMOTION_EVENT_ACTION_POINTER_INDEX_SHIFT);
+
+constexpr auto POINTER_1_UP =
+ AMOTION_EVENT_ACTION_POINTER_UP | (1 << AMOTION_EVENT_ACTION_POINTER_INDEX_SHIFT);
+
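+// Flattens a ui::Transform into the 3x3 float array format accepted by
+// MotionEvent::transform() and MotionEvent::applyTransform().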
+std::array<float, 9> asFloat9(const ui::Transform& t) {
+ std::array<float, 9> mat{};
+ mat[0] = t[0][0];
+ mat[1] = t[1][0];
+ mat[2] = t[2][0];
+ mat[3] = t[0][1];
+ mat[4] = t[1][1];
+ mat[5] = t[2][1];
+ mat[6] = t[0][2];
+ mat[7] = t[1][2];
+ mat[8] = t[2][2];
+ return mat;
+}
class BaseTest : public testing::Test {
protected:
@@ -38,6 +66,8 @@ protected:
22, 23, 24, 25, 26, 27, 28, 29, 30, 31};
};
+} // namespace
+
// --- PointerCoordsTest ---
class PointerCoordsTest : public BaseTest {
@@ -216,7 +246,7 @@ TEST_F(KeyEventTest, Properties) {
ASSERT_EQ(AINPUT_SOURCE_JOYSTICK, event.getSource());
// Set display id.
- constexpr int32_t newDisplayId = 2;
+ constexpr ui::LogicalDisplayId newDisplayId = ui::LogicalDisplayId{2};
event.setDisplayId(newDisplayId);
ASSERT_EQ(newDisplayId, event.getDisplayId());
}
@@ -332,13 +362,15 @@ void MotionEventTest::SetUp() {
}
void MotionEventTest::initializeEventWithHistory(MotionEvent* event) {
+ const int32_t flags = AMOTION_EVENT_FLAG_WINDOW_IS_OBSCURED |
+ AMOTION_EVENT_PRIVATE_FLAG_SUPPORTS_ORIENTATION |
+ AMOTION_EVENT_PRIVATE_FLAG_SUPPORTS_DIRECTIONAL_ORIENTATION;
event->initialize(mId, 2, AINPUT_SOURCE_TOUCHSCREEN, DISPLAY_ID, HMAC,
- AMOTION_EVENT_ACTION_MOVE, 0, AMOTION_EVENT_FLAG_WINDOW_IS_OBSCURED,
- AMOTION_EVENT_EDGE_FLAG_TOP, AMETA_ALT_ON, AMOTION_EVENT_BUTTON_PRIMARY,
- MotionClassification::NONE, mTransform, 2.0f, 2.1f,
- AMOTION_EVENT_INVALID_CURSOR_POSITION, AMOTION_EVENT_INVALID_CURSOR_POSITION,
- mRawTransform, ARBITRARY_DOWN_TIME, ARBITRARY_EVENT_TIME, 2,
- mPointerProperties, mSamples[0].pointerCoords);
+ AMOTION_EVENT_ACTION_MOVE, 0, flags, AMOTION_EVENT_EDGE_FLAG_TOP,
+ AMETA_ALT_ON, AMOTION_EVENT_BUTTON_PRIMARY, MotionClassification::NONE,
+ mTransform, 2.0f, 2.1f, AMOTION_EVENT_INVALID_CURSOR_POSITION,
+ AMOTION_EVENT_INVALID_CURSOR_POSITION, mRawTransform, ARBITRARY_DOWN_TIME,
+ ARBITRARY_EVENT_TIME, 2, mPointerProperties, mSamples[0].pointerCoords);
event->addSample(ARBITRARY_EVENT_TIME + 1, mSamples[1].pointerCoords);
event->addSample(ARBITRARY_EVENT_TIME + 2, mSamples[2].pointerCoords);
}
@@ -352,14 +384,19 @@ void MotionEventTest::assertEqualsEventWithHistory(const MotionEvent* event) {
ASSERT_EQ(DISPLAY_ID, event->getDisplayId());
EXPECT_EQ(HMAC, event->getHmac());
ASSERT_EQ(AMOTION_EVENT_ACTION_MOVE, event->getAction());
- ASSERT_EQ(AMOTION_EVENT_FLAG_WINDOW_IS_OBSCURED, event->getFlags());
+ ASSERT_EQ(AMOTION_EVENT_FLAG_WINDOW_IS_OBSCURED |
+ AMOTION_EVENT_PRIVATE_FLAG_SUPPORTS_ORIENTATION |
+ AMOTION_EVENT_PRIVATE_FLAG_SUPPORTS_DIRECTIONAL_ORIENTATION,
+ event->getFlags());
ASSERT_EQ(AMOTION_EVENT_EDGE_FLAG_TOP, event->getEdgeFlags());
ASSERT_EQ(AMETA_ALT_ON, event->getMetaState());
ASSERT_EQ(AMOTION_EVENT_BUTTON_PRIMARY, event->getButtonState());
ASSERT_EQ(MotionClassification::NONE, event->getClassification());
EXPECT_EQ(mTransform, event->getTransform());
- ASSERT_EQ(X_OFFSET, event->getXOffset());
- ASSERT_EQ(Y_OFFSET, event->getYOffset());
+ ASSERT_NEAR((-RAW_X_OFFSET / RAW_X_SCALE) * X_SCALE + X_OFFSET, event->getRawXOffset(),
+ EPSILON);
+ ASSERT_NEAR((-RAW_Y_OFFSET / RAW_Y_SCALE) * Y_SCALE + Y_OFFSET, event->getRawYOffset(),
+ EPSILON);
ASSERT_EQ(2.0f, event->getXPrecision());
ASSERT_EQ(2.1f, event->getYPrecision());
ASSERT_EQ(ARBITRARY_DOWN_TIME, event->getDownTime());
@@ -513,7 +550,7 @@ TEST_F(MotionEventTest, Properties) {
ASSERT_EQ(AINPUT_SOURCE_JOYSTICK, event.getSource());
// Set displayId.
- constexpr int32_t newDisplayId = 2;
+ constexpr ui::LogicalDisplayId newDisplayId = ui::LogicalDisplayId{2};
event.setDisplayId(newDisplayId);
ASSERT_EQ(newDisplayId, event.getDisplayId());
@@ -554,25 +591,168 @@ TEST_F(MotionEventTest, CopyFrom_DoNotKeepHistory) {
ASSERT_EQ(event.getX(0), copy.getX(0));
}
+TEST_F(MotionEventTest, SplitPointerDown) {
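+    // Split a three-pointer POINTER_DOWN event by pointer id: the result is DOWN, POINTER_DOWN,
+    // or MOVE depending on whether the acting pointer is part of the split and how many pointers
+    // the split event ends up with.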
+ MotionEvent event = MotionEventBuilder(POINTER_1_DOWN, AINPUT_SOURCE_TOUCHSCREEN)
+ .downTime(ARBITRARY_DOWN_TIME)
+ .pointer(PointerBuilder(/*id=*/4, ToolType::FINGER).x(4).y(4))
+ .pointer(PointerBuilder(/*id=*/6, ToolType::FINGER).x(6).y(6))
+ .pointer(PointerBuilder(/*id=*/8, ToolType::FINGER).x(8).y(8))
+ .build();
+
+ MotionEvent splitDown;
+ std::bitset<MAX_POINTER_ID + 1> splitDownIds{};
+ splitDownIds.set(6, true);
+ splitDown.splitFrom(event, splitDownIds, /*eventId=*/42);
+ ASSERT_EQ(splitDown.getAction(), AMOTION_EVENT_ACTION_DOWN);
+ ASSERT_EQ(splitDown.getPointerCount(), 1u);
+ ASSERT_EQ(splitDown.getPointerId(0), 6);
+ ASSERT_EQ(splitDown.getX(0), 6);
+ ASSERT_EQ(splitDown.getY(0), 6);
+
+ MotionEvent splitPointerDown;
+ std::bitset<MAX_POINTER_ID + 1> splitPointerDownIds{};
+ splitPointerDownIds.set(6, true);
+ splitPointerDownIds.set(8, true);
+ splitPointerDown.splitFrom(event, splitPointerDownIds, /*eventId=*/42);
+ ASSERT_EQ(splitPointerDown.getAction(), POINTER_0_DOWN);
+ ASSERT_EQ(splitPointerDown.getPointerCount(), 2u);
+ ASSERT_EQ(splitPointerDown.getPointerId(0), 6);
+ ASSERT_EQ(splitPointerDown.getX(0), 6);
+ ASSERT_EQ(splitPointerDown.getY(0), 6);
+ ASSERT_EQ(splitPointerDown.getPointerId(1), 8);
+ ASSERT_EQ(splitPointerDown.getX(1), 8);
+ ASSERT_EQ(splitPointerDown.getY(1), 8);
+
+ MotionEvent splitMove;
+ std::bitset<MAX_POINTER_ID + 1> splitMoveIds{};
+ splitMoveIds.set(4, true);
+ splitMove.splitFrom(event, splitMoveIds, /*eventId=*/43);
+ ASSERT_EQ(splitMove.getAction(), AMOTION_EVENT_ACTION_MOVE);
+ ASSERT_EQ(splitMove.getPointerCount(), 1u);
+ ASSERT_EQ(splitMove.getPointerId(0), 4);
+ ASSERT_EQ(splitMove.getX(0), 4);
+ ASSERT_EQ(splitMove.getY(0), 4);
+}
+
+TEST_F(MotionEventTest, SplitPointerUp) {
+ MotionEvent event = MotionEventBuilder(POINTER_0_UP, AINPUT_SOURCE_TOUCHSCREEN)
+ .downTime(ARBITRARY_DOWN_TIME)
+ .pointer(PointerBuilder(/*id=*/4, ToolType::FINGER).x(4).y(4))
+ .pointer(PointerBuilder(/*id=*/6, ToolType::FINGER).x(6).y(6))
+ .pointer(PointerBuilder(/*id=*/8, ToolType::FINGER).x(8).y(8))
+ .build();
+
+ MotionEvent splitUp;
+ std::bitset<MAX_POINTER_ID + 1> splitUpIds{};
+ splitUpIds.set(4, true);
+ splitUp.splitFrom(event, splitUpIds, /*eventId=*/42);
+ ASSERT_EQ(splitUp.getAction(), AMOTION_EVENT_ACTION_UP);
+ ASSERT_EQ(splitUp.getPointerCount(), 1u);
+ ASSERT_EQ(splitUp.getPointerId(0), 4);
+ ASSERT_EQ(splitUp.getX(0), 4);
+ ASSERT_EQ(splitUp.getY(0), 4);
+
+ MotionEvent splitPointerUp;
+ std::bitset<MAX_POINTER_ID + 1> splitPointerUpIds{};
+ splitPointerUpIds.set(4, true);
+ splitPointerUpIds.set(8, true);
+ splitPointerUp.splitFrom(event, splitPointerUpIds, /*eventId=*/42);
+ ASSERT_EQ(splitPointerUp.getAction(), POINTER_0_UP);
+ ASSERT_EQ(splitPointerUp.getPointerCount(), 2u);
+ ASSERT_EQ(splitPointerUp.getPointerId(0), 4);
+ ASSERT_EQ(splitPointerUp.getX(0), 4);
+ ASSERT_EQ(splitPointerUp.getY(0), 4);
+ ASSERT_EQ(splitPointerUp.getPointerId(1), 8);
+ ASSERT_EQ(splitPointerUp.getX(1), 8);
+ ASSERT_EQ(splitPointerUp.getY(1), 8);
+
+ MotionEvent splitMove;
+ std::bitset<MAX_POINTER_ID + 1> splitMoveIds{};
+ splitMoveIds.set(6, true);
+ splitMoveIds.set(8, true);
+ splitMove.splitFrom(event, splitMoveIds, /*eventId=*/43);
+ ASSERT_EQ(splitMove.getAction(), AMOTION_EVENT_ACTION_MOVE);
+ ASSERT_EQ(splitMove.getPointerCount(), 2u);
+ ASSERT_EQ(splitMove.getPointerId(0), 6);
+ ASSERT_EQ(splitMove.getX(0), 6);
+ ASSERT_EQ(splitMove.getY(0), 6);
+ ASSERT_EQ(splitMove.getPointerId(1), 8);
+ ASSERT_EQ(splitMove.getX(1), 8);
+ ASSERT_EQ(splitMove.getY(1), 8);
+}
+
+TEST_F(MotionEventTest, SplitPointerUpCancel) {
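+    // When the departing pointer carries AMOTION_EVENT_FLAG_CANCELED and ends up alone in the
+    // split event, the resulting action is CANCEL rather than UP.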
+ MotionEvent event = MotionEventBuilder(POINTER_1_UP, AINPUT_SOURCE_TOUCHSCREEN)
+ .downTime(ARBITRARY_DOWN_TIME)
+ .pointer(PointerBuilder(/*id=*/4, ToolType::FINGER).x(4).y(4))
+ .pointer(PointerBuilder(/*id=*/6, ToolType::FINGER).x(6).y(6))
+ .pointer(PointerBuilder(/*id=*/8, ToolType::FINGER).x(8).y(8))
+ .addFlag(AMOTION_EVENT_FLAG_CANCELED)
+ .build();
+
+ MotionEvent splitUp;
+ std::bitset<MAX_POINTER_ID + 1> splitUpIds{};
+ splitUpIds.set(6, true);
+ splitUp.splitFrom(event, splitUpIds, /*eventId=*/42);
+ ASSERT_EQ(splitUp.getAction(), AMOTION_EVENT_ACTION_CANCEL);
+ ASSERT_EQ(splitUp.getPointerCount(), 1u);
+ ASSERT_EQ(splitUp.getPointerId(0), 6);
+ ASSERT_EQ(splitUp.getX(0), 6);
+ ASSERT_EQ(splitUp.getY(0), 6);
+}
+
+TEST_F(MotionEventTest, SplitPointerMove) {
+ MotionEvent event = MotionEventBuilder(AMOTION_EVENT_ACTION_MOVE, AINPUT_SOURCE_TOUCHSCREEN)
+ .downTime(ARBITRARY_DOWN_TIME)
+ .pointer(PointerBuilder(/*id=*/4, ToolType::FINGER).x(4).y(4))
+ .pointer(PointerBuilder(/*id=*/6, ToolType::FINGER).x(6).y(6))
+ .pointer(PointerBuilder(/*id=*/8, ToolType::FINGER).x(8).y(8))
+ .transform(ui::Transform(ui::Transform::ROT_90, 100, 100))
+ .rawTransform(ui::Transform(ui::Transform::FLIP_H, 50, 50))
+ .build();
+
+ MotionEvent splitMove;
+ std::bitset<MAX_POINTER_ID + 1> splitMoveIds{};
+ splitMoveIds.set(4, true);
+ splitMoveIds.set(8, true);
+ splitMove.splitFrom(event, splitMoveIds, /*eventId=*/42);
+ ASSERT_EQ(splitMove.getAction(), AMOTION_EVENT_ACTION_MOVE);
+ ASSERT_EQ(splitMove.getPointerCount(), 2u);
+ ASSERT_EQ(splitMove.getPointerId(0), 4);
+ ASSERT_EQ(splitMove.getX(0), event.getX(0));
+ ASSERT_EQ(splitMove.getY(0), event.getY(0));
+ ASSERT_EQ(splitMove.getRawX(0), event.getRawX(0));
+ ASSERT_EQ(splitMove.getRawY(0), event.getRawY(0));
+ ASSERT_EQ(splitMove.getPointerId(1), 8);
+ ASSERT_EQ(splitMove.getX(1), event.getX(2));
+ ASSERT_EQ(splitMove.getY(1), event.getY(2));
+ ASSERT_EQ(splitMove.getRawX(1), event.getRawX(2));
+ ASSERT_EQ(splitMove.getRawY(1), event.getRawY(2));
+}
+
TEST_F(MotionEventTest, OffsetLocation) {
MotionEvent event;
initializeEventWithHistory(&event);
+ const float xOffset = event.getRawXOffset();
+ const float yOffset = event.getRawYOffset();
event.offsetLocation(5.0f, -2.0f);
- ASSERT_EQ(X_OFFSET + 5.0f, event.getXOffset());
- ASSERT_EQ(Y_OFFSET - 2.0f, event.getYOffset());
+ ASSERT_EQ(xOffset + 5.0f, event.getRawXOffset());
+ ASSERT_EQ(yOffset - 2.0f, event.getRawYOffset());
}
TEST_F(MotionEventTest, Scale) {
MotionEvent event;
initializeEventWithHistory(&event);
const float unscaledOrientation = event.getOrientation(0);
+ const float unscaledXOffset = event.getRawXOffset();
+ const float unscaledYOffset = event.getRawYOffset();
event.scale(2.0f);
- ASSERT_EQ(X_OFFSET * 2, event.getXOffset());
- ASSERT_EQ(Y_OFFSET * 2, event.getYOffset());
+ ASSERT_EQ(unscaledXOffset * 2, event.getRawXOffset());
+ ASSERT_EQ(unscaledYOffset * 2, event.getRawYOffset());
ASSERT_NEAR((RAW_X_OFFSET + 210 * RAW_X_SCALE) * 2, event.getRawX(0), EPSILON);
ASSERT_NEAR((RAW_Y_OFFSET + 211 * RAW_Y_SCALE) * 2, event.getRawY(0), EPSILON);
@@ -642,8 +822,10 @@ TEST_F(MotionEventTest, Transform) {
}
MotionEvent event;
ui::Transform identityTransform;
+ const int32_t flags = AMOTION_EVENT_PRIVATE_FLAG_SUPPORTS_ORIENTATION |
+ AMOTION_EVENT_PRIVATE_FLAG_SUPPORTS_DIRECTIONAL_ORIENTATION;
event.initialize(InputEvent::nextId(), /*deviceId=*/0, AINPUT_SOURCE_TOUCHSCREEN, DISPLAY_ID,
- INVALID_HMAC, AMOTION_EVENT_ACTION_MOVE, /*actionButton=*/0, /*flags=*/0,
+ INVALID_HMAC, AMOTION_EVENT_ACTION_MOVE, /*actionButton=*/0, flags,
AMOTION_EVENT_EDGE_FLAG_NONE, AMETA_NONE, /*buttonState=*/0,
MotionClassification::NONE, identityTransform, /*xPrecision=*/0,
/*yPrecision=*/0, /*xCursorPosition=*/3 + RADIUS, /*yCursorPosition=*/2,
@@ -701,11 +883,10 @@ MotionEvent createMotionEvent(int32_t source, uint32_t action, float x, float y,
pointerCoords.back().setAxisValue(AMOTION_EVENT_AXIS_RELATIVE_Y, dy);
nsecs_t eventTime = systemTime(SYSTEM_TIME_MONOTONIC);
MotionEvent event;
- event.initialize(InputEvent::nextId(), /* deviceId */ 1, source,
- /* displayId */ 0, INVALID_HMAC, action,
- /* actionButton */ 0, /* flags */ 0, /* edgeFlags */ 0, AMETA_NONE,
- /* buttonState */ 0, MotionClassification::NONE, transform,
- /* xPrecision */ 0, /* yPrecision */ 0, AMOTION_EVENT_INVALID_CURSOR_POSITION,
+ event.initialize(InputEvent::nextId(), /*deviceId=*/1, source, ui::LogicalDisplayId::DEFAULT,
+ INVALID_HMAC, action, /*actionButton=*/0, /*flags=*/0, /*edgeFlags=*/0,
+ AMETA_NONE, /*buttonState=*/0, MotionClassification::NONE, transform,
+ /*xPrecision=*/0, /*yPrecision=*/0, AMOTION_EVENT_INVALID_CURSOR_POSITION,
AMOTION_EVENT_INVALID_CURSOR_POSITION, rawTransform, eventTime, eventTime,
pointerCoords.size(), pointerProperties.data(), pointerCoords.data());
return event;
@@ -931,4 +1112,90 @@ TEST_F(MotionEventTest, CoordinatesAreRoundedAppropriately) {
ASSERT_EQ(EXPECTED.y, event.getYCursorPosition());
}
+TEST_F(MotionEventTest, InvalidOrientationNotRotated) {
+ // This touch event does not have a value for AXIS_ORIENTATION, and the flags are implicitly
+ // set to 0. The transform is set to a 90-degree rotation.
+ MotionEvent event = MotionEventBuilder(AMOTION_EVENT_ACTION_MOVE, AINPUT_SOURCE_TOUCHSCREEN)
+ .downTime(ARBITRARY_DOWN_TIME)
+ .pointer(PointerBuilder(/*id=*/4, ToolType::FINGER).x(4).y(4))
+ .transform(ui::Transform(ui::Transform::ROT_90, 100, 100))
+ .rawTransform(ui::Transform(ui::Transform::FLIP_H, 50, 50))
+ .build();
+ ASSERT_EQ(event.getOrientation(/*pointerIndex=*/0), 0.f);
+ event.transform(asFloat9(ui::Transform(ui::Transform::ROT_90, 100, 100)));
+ ASSERT_EQ(event.getOrientation(/*pointerIndex=*/0), 0.f);
+ event.transform(asFloat9(ui::Transform(ui::Transform::ROT_180, 100, 100)));
+ ASSERT_EQ(event.getOrientation(/*pointerIndex=*/0), 0.f);
+ event.applyTransform(asFloat9(ui::Transform(ui::Transform::ROT_270, 100, 100)));
+ ASSERT_EQ(event.getOrientation(/*pointerIndex=*/0), 0.f);
+}
+
+TEST_F(MotionEventTest, ValidZeroOrientationRotated) {
+    // This touch event will implicitly have a value of 0 for its AXIS_ORIENTATION.
+ auto builder = MotionEventBuilder(AMOTION_EVENT_ACTION_MOVE, AINPUT_SOURCE_TOUCHSCREEN)
+ .downTime(ARBITRARY_DOWN_TIME)
+ .pointer(PointerBuilder(/*id=*/4, ToolType::FINGER).x(4).y(4))
+ .transform(ui::Transform(ui::Transform::ROT_90, 100, 100))
+ .rawTransform(ui::Transform(ui::Transform::FLIP_H, 50, 50))
+ .addFlag(AMOTION_EVENT_PRIVATE_FLAG_SUPPORTS_ORIENTATION);
+ MotionEvent nonDirectionalEvent = builder.build();
+ MotionEvent directionalEvent =
+ builder.addFlag(AMOTION_EVENT_PRIVATE_FLAG_SUPPORTS_DIRECTIONAL_ORIENTATION).build();
+
+ // The angle is rotated by the initial transform, a 90-degree rotation.
+ ASSERT_NEAR(fabs(nonDirectionalEvent.getOrientation(/*pointerIndex=*/0)), M_PI_2, EPSILON);
+ ASSERT_NEAR(directionalEvent.getOrientation(/*pointerIndex=*/0), M_PI_2, EPSILON);
+
+ nonDirectionalEvent.transform(asFloat9(ui::Transform(ui::Transform::ROT_90, 100, 100)));
+ directionalEvent.transform(asFloat9(ui::Transform(ui::Transform::ROT_90, 100, 100)));
+ ASSERT_NEAR(nonDirectionalEvent.getOrientation(/*pointerIndex=*/0), 0.f, EPSILON);
+ ASSERT_NEAR(fabs(directionalEvent.getOrientation(/*pointerIndex=*/0)), M_PI, EPSILON);
+
+ nonDirectionalEvent.transform(asFloat9(ui::Transform(ui::Transform::ROT_180, 100, 100)));
+ directionalEvent.transform(asFloat9(ui::Transform(ui::Transform::ROT_180, 100, 100)));
+ ASSERT_NEAR(nonDirectionalEvent.getOrientation(/*pointerIndex=*/0), 0.f, EPSILON);
+ ASSERT_NEAR(directionalEvent.getOrientation(/*pointerIndex=*/0), 0.f, EPSILON);
+
+ nonDirectionalEvent.applyTransform(asFloat9(ui::Transform(ui::Transform::ROT_270, 100, 100)));
+ directionalEvent.applyTransform(asFloat9(ui::Transform(ui::Transform::ROT_270, 100, 100)));
+ ASSERT_NEAR(fabs(nonDirectionalEvent.getOrientation(/*pointerIndex=*/0)), M_PI_2, EPSILON);
+ ASSERT_NEAR(directionalEvent.getOrientation(/*pointerIndex=*/0), -M_PI_2, EPSILON);
+}
+
+TEST_F(MotionEventTest, ValidNonZeroOrientationRotated) {
+ const float initial = 1.f;
+ auto builder = MotionEventBuilder(AMOTION_EVENT_ACTION_MOVE, AINPUT_SOURCE_TOUCHSCREEN)
+ .downTime(ARBITRARY_DOWN_TIME)
+ .pointer(PointerBuilder(/*id=*/4, ToolType::FINGER)
+ .x(4)
+ .y(4)
+ .axis(AMOTION_EVENT_AXIS_ORIENTATION, initial))
+ .transform(ui::Transform(ui::Transform::ROT_90, 100, 100))
+ .rawTransform(ui::Transform(ui::Transform::FLIP_H, 50, 50))
+ .addFlag(AMOTION_EVENT_PRIVATE_FLAG_SUPPORTS_ORIENTATION);
+
+ MotionEvent nonDirectionalEvent = builder.build();
+ MotionEvent directionalEvent =
+ builder.addFlag(AMOTION_EVENT_PRIVATE_FLAG_SUPPORTS_DIRECTIONAL_ORIENTATION).build();
+
+ // The angle is rotated by the initial transform, a 90-degree rotation.
+ ASSERT_NEAR(nonDirectionalEvent.getOrientation(/*pointerIndex=*/0), initial - M_PI_2, EPSILON);
+ ASSERT_NEAR(directionalEvent.getOrientation(/*pointerIndex=*/0), initial + M_PI_2, EPSILON);
+
+ nonDirectionalEvent.transform(asFloat9(ui::Transform(ui::Transform::ROT_90, 100, 100)));
+ directionalEvent.transform(asFloat9(ui::Transform(ui::Transform::ROT_90, 100, 100)));
+ ASSERT_NEAR(nonDirectionalEvent.getOrientation(/*pointerIndex=*/0), initial, EPSILON);
+ ASSERT_NEAR(directionalEvent.getOrientation(/*pointerIndex=*/0), initial - M_PI, EPSILON);
+
+ nonDirectionalEvent.transform(asFloat9(ui::Transform(ui::Transform::ROT_180, 100, 100)));
+ directionalEvent.transform(asFloat9(ui::Transform(ui::Transform::ROT_180, 100, 100)));
+ ASSERT_NEAR(nonDirectionalEvent.getOrientation(/*pointerIndex=*/0), initial, EPSILON);
+ ASSERT_NEAR(directionalEvent.getOrientation(/*pointerIndex=*/0), initial, EPSILON);
+
+ nonDirectionalEvent.applyTransform(asFloat9(ui::Transform(ui::Transform::ROT_270, 100, 100)));
+ directionalEvent.applyTransform(asFloat9(ui::Transform(ui::Transform::ROT_270, 100, 100)));
+ ASSERT_NEAR(nonDirectionalEvent.getOrientation(/*pointerIndex=*/0), initial - M_PI_2, EPSILON);
+ ASSERT_NEAR(directionalEvent.getOrientation(/*pointerIndex=*/0), initial - M_PI_2, EPSILON);
+}
+
} // namespace android
diff --git a/libs/input/tests/InputPublisherAndConsumerNoResampling_test.cpp b/libs/input/tests/InputPublisherAndConsumerNoResampling_test.cpp
new file mode 100644
index 0000000000..f49469ccca
--- /dev/null
+++ b/libs/input/tests/InputPublisherAndConsumerNoResampling_test.cpp
@@ -0,0 +1,817 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android-base/logging.h>
+#include <attestation/HmacKeyManager.h>
+#include <ftl/enum.h>
+#include <gtest/gtest.h>
+#include <input/BlockingQueue.h>
+#include <input/InputConsumerNoResampling.h>
+#include <input/InputTransport.h>
+
+using android::base::Result;
+using namespace std::chrono_literals;
+
+namespace android {
+
+namespace {
+
+static constexpr float EPSILON = MotionEvent::ROUNDING_PRECISION;
+static constexpr int32_t ACTION_MOVE = AMOTION_EVENT_ACTION_MOVE;
+static constexpr int32_t POINTER_1_DOWN =
+ AMOTION_EVENT_ACTION_POINTER_DOWN | (1 << AMOTION_EVENT_ACTION_POINTER_INDEX_SHIFT);
+static constexpr int32_t POINTER_2_DOWN =
+ AMOTION_EVENT_ACTION_POINTER_DOWN | (2 << AMOTION_EVENT_ACTION_POINTER_INDEX_SHIFT);
+
+static auto constexpr TIMEOUT = 5s;
+
+struct Pointer {
+ int32_t id;
+ float x;
+ float y;
+ bool isResampled = false;
+};
+
+// A collection of arguments to be sent via publishMotionEvent(). The saved members of this struct
+// allow checking the expectations against the event acquired from the InputConsumerCallbacks. To
+// simplify expectation checking, it carries members not present in MotionEvent, like |rawXScale|.
+struct PublishMotionArgs {
+ const int32_t action;
+ const nsecs_t downTime;
+ const uint32_t seq;
+ const int32_t eventId;
+ const int32_t deviceId = 1;
+ const uint32_t source = AINPUT_SOURCE_TOUCHSCREEN;
+ const ui::LogicalDisplayId displayId = ui::LogicalDisplayId::DEFAULT;
+ const int32_t actionButton = 0;
+ const int32_t edgeFlags = AMOTION_EVENT_EDGE_FLAG_TOP;
+ const int32_t metaState = AMETA_ALT_LEFT_ON | AMETA_ALT_ON;
+ const int32_t buttonState = AMOTION_EVENT_BUTTON_PRIMARY;
+ const MotionClassification classification = MotionClassification::AMBIGUOUS_GESTURE;
+ const float xScale = 2;
+ const float yScale = 3;
+ const float xOffset = -10;
+ const float yOffset = -20;
+ const float rawXScale = 4;
+ const float rawYScale = -5;
+ const float rawXOffset = -11;
+ const float rawYOffset = 42;
+ const float xPrecision = 0.25;
+ const float yPrecision = 0.5;
+ const float xCursorPosition = 1.3;
+ const float yCursorPosition = 50.6;
+ std::array<uint8_t, 32> hmac;
+ int32_t flags;
+ ui::Transform transform;
+ ui::Transform rawTransform;
+ const nsecs_t eventTime;
+ size_t pointerCount;
+ std::vector<PointerProperties> pointerProperties;
+ std::vector<PointerCoords> pointerCoords;
+
+ PublishMotionArgs(int32_t action, nsecs_t downTime, const std::vector<Pointer>& pointers,
+ const uint32_t seq);
+};
+
+PublishMotionArgs::PublishMotionArgs(int32_t inAction, nsecs_t inDownTime,
+ const std::vector<Pointer>& pointers, const uint32_t inSeq)
+ : action(inAction),
+ downTime(inDownTime),
+ seq(inSeq),
+ eventId(InputEvent::nextId()),
+ eventTime(systemTime(SYSTEM_TIME_MONOTONIC)) {
+ hmac = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
+ 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31};
+
+ flags = AMOTION_EVENT_FLAG_WINDOW_IS_OBSCURED |
+ AMOTION_EVENT_PRIVATE_FLAG_SUPPORTS_ORIENTATION |
+ AMOTION_EVENT_PRIVATE_FLAG_SUPPORTS_DIRECTIONAL_ORIENTATION;
+ if (action == AMOTION_EVENT_ACTION_CANCEL) {
+ flags |= AMOTION_EVENT_FLAG_CANCELED;
+ }
+ pointerCount = pointers.size();
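+    // Give each pointer distinct, index-dependent axis values so that verifyArgsEqualToEvent()
+    // can catch any axis or pointer mix-ups on the consumer side.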
+ for (size_t i = 0; i < pointerCount; i++) {
+ pointerProperties.push_back({});
+ pointerProperties[i].clear();
+ pointerProperties[i].id = pointers[i].id;
+ pointerProperties[i].toolType = ToolType::FINGER;
+
+ pointerCoords.push_back({});
+ pointerCoords[i].clear();
+ pointerCoords[i].isResampled = pointers[i].isResampled;
+ pointerCoords[i].setAxisValue(AMOTION_EVENT_AXIS_X, pointers[i].x);
+ pointerCoords[i].setAxisValue(AMOTION_EVENT_AXIS_Y, pointers[i].y);
+ pointerCoords[i].setAxisValue(AMOTION_EVENT_AXIS_PRESSURE, 0.5 * i);
+ pointerCoords[i].setAxisValue(AMOTION_EVENT_AXIS_SIZE, 0.7 * i);
+ pointerCoords[i].setAxisValue(AMOTION_EVENT_AXIS_TOUCH_MAJOR, 1.5 * i);
+ pointerCoords[i].setAxisValue(AMOTION_EVENT_AXIS_TOUCH_MINOR, 1.7 * i);
+ pointerCoords[i].setAxisValue(AMOTION_EVENT_AXIS_TOOL_MAJOR, 2.5 * i);
+        pointerCoords[i].setAxisValue(AMOTION_EVENT_AXIS_TOOL_MINOR, 2.7 * i);
+ pointerCoords[i].setAxisValue(AMOTION_EVENT_AXIS_ORIENTATION, 3.5 * i);
+ }
+ transform.set({xScale, 0, xOffset, 0, yScale, yOffset, 0, 0, 1});
+ rawTransform.set({rawXScale, 0, rawXOffset, 0, rawYScale, rawYOffset, 0, 0, 1});
+}
+
+// Checks expectations against |motionEvent| acquired from an InputConsumer. Floating point
+// comparisons limit precision to EPSILON.
+void verifyArgsEqualToEvent(const PublishMotionArgs& args, const MotionEvent& motionEvent) {
+ EXPECT_EQ(args.eventId, motionEvent.getId());
+ EXPECT_EQ(args.deviceId, motionEvent.getDeviceId());
+ EXPECT_EQ(args.source, motionEvent.getSource());
+ EXPECT_EQ(args.displayId, motionEvent.getDisplayId());
+ EXPECT_EQ(args.hmac, motionEvent.getHmac());
+ EXPECT_EQ(args.action, motionEvent.getAction());
+ EXPECT_EQ(args.downTime, motionEvent.getDownTime());
+ EXPECT_EQ(args.flags, motionEvent.getFlags());
+ EXPECT_EQ(args.edgeFlags, motionEvent.getEdgeFlags());
+ EXPECT_EQ(args.metaState, motionEvent.getMetaState());
+ EXPECT_EQ(args.buttonState, motionEvent.getButtonState());
+ EXPECT_EQ(args.classification, motionEvent.getClassification());
+ EXPECT_EQ(args.transform, motionEvent.getTransform());
+ EXPECT_NEAR((-args.rawXOffset / args.rawXScale) * args.xScale + args.xOffset,
+ motionEvent.getRawXOffset(), EPSILON);
+ EXPECT_NEAR((-args.rawYOffset / args.rawYScale) * args.yScale + args.yOffset,
+ motionEvent.getRawYOffset(), EPSILON);
+ EXPECT_EQ(args.xPrecision, motionEvent.getXPrecision());
+ EXPECT_EQ(args.yPrecision, motionEvent.getYPrecision());
+ EXPECT_NEAR(args.xCursorPosition, motionEvent.getRawXCursorPosition(), EPSILON);
+ EXPECT_NEAR(args.yCursorPosition, motionEvent.getRawYCursorPosition(), EPSILON);
+ EXPECT_NEAR(args.xCursorPosition * args.xScale + args.xOffset, motionEvent.getXCursorPosition(),
+ EPSILON);
+ EXPECT_NEAR(args.yCursorPosition * args.yScale + args.yOffset, motionEvent.getYCursorPosition(),
+ EPSILON);
+ EXPECT_EQ(args.rawTransform, motionEvent.getRawTransform());
+ EXPECT_EQ(args.eventTime, motionEvent.getEventTime());
+ EXPECT_EQ(args.pointerCount, motionEvent.getPointerCount());
+ EXPECT_EQ(0U, motionEvent.getHistorySize());
+
+ for (size_t i = 0; i < args.pointerCount; i++) {
+ SCOPED_TRACE(i);
+ EXPECT_EQ(args.pointerProperties[i].id, motionEvent.getPointerId(i));
+ EXPECT_EQ(args.pointerProperties[i].toolType, motionEvent.getToolType(i));
+
+ const auto& pc = args.pointerCoords[i];
+ EXPECT_EQ(pc, motionEvent.getSamplePointerCoords()[i]);
+
+ EXPECT_NEAR(pc.getX() * args.rawXScale + args.rawXOffset, motionEvent.getRawX(i), EPSILON);
+ EXPECT_NEAR(pc.getY() * args.rawYScale + args.rawYOffset, motionEvent.getRawY(i), EPSILON);
+ EXPECT_NEAR(pc.getX() * args.xScale + args.xOffset, motionEvent.getX(i), EPSILON);
+ EXPECT_NEAR(pc.getY() * args.yScale + args.yOffset, motionEvent.getY(i), EPSILON);
+ EXPECT_EQ(pc.getAxisValue(AMOTION_EVENT_AXIS_PRESSURE), motionEvent.getPressure(i));
+ EXPECT_EQ(pc.getAxisValue(AMOTION_EVENT_AXIS_SIZE), motionEvent.getSize(i));
+ EXPECT_EQ(pc.getAxisValue(AMOTION_EVENT_AXIS_TOUCH_MAJOR), motionEvent.getTouchMajor(i));
+ EXPECT_EQ(pc.getAxisValue(AMOTION_EVENT_AXIS_TOUCH_MINOR), motionEvent.getTouchMinor(i));
+ EXPECT_EQ(pc.getAxisValue(AMOTION_EVENT_AXIS_TOOL_MAJOR), motionEvent.getToolMajor(i));
+ EXPECT_EQ(pc.getAxisValue(AMOTION_EVENT_AXIS_TOOL_MINOR), motionEvent.getToolMinor(i));
+
+ // Calculate the orientation after scaling, keeping in mind that an orientation of 0 is
+ // "up", and the positive y direction is "down".
+ const float unscaledOrientation = pc.getAxisValue(AMOTION_EVENT_AXIS_ORIENTATION);
+ const float x = sinf(unscaledOrientation) * args.xScale;
+ const float y = -cosf(unscaledOrientation) * args.yScale;
+ EXPECT_EQ(atan2f(x, -y), motionEvent.getOrientation(i));
+ }
+}
+
+void publishMotionEvent(InputPublisher& publisher, const PublishMotionArgs& a) {
+ status_t status =
+ publisher.publishMotionEvent(a.seq, a.eventId, a.deviceId, a.source, a.displayId,
+ a.hmac, a.action, a.actionButton, a.flags, a.edgeFlags,
+ a.metaState, a.buttonState, a.classification, a.transform,
+ a.xPrecision, a.yPrecision, a.xCursorPosition,
+ a.yCursorPosition, a.rawTransform, a.downTime, a.eventTime,
+ a.pointerCount, a.pointerProperties.data(),
+ a.pointerCoords.data());
+ ASSERT_EQ(OK, status) << "publisher publishMotionEvent should return OK";
+}
+
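+// Polls InputPublisher::receiveConsumerResponse(), which does not block, until a response
+// arrives or |timeout| elapses.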
+Result<InputPublisher::ConsumerResponse> receiveConsumerResponse(
+ InputPublisher& publisher, std::chrono::milliseconds timeout) {
+ const std::chrono::time_point start = std::chrono::steady_clock::now();
+
+ while (true) {
+ Result<InputPublisher::ConsumerResponse> result = publisher.receiveConsumerResponse();
+ if (result.ok()) {
+ return result;
+ }
+ const std::chrono::duration waited = std::chrono::steady_clock::now() - start;
+ if (waited > timeout) {
+ return result;
+ }
+ }
+}
+
+void verifyFinishedSignal(InputPublisher& publisher, uint32_t seq, nsecs_t publishTime) {
+ Result<InputPublisher::ConsumerResponse> result = receiveConsumerResponse(publisher, TIMEOUT);
+ ASSERT_TRUE(result.ok()) << "receiveConsumerResponse returned " << result.error().message();
+ ASSERT_TRUE(std::holds_alternative<InputPublisher::Finished>(*result));
+ const InputPublisher::Finished& finish = std::get<InputPublisher::Finished>(*result);
+ ASSERT_EQ(seq, finish.seq)
+ << "receiveConsumerResponse should have returned the original sequence number";
+ ASSERT_TRUE(finish.handled)
+ << "receiveConsumerResponse should have set handled to consumer's reply";
+ ASSERT_GE(finish.consumeTime, publishTime)
+ << "finished signal's consume time should be greater than publish time";
+}
+
+} // namespace
+
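+// Forwards looper Messages to an arbitrary std::function, letting the test post work onto the
+// looper thread.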
+class InputConsumerMessageHandler : public MessageHandler {
+public:
+ InputConsumerMessageHandler(std::function<void(const Message&)> function)
+ : mFunction(function) {}
+
+private:
+ void handleMessage(const Message& message) override { mFunction(message); }
+
+ std::function<void(const Message&)> mFunction;
+};
+
+class InputPublisherAndConsumerNoResamplingTest : public testing::Test,
+ public InputConsumerCallbacks {
+protected:
+ std::unique_ptr<InputChannel> mClientChannel;
+ std::unique_ptr<InputPublisher> mPublisher;
+ std::unique_ptr<InputConsumerNoResampling> mConsumer;
+
+ std::thread mLooperThread;
+ sp<Looper> mLooper = sp<Looper>::make(/*allowNonCallbacks=*/false);
+
+ // LOOPER CONTROL
+    // Set to true when you want the looper to exit
+ std::atomic<bool> mExitLooper = false;
+ std::mutex mLock;
+
+    // Used by the test to notify the looper that the value of "mLooperMayProceed" has changed
+ std::condition_variable mNotifyLooperMayProceed;
+ bool mLooperMayProceed GUARDED_BY(mLock){true};
+    // Used by the looper to notify the test that it is about to block until "mLooperMayProceed" becomes true
+ std::condition_variable mNotifyLooperWaiting;
+ bool mLooperIsBlocked GUARDED_BY(mLock){false};
+
+ std::condition_variable mNotifyConsumerDestroyed;
+ bool mConsumerDestroyed GUARDED_BY(mLock){false};
+
+ void runLooper() {
+ static constexpr int LOOP_INDEFINITELY = -1;
+ Looper::setForThread(mLooper);
+ // Loop forever -- this thread is dedicated to servicing the looper callbacks.
+ while (!mExitLooper) {
+ mLooper->pollOnce(/*timeoutMillis=*/LOOP_INDEFINITELY);
+ }
+ }
+
+ void SetUp() override {
+ std::unique_ptr<InputChannel> serverChannel;
+ status_t result =
+ InputChannel::openInputChannelPair("channel name", serverChannel, mClientChannel);
+ ASSERT_EQ(OK, result);
+
+ mPublisher = std::make_unique<InputPublisher>(std::move(serverChannel));
+ mMessageHandler = sp<InputConsumerMessageHandler>::make(
+ [this](const Message& message) { handleMessage(message); });
+ mLooperThread = std::thread([this] { runLooper(); });
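+        // The consumer is created (and later destroyed) on the looper thread, which is where all
+        // calls into mConsumer are expected to happen.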
+ sendMessage(LooperMessage::CREATE_CONSUMER);
+ }
+
+ void publishAndConsumeKeyEvent();
+ void publishAndConsumeMotionStream();
+ void publishAndConsumeMotionDown(nsecs_t downTime);
+ void publishAndConsumeBatchedMotionMove(nsecs_t downTime);
+ void publishAndConsumeFocusEvent();
+ void publishAndConsumeCaptureEvent();
+ void publishAndConsumeDragEvent();
+ void publishAndConsumeTouchModeEvent();
+ void publishAndConsumeMotionEvent(int32_t action, nsecs_t downTime,
+ const std::vector<Pointer>& pointers);
+ void TearDown() override {
+        // Destroy the consumer, flushing any pending acks.
+ sendMessage(LooperMessage::DESTROY_CONSUMER);
+ {
+ std::unique_lock lock(mLock);
+ base::ScopedLockAssertion assumeLocked(mLock);
+ mNotifyConsumerDestroyed.wait(lock, [this] { return mConsumerDestroyed; });
+ }
+ // Stop the looper thread so that we can destroy the object.
+ mExitLooper = true;
+ mLooper->wake();
+ mLooperThread.join();
+ }
+
+protected:
+ // Interaction with the looper thread
+ enum class LooperMessage : int {
+ CALL_PROBABLY_HAS_INPUT,
+ CREATE_CONSUMER,
+ DESTROY_CONSUMER,
+ CALL_REPORT_TIMELINE,
+ BLOCK_LOOPER,
+ };
+ void sendMessage(LooperMessage message);
+ struct ReportTimelineArgs {
+ int32_t inputEventId;
+ nsecs_t gpuCompletedTime;
+ nsecs_t presentTime;
+ };
+ // The input to the function "InputConsumer::reportTimeline". Populated on the test thread and
+ // accessed on the looper thread.
+ BlockingQueue<ReportTimelineArgs> mReportTimelineArgs;
+ // The output of calling "InputConsumer::probablyHasInput()". Populated on the looper thread and
+ // accessed on the test thread.
+ BlockingQueue<bool> mProbablyHasInputResponses;
+
+private:
+ sp<MessageHandler> mMessageHandler;
+ void handleMessage(const Message& message);
+
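+    // How long to wait when asserting that no event arrives; kept short so that the negative
+    // checks do not slow the tests down.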
+ static auto constexpr NO_EVENT_TIMEOUT = 10ms;
+ // The sequence number to use when publishing the next event
+ uint32_t mSeq = 1;
+
+ BlockingQueue<std::unique_ptr<KeyEvent>> mKeyEvents;
+ BlockingQueue<std::unique_ptr<MotionEvent>> mMotionEvents;
+ BlockingQueue<std::unique_ptr<FocusEvent>> mFocusEvents;
+ BlockingQueue<std::unique_ptr<CaptureEvent>> mCaptureEvents;
+ BlockingQueue<std::unique_ptr<DragEvent>> mDragEvents;
+ BlockingQueue<std::unique_ptr<TouchModeEvent>> mTouchModeEvents;
+
+ // InputConsumerCallbacks interface
+ void onKeyEvent(std::unique_ptr<KeyEvent> event, uint32_t seq) override {
+ mKeyEvents.push(std::move(event));
+ mConsumer->finishInputEvent(seq, true);
+ }
+ void onMotionEvent(std::unique_ptr<MotionEvent> event, uint32_t seq) override {
+ mMotionEvents.push(std::move(event));
+ mConsumer->finishInputEvent(seq, true);
+ }
+ void onBatchedInputEventPending(int32_t pendingBatchSource) override {
+ if (!mConsumer->probablyHasInput()) {
+ ADD_FAILURE() << "should deterministically have input because there is a batch";
+ }
+ mConsumer->consumeBatchedInputEvents(std::nullopt);
+ };
+ void onFocusEvent(std::unique_ptr<FocusEvent> event, uint32_t seq) override {
+ mFocusEvents.push(std::move(event));
+ mConsumer->finishInputEvent(seq, true);
+ };
+ void onCaptureEvent(std::unique_ptr<CaptureEvent> event, uint32_t seq) override {
+ mCaptureEvents.push(std::move(event));
+ mConsumer->finishInputEvent(seq, true);
+ };
+ void onDragEvent(std::unique_ptr<DragEvent> event, uint32_t seq) override {
+ mDragEvents.push(std::move(event));
+ mConsumer->finishInputEvent(seq, true);
+ }
+ void onTouchModeEvent(std::unique_ptr<TouchModeEvent> event, uint32_t seq) override {
+ mTouchModeEvents.push(std::move(event));
+ mConsumer->finishInputEvent(seq, true);
+ };
+};
+
+void InputPublisherAndConsumerNoResamplingTest::sendMessage(LooperMessage message) {
+ Message msg{ftl::to_underlying(message)};
+ mLooper->sendMessage(mMessageHandler, msg);
+}
+
+void InputPublisherAndConsumerNoResamplingTest::handleMessage(const Message& message) {
+ switch (static_cast<LooperMessage>(message.what)) {
+ case LooperMessage::CALL_PROBABLY_HAS_INPUT: {
+ mProbablyHasInputResponses.push(mConsumer->probablyHasInput());
+ break;
+ }
+ case LooperMessage::CREATE_CONSUMER: {
+ mConsumer = std::make_unique<InputConsumerNoResampling>(std::move(mClientChannel),
+ mLooper, *this);
+ break;
+ }
+ case LooperMessage::DESTROY_CONSUMER: {
+ mConsumer = nullptr;
+ {
+ std::unique_lock lock(mLock);
+ mConsumerDestroyed = true;
+ }
+ mNotifyConsumerDestroyed.notify_all();
+ break;
+ }
+ case LooperMessage::CALL_REPORT_TIMELINE: {
+ std::optional<ReportTimelineArgs> args = mReportTimelineArgs.pop();
+ if (!args.has_value()) {
+ ADD_FAILURE() << "Couldn't get the 'reportTimeline' args in time";
+ return;
+ }
+ mConsumer->reportTimeline(args->inputEventId, args->gpuCompletedTime,
+ args->presentTime);
+ break;
+ }
+ case LooperMessage::BLOCK_LOOPER: {
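+            // Handshake with the test thread: report that the looper is blocked, wait until the
+            // test allows it to proceed, then report that it has unblocked.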
+ {
+ std::unique_lock lock(mLock);
+ mLooperIsBlocked = true;
+ }
+ mNotifyLooperWaiting.notify_all();
+
+ {
+ std::unique_lock lock(mLock);
+ base::ScopedLockAssertion assumeLocked(mLock);
+ mNotifyLooperMayProceed.wait(lock, [this] { return mLooperMayProceed; });
+ }
+
+ {
+ std::unique_lock lock(mLock);
+ mLooperIsBlocked = false;
+ }
+ mNotifyLooperWaiting.notify_all();
+ break;
+ }
+ }
+}
+
+void InputPublisherAndConsumerNoResamplingTest::publishAndConsumeKeyEvent() {
+ status_t status;
+
+ const uint32_t seq = mSeq++;
+ int32_t eventId = InputEvent::nextId();
+ constexpr int32_t deviceId = 1;
+ constexpr uint32_t source = AINPUT_SOURCE_KEYBOARD;
+ constexpr ui::LogicalDisplayId displayId = ui::LogicalDisplayId::DEFAULT;
+ constexpr std::array<uint8_t, 32> hmac = {31, 30, 29, 28, 27, 26, 25, 24, 23, 22, 21,
+ 20, 19, 18, 17, 16, 15, 14, 13, 12, 11, 10,
+ 9, 8, 7, 6, 5, 4, 3, 2, 1, 0};
+ constexpr int32_t action = AKEY_EVENT_ACTION_DOWN;
+ constexpr int32_t flags = AKEY_EVENT_FLAG_FROM_SYSTEM;
+ constexpr int32_t keyCode = AKEYCODE_ENTER;
+ constexpr int32_t scanCode = 13;
+ constexpr int32_t metaState = AMETA_ALT_LEFT_ON | AMETA_ALT_ON;
+ constexpr int32_t repeatCount = 1;
+ constexpr nsecs_t downTime = 3;
+ constexpr nsecs_t eventTime = 4;
+ const nsecs_t publishTime = systemTime(SYSTEM_TIME_MONOTONIC);
+
+ status = mPublisher->publishKeyEvent(seq, eventId, deviceId, source, displayId, hmac, action,
+ flags, keyCode, scanCode, metaState, repeatCount, downTime,
+ eventTime);
+ ASSERT_EQ(OK, status) << "publisher publishKeyEvent should return OK";
+
+ std::optional<std::unique_ptr<KeyEvent>> optKeyEvent = mKeyEvents.popWithTimeout(TIMEOUT);
+ ASSERT_TRUE(optKeyEvent.has_value()) << "consumer should have returned non-NULL event";
+ std::unique_ptr<KeyEvent> keyEvent = std::move(*optKeyEvent);
+
+ sendMessage(LooperMessage::CALL_PROBABLY_HAS_INPUT);
+ std::optional<bool> probablyHasInput = mProbablyHasInputResponses.popWithTimeout(TIMEOUT);
+ ASSERT_TRUE(probablyHasInput.has_value());
+ ASSERT_FALSE(probablyHasInput.value()) << "no events should be waiting after being consumed";
+
+ EXPECT_EQ(eventId, keyEvent->getId());
+ EXPECT_EQ(deviceId, keyEvent->getDeviceId());
+ EXPECT_EQ(source, keyEvent->getSource());
+ EXPECT_EQ(displayId, keyEvent->getDisplayId());
+ EXPECT_EQ(hmac, keyEvent->getHmac());
+ EXPECT_EQ(action, keyEvent->getAction());
+ EXPECT_EQ(flags, keyEvent->getFlags());
+ EXPECT_EQ(keyCode, keyEvent->getKeyCode());
+ EXPECT_EQ(scanCode, keyEvent->getScanCode());
+ EXPECT_EQ(metaState, keyEvent->getMetaState());
+ EXPECT_EQ(repeatCount, keyEvent->getRepeatCount());
+ EXPECT_EQ(downTime, keyEvent->getDownTime());
+ EXPECT_EQ(eventTime, keyEvent->getEventTime());
+
+ verifyFinishedSignal(*mPublisher, seq, publishTime);
+}
+
+void InputPublisherAndConsumerNoResamplingTest::publishAndConsumeMotionStream() {
+ const nsecs_t downTime = systemTime(SYSTEM_TIME_MONOTONIC);
+
+ publishAndConsumeMotionEvent(AMOTION_EVENT_ACTION_DOWN, downTime,
+ {Pointer{.id = 0, .x = 20, .y = 30}});
+
+ publishAndConsumeMotionEvent(POINTER_1_DOWN, downTime,
+ {Pointer{.id = 0, .x = 20, .y = 30},
+ Pointer{.id = 1, .x = 200, .y = 300}});
+
+ publishAndConsumeMotionEvent(POINTER_2_DOWN, downTime,
+ {Pointer{.id = 0, .x = 20, .y = 30},
+ Pointer{.id = 1, .x = 200, .y = 300},
+ Pointer{.id = 2, .x = 300, .y = 400}});
+
+ // Provide a consistent input stream - cancel the gesture that was started above
+ publishAndConsumeMotionEvent(AMOTION_EVENT_ACTION_CANCEL, downTime,
+ {Pointer{.id = 0, .x = 20, .y = 30},
+ Pointer{.id = 1, .x = 200, .y = 300},
+ Pointer{.id = 2, .x = 300, .y = 400}});
+}
+
+void InputPublisherAndConsumerNoResamplingTest::publishAndConsumeMotionDown(nsecs_t downTime) {
+ publishAndConsumeMotionEvent(AMOTION_EVENT_ACTION_DOWN, downTime,
+ {Pointer{.id = 0, .x = 20, .y = 30}});
+}
+
+void InputPublisherAndConsumerNoResamplingTest::publishAndConsumeBatchedMotionMove(
+ nsecs_t downTime) {
+ uint32_t seq = mSeq++;
+ const std::vector<Pointer> pointers = {Pointer{.id = 0, .x = 20, .y = 30}};
+ PublishMotionArgs args(AMOTION_EVENT_ACTION_MOVE, downTime, pointers, seq);
+ const nsecs_t publishTime = systemTime(SYSTEM_TIME_MONOTONIC);
+
+    // Block the looper thread so that it cannot service any of the fd callbacks.
+
+ {
+ std::scoped_lock lock(mLock);
+ mLooperMayProceed = false;
+ }
+ sendMessage(LooperMessage::BLOCK_LOOPER);
+ {
+ std::unique_lock lock(mLock);
+ mNotifyLooperWaiting.wait(lock, [this] { return mLooperIsBlocked; });
+ }
+
+ publishMotionEvent(*mPublisher, args);
+
+    // Ensure no event arrives because the looper thread is blocked
+ std::optional<std::unique_ptr<MotionEvent>> noEvent =
+ mMotionEvents.popWithTimeout(NO_EVENT_TIMEOUT);
+ ASSERT_FALSE(noEvent.has_value()) << "Got unexpected event: " << *noEvent;
+
+ Result<InputPublisher::ConsumerResponse> result = mPublisher->receiveConsumerResponse();
+ ASSERT_FALSE(result.ok());
+ ASSERT_EQ(WOULD_BLOCK, result.error().code());
+
+    // mConsumer should normally only be used on the looper thread, but since that thread is
+    // blocked here, calling it from the test thread should be safe.
+ ASSERT_TRUE(mConsumer->probablyHasInput())
+ << "should deterministically have input because there is a batch";
+
+ // Now, unblock the looper thread, so that the event can arrive.
+ {
+ std::scoped_lock lock(mLock);
+ mLooperMayProceed = true;
+ }
+ mNotifyLooperMayProceed.notify_all();
+
+ std::optional<std::unique_ptr<MotionEvent>> optMotion = mMotionEvents.popWithTimeout(TIMEOUT);
+ ASSERT_TRUE(optMotion.has_value());
+ std::unique_ptr<MotionEvent> motion = std::move(*optMotion);
+ ASSERT_EQ(ACTION_MOVE, motion->getAction());
+
+ verifyFinishedSignal(*mPublisher, seq, publishTime);
+}
+
+void InputPublisherAndConsumerNoResamplingTest::publishAndConsumeMotionEvent(
+ int32_t action, nsecs_t downTime, const std::vector<Pointer>& pointers) {
+ uint32_t seq = mSeq++;
+ PublishMotionArgs args(action, downTime, pointers, seq);
+ nsecs_t publishTime = systemTime(SYSTEM_TIME_MONOTONIC);
+ publishMotionEvent(*mPublisher, args);
+
+ std::optional<std::unique_ptr<MotionEvent>> optMotion = mMotionEvents.popWithTimeout(TIMEOUT);
+ ASSERT_TRUE(optMotion.has_value());
+ std::unique_ptr<MotionEvent> event = std::move(*optMotion);
+
+ verifyArgsEqualToEvent(args, *event);
+
+ verifyFinishedSignal(*mPublisher, seq, publishTime);
+}
+
+void InputPublisherAndConsumerNoResamplingTest::publishAndConsumeFocusEvent() {
+ status_t status;
+
+ constexpr uint32_t seq = 15;
+ int32_t eventId = InputEvent::nextId();
+ constexpr bool hasFocus = true;
+ const nsecs_t publishTime = systemTime(SYSTEM_TIME_MONOTONIC);
+
+ status = mPublisher->publishFocusEvent(seq, eventId, hasFocus);
+ ASSERT_EQ(OK, status) << "publisher publishFocusEvent should return OK";
+
+ std::optional<std::unique_ptr<FocusEvent>> optFocusEvent = mFocusEvents.popWithTimeout(TIMEOUT);
+ ASSERT_TRUE(optFocusEvent.has_value()) << "consumer should have returned non-NULL event";
+ std::unique_ptr<FocusEvent> focusEvent = std::move(*optFocusEvent);
+ EXPECT_EQ(eventId, focusEvent->getId());
+ EXPECT_EQ(hasFocus, focusEvent->getHasFocus());
+
+ verifyFinishedSignal(*mPublisher, seq, publishTime);
+}
+
+void InputPublisherAndConsumerNoResamplingTest::publishAndConsumeCaptureEvent() {
+ status_t status;
+
+ constexpr uint32_t seq = 42;
+ int32_t eventId = InputEvent::nextId();
+ constexpr bool captureEnabled = true;
+ const nsecs_t publishTime = systemTime(SYSTEM_TIME_MONOTONIC);
+
+ status = mPublisher->publishCaptureEvent(seq, eventId, captureEnabled);
+ ASSERT_EQ(OK, status) << "publisher publishCaptureEvent should return OK";
+
+ std::optional<std::unique_ptr<CaptureEvent>> optEvent = mCaptureEvents.popWithTimeout(TIMEOUT);
+ ASSERT_TRUE(optEvent.has_value()) << "consumer should have returned non-NULL event";
+ std::unique_ptr<CaptureEvent> event = std::move(*optEvent);
+
+ const CaptureEvent& captureEvent = *event;
+ EXPECT_EQ(eventId, captureEvent.getId());
+ EXPECT_EQ(captureEnabled, captureEvent.getPointerCaptureEnabled());
+
+ verifyFinishedSignal(*mPublisher, seq, publishTime);
+}
+
+void InputPublisherAndConsumerNoResamplingTest::publishAndConsumeDragEvent() {
+ status_t status;
+
+ constexpr uint32_t seq = 15;
+ int32_t eventId = InputEvent::nextId();
+ constexpr bool isExiting = false;
+ constexpr float x = 10;
+ constexpr float y = 15;
+ const nsecs_t publishTime = systemTime(SYSTEM_TIME_MONOTONIC);
+
+ status = mPublisher->publishDragEvent(seq, eventId, x, y, isExiting);
+ ASSERT_EQ(OK, status) << "publisher publishDragEvent should return OK";
+
+ std::optional<std::unique_ptr<DragEvent>> optEvent = mDragEvents.popWithTimeout(TIMEOUT);
+ ASSERT_TRUE(optEvent.has_value()) << "consumer should have returned non-NULL event";
+ std::unique_ptr<DragEvent> event = std::move(*optEvent);
+
+ const DragEvent& dragEvent = *event;
+ EXPECT_EQ(eventId, dragEvent.getId());
+ EXPECT_EQ(isExiting, dragEvent.isExiting());
+ EXPECT_EQ(x, dragEvent.getX());
+ EXPECT_EQ(y, dragEvent.getY());
+
+ verifyFinishedSignal(*mPublisher, seq, publishTime);
+}
+
+void InputPublisherAndConsumerNoResamplingTest::publishAndConsumeTouchModeEvent() {
+ status_t status;
+
+ constexpr uint32_t seq = 15;
+ int32_t eventId = InputEvent::nextId();
+ constexpr bool touchModeEnabled = true;
+ const nsecs_t publishTime = systemTime(SYSTEM_TIME_MONOTONIC);
+
+ status = mPublisher->publishTouchModeEvent(seq, eventId, touchModeEnabled);
+ ASSERT_EQ(OK, status) << "publisher publishTouchModeEvent should return OK";
+
+ std::optional<std::unique_ptr<TouchModeEvent>> optEvent =
+ mTouchModeEvents.popWithTimeout(TIMEOUT);
+ ASSERT_TRUE(optEvent.has_value());
+ std::unique_ptr<TouchModeEvent> event = std::move(*optEvent);
+
+ const TouchModeEvent& touchModeEvent = *event;
+ EXPECT_EQ(eventId, touchModeEvent.getId());
+ EXPECT_EQ(touchModeEnabled, touchModeEvent.isInTouchMode());
+
+ verifyFinishedSignal(*mPublisher, seq, publishTime);
+}
+
+TEST_F(InputPublisherAndConsumerNoResamplingTest, SendTimeline) {
+ const int32_t inputEventId = 20;
+ const nsecs_t gpuCompletedTime = 30;
+ const nsecs_t presentTime = 40;
+
+ mReportTimelineArgs.emplace(inputEventId, gpuCompletedTime, presentTime);
+ sendMessage(LooperMessage::CALL_REPORT_TIMELINE);
+
+ Result<InputPublisher::ConsumerResponse> result = receiveConsumerResponse(*mPublisher, TIMEOUT);
+ ASSERT_TRUE(result.ok()) << "receiveConsumerResponse should return OK";
+ ASSERT_TRUE(std::holds_alternative<InputPublisher::Timeline>(*result));
+ const InputPublisher::Timeline& timeline = std::get<InputPublisher::Timeline>(*result);
+ ASSERT_EQ(inputEventId, timeline.inputEventId);
+ ASSERT_EQ(gpuCompletedTime, timeline.graphicsTimeline[GraphicsTimeline::GPU_COMPLETED_TIME]);
+ ASSERT_EQ(presentTime, timeline.graphicsTimeline[GraphicsTimeline::PRESENT_TIME]);
+}
+
+TEST_F(InputPublisherAndConsumerNoResamplingTest, PublishKeyEvent_EndToEnd) {
+ ASSERT_NO_FATAL_FAILURE(publishAndConsumeKeyEvent());
+}
+
+TEST_F(InputPublisherAndConsumerNoResamplingTest, PublishMotionEvent_EndToEnd) {
+ ASSERT_NO_FATAL_FAILURE(publishAndConsumeMotionStream());
+}
+
+TEST_F(InputPublisherAndConsumerNoResamplingTest, PublishMotionMoveEvent_EndToEnd) {
+ // Publish a DOWN event before MOVE to pass the InputVerifier checks.
+ const nsecs_t downTime = systemTime(SYSTEM_TIME_MONOTONIC);
+ ASSERT_NO_FATAL_FAILURE(publishAndConsumeMotionDown(downTime));
+
+ // Publish the MOVE event and check expectations.
+ ASSERT_NO_FATAL_FAILURE(publishAndConsumeBatchedMotionMove(downTime));
+}
+
+TEST_F(InputPublisherAndConsumerNoResamplingTest, PublishFocusEvent_EndToEnd) {
+ ASSERT_NO_FATAL_FAILURE(publishAndConsumeFocusEvent());
+}
+
+TEST_F(InputPublisherAndConsumerNoResamplingTest, PublishCaptureEvent_EndToEnd) {
+ ASSERT_NO_FATAL_FAILURE(publishAndConsumeCaptureEvent());
+}
+
+TEST_F(InputPublisherAndConsumerNoResamplingTest, PublishDragEvent_EndToEnd) {
+ ASSERT_NO_FATAL_FAILURE(publishAndConsumeDragEvent());
+}
+
+TEST_F(InputPublisherAndConsumerNoResamplingTest, PublishTouchModeEvent_EndToEnd) {
+ ASSERT_NO_FATAL_FAILURE(publishAndConsumeTouchModeEvent());
+}
+
+TEST_F(InputPublisherAndConsumerNoResamplingTest,
+ PublishMotionEvent_WhenSequenceNumberIsZero_ReturnsError) {
+ status_t status;
+ const size_t pointerCount = 1;
+ PointerProperties pointerProperties[pointerCount];
+ PointerCoords pointerCoords[pointerCount];
+ for (size_t i = 0; i < pointerCount; i++) {
+ pointerProperties[i].clear();
+ pointerCoords[i].clear();
+ }
+
+ ui::Transform identityTransform;
+ status =
+ mPublisher->publishMotionEvent(0, InputEvent::nextId(), 0, 0,
+ ui::LogicalDisplayId::DEFAULT, INVALID_HMAC, 0, 0, 0, 0,
+ 0, 0, MotionClassification::NONE, identityTransform, 0,
+ 0, AMOTION_EVENT_INVALID_CURSOR_POSITION,
+ AMOTION_EVENT_INVALID_CURSOR_POSITION, identityTransform,
+ 0, 0, pointerCount, pointerProperties, pointerCoords);
+ ASSERT_EQ(BAD_VALUE, status) << "publisher publishMotionEvent should return BAD_VALUE";
+}
+
+TEST_F(InputPublisherAndConsumerNoResamplingTest,
+ PublishMotionEvent_WhenPointerCountLessThan1_ReturnsError) {
+ status_t status;
+ const size_t pointerCount = 0;
+ PointerProperties pointerProperties[pointerCount];
+ PointerCoords pointerCoords[pointerCount];
+
+ ui::Transform identityTransform;
+ status =
+ mPublisher->publishMotionEvent(1, InputEvent::nextId(), 0, 0,
+ ui::LogicalDisplayId::DEFAULT, INVALID_HMAC, 0, 0, 0, 0,
+ 0, 0, MotionClassification::NONE, identityTransform, 0,
+ 0, AMOTION_EVENT_INVALID_CURSOR_POSITION,
+ AMOTION_EVENT_INVALID_CURSOR_POSITION, identityTransform,
+ 0, 0, pointerCount, pointerProperties, pointerCoords);
+ ASSERT_EQ(BAD_VALUE, status) << "publisher publishMotionEvent should return BAD_VALUE";
+}
+
+TEST_F(InputPublisherAndConsumerNoResamplingTest,
+ PublishMotionEvent_WhenPointerCountGreaterThanMax_ReturnsError) {
+ status_t status;
+ const size_t pointerCount = MAX_POINTERS + 1;
+ PointerProperties pointerProperties[pointerCount];
+ PointerCoords pointerCoords[pointerCount];
+ for (size_t i = 0; i < pointerCount; i++) {
+ pointerProperties[i].clear();
+ pointerCoords[i].clear();
+ }
+
+ ui::Transform identityTransform;
+ status =
+ mPublisher->publishMotionEvent(1, InputEvent::nextId(), 0, 0,
+ ui::LogicalDisplayId::DEFAULT, INVALID_HMAC, 0, 0, 0, 0,
+ 0, 0, MotionClassification::NONE, identityTransform, 0,
+ 0, AMOTION_EVENT_INVALID_CURSOR_POSITION,
+ AMOTION_EVENT_INVALID_CURSOR_POSITION, identityTransform,
+ 0, 0, pointerCount, pointerProperties, pointerCoords);
+ ASSERT_EQ(BAD_VALUE, status) << "publisher publishMotionEvent should return BAD_VALUE";
+}
+
+TEST_F(InputPublisherAndConsumerNoResamplingTest, PublishMultipleEvents_EndToEnd) {
+ const nsecs_t downTime = systemTime(SYSTEM_TIME_MONOTONIC);
+
+ publishAndConsumeMotionEvent(AMOTION_EVENT_ACTION_DOWN, downTime,
+ {Pointer{.id = 0, .x = 20, .y = 30}});
+ ASSERT_NO_FATAL_FAILURE(publishAndConsumeKeyEvent());
+ publishAndConsumeMotionEvent(POINTER_1_DOWN, downTime,
+ {Pointer{.id = 0, .x = 20, .y = 30},
+ Pointer{.id = 1, .x = 200, .y = 300}});
+ ASSERT_NO_FATAL_FAILURE(publishAndConsumeFocusEvent());
+ publishAndConsumeMotionEvent(POINTER_2_DOWN, downTime,
+ {Pointer{.id = 0, .x = 20, .y = 30},
+ Pointer{.id = 1, .x = 200, .y = 300},
+ Pointer{.id = 2, .x = 200, .y = 300}});
+ ASSERT_NO_FATAL_FAILURE(publishAndConsumeKeyEvent());
+ ASSERT_NO_FATAL_FAILURE(publishAndConsumeCaptureEvent());
+ ASSERT_NO_FATAL_FAILURE(publishAndConsumeDragEvent());
+    // Provide a consistent input stream: cancel the gesture that was started above.
+ publishAndConsumeMotionEvent(AMOTION_EVENT_ACTION_CANCEL, downTime,
+ {Pointer{.id = 0, .x = 20, .y = 30},
+ Pointer{.id = 1, .x = 200, .y = 300},
+ Pointer{.id = 2, .x = 200, .y = 300}});
+ ASSERT_NO_FATAL_FAILURE(publishAndConsumeKeyEvent());
+ ASSERT_NO_FATAL_FAILURE(publishAndConsumeTouchModeEvent());
+}
+
+} // namespace android
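The three *_ReturnsError tests above (zero sequence number, zero pointers, more than MAX_POINTERS pointers) all rely on publishMotionEvent rejecting bad arguments before anything is written to the channel. A minimal self-contained sketch of that kind of validation, using stand-in types and values; the real checks live in the publisher implementation and may differ in detail:

    #include <cstddef>
    #include <cstdint>

    // Stand-ins for the Android types and constants used by the tests (assumed for this sketch).
    using status_t = int32_t;
    constexpr status_t OK = 0;
    constexpr status_t BAD_VALUE = -22;  // mirrors -EINVAL
    constexpr size_t MAX_POINTERS = 16;

    // Argument validation of the kind the *_ReturnsError tests exercise.
    status_t publishMotionEventChecked(uint32_t seq, size_t pointerCount) {
        if (seq == 0) {
            return BAD_VALUE;  // a zero sequence number could never be acknowledged by the consumer
        }
        if (pointerCount < 1 || pointerCount > MAX_POINTERS) {
            return BAD_VALUE;  // pointer count must be in [1, MAX_POINTERS]
        }
        // ... serialize the event into an InputMessage and send it over the channel ...
        return OK;
    }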
diff --git a/libs/input/tests/InputPublisherAndConsumer_test.cpp b/libs/input/tests/InputPublisherAndConsumer_test.cpp
index 35430207f9..e65a919bd6 100644
--- a/libs/input/tests/InputPublisherAndConsumer_test.cpp
+++ b/libs/input/tests/InputPublisherAndConsumer_test.cpp
@@ -14,11 +14,9 @@
* limitations under the License.
*/
-#include "TestHelpers.h"
-
#include <attestation/HmacKeyManager.h>
#include <gtest/gtest.h>
-#include <gui/constants.h>
+#include <input/InputConsumer.h>
#include <input/InputTransport.h>
using android::base::Result;
@@ -50,7 +48,7 @@ struct PublishMotionArgs {
const int32_t eventId;
const int32_t deviceId = 1;
const uint32_t source = AINPUT_SOURCE_TOUCHSCREEN;
- const int32_t displayId = ADISPLAY_ID_DEFAULT;
+ const ui::LogicalDisplayId displayId = ui::LogicalDisplayId::DEFAULT;
const int32_t actionButton = 0;
const int32_t edgeFlags = AMOTION_EVENT_EDGE_FLAG_TOP;
const int32_t metaState = AMETA_ALT_LEFT_ON | AMETA_ALT_ON;
@@ -91,7 +89,9 @@ PublishMotionArgs::PublishMotionArgs(int32_t inAction, nsecs_t inDownTime,
hmac = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31};
- flags = AMOTION_EVENT_FLAG_WINDOW_IS_OBSCURED;
+ flags = AMOTION_EVENT_FLAG_WINDOW_IS_OBSCURED |
+ AMOTION_EVENT_PRIVATE_FLAG_SUPPORTS_ORIENTATION |
+ AMOTION_EVENT_PRIVATE_FLAG_SUPPORTS_DIRECTIONAL_ORIENTATION;
if (action == AMOTION_EVENT_ACTION_CANCEL) {
flags |= AMOTION_EVENT_FLAG_CANCELED;
}
@@ -135,8 +135,10 @@ void verifyArgsEqualToEvent(const PublishMotionArgs& args, const MotionEvent& mo
EXPECT_EQ(args.buttonState, motionEvent.getButtonState());
EXPECT_EQ(args.classification, motionEvent.getClassification());
EXPECT_EQ(args.transform, motionEvent.getTransform());
- EXPECT_EQ(args.xOffset, motionEvent.getXOffset());
- EXPECT_EQ(args.yOffset, motionEvent.getYOffset());
+ EXPECT_NEAR((-args.rawXOffset / args.rawXScale) * args.xScale + args.xOffset,
+ motionEvent.getRawXOffset(), EPSILON);
+ EXPECT_NEAR((-args.rawYOffset / args.rawYScale) * args.yScale + args.yOffset,
+ motionEvent.getRawYOffset(), EPSILON);
EXPECT_EQ(args.xPrecision, motionEvent.getXPrecision());
EXPECT_EQ(args.yPrecision, motionEvent.getYPrecision());
EXPECT_NEAR(args.xCursorPosition, motionEvent.getRawXCursorPosition(), EPSILON);
@@ -262,7 +264,7 @@ void InputPublisherAndConsumerTest::publishAndConsumeKeyEvent() {
int32_t eventId = InputEvent::nextId();
constexpr int32_t deviceId = 1;
constexpr uint32_t source = AINPUT_SOURCE_KEYBOARD;
- constexpr int32_t displayId = ADISPLAY_ID_DEFAULT;
+ constexpr ui::LogicalDisplayId displayId = ui::LogicalDisplayId::DEFAULT;
constexpr std::array<uint8_t, 32> hmac = {31, 30, 29, 28, 27, 26, 25, 24, 23, 22, 21,
20, 19, 18, 17, 16, 15, 14, 13, 12, 11, 10,
9, 8, 7, 6, 5, 4, 3, 2, 1, 0};
@@ -622,13 +624,13 @@ TEST_F(InputPublisherAndConsumerTest, PublishMotionEvent_WhenSequenceNumberIsZer
ui::Transform identityTransform;
status =
- mPublisher->publishMotionEvent(0, InputEvent::nextId(), 0, 0, 0, INVALID_HMAC, 0, 0, 0,
- 0, 0, 0, MotionClassification::NONE, identityTransform,
- 0, 0, AMOTION_EVENT_INVALID_CURSOR_POSITION,
+ mPublisher->publishMotionEvent(0, InputEvent::nextId(), 0, 0,
+ ui::LogicalDisplayId::DEFAULT, INVALID_HMAC, 0, 0, 0, 0,
+ 0, 0, MotionClassification::NONE, identityTransform, 0,
+ 0, AMOTION_EVENT_INVALID_CURSOR_POSITION,
AMOTION_EVENT_INVALID_CURSOR_POSITION, identityTransform,
0, 0, pointerCount, pointerProperties, pointerCoords);
- ASSERT_EQ(BAD_VALUE, status)
- << "publisher publishMotionEvent should return BAD_VALUE";
+ ASSERT_EQ(BAD_VALUE, status) << "publisher publishMotionEvent should return BAD_VALUE";
}
TEST_F(InputPublisherAndConsumerTest, PublishMotionEvent_WhenPointerCountLessThan1_ReturnsError) {
@@ -639,17 +641,17 @@ TEST_F(InputPublisherAndConsumerTest, PublishMotionEvent_WhenPointerCountLessTha
ui::Transform identityTransform;
status =
- mPublisher->publishMotionEvent(1, InputEvent::nextId(), 0, 0, 0, INVALID_HMAC, 0, 0, 0,
- 0, 0, 0, MotionClassification::NONE, identityTransform,
- 0, 0, AMOTION_EVENT_INVALID_CURSOR_POSITION,
+ mPublisher->publishMotionEvent(1, InputEvent::nextId(), 0, 0,
+ ui::LogicalDisplayId::DEFAULT, INVALID_HMAC, 0, 0, 0, 0,
+ 0, 0, MotionClassification::NONE, identityTransform, 0,
+ 0, AMOTION_EVENT_INVALID_CURSOR_POSITION,
AMOTION_EVENT_INVALID_CURSOR_POSITION, identityTransform,
0, 0, pointerCount, pointerProperties, pointerCoords);
- ASSERT_EQ(BAD_VALUE, status)
- << "publisher publishMotionEvent should return BAD_VALUE";
+ ASSERT_EQ(BAD_VALUE, status) << "publisher publishMotionEvent should return BAD_VALUE";
}
TEST_F(InputPublisherAndConsumerTest,
- PublishMotionEvent_WhenPointerCountGreaterThanMax_ReturnsError) {
+ PublishMotionEvent_WhenPointerCountGreaterThanMax_ReturnsError) {
status_t status;
const size_t pointerCount = MAX_POINTERS + 1;
PointerProperties pointerProperties[pointerCount];
@@ -661,13 +663,13 @@ TEST_F(InputPublisherAndConsumerTest,
ui::Transform identityTransform;
status =
- mPublisher->publishMotionEvent(1, InputEvent::nextId(), 0, 0, 0, INVALID_HMAC, 0, 0, 0,
- 0, 0, 0, MotionClassification::NONE, identityTransform,
- 0, 0, AMOTION_EVENT_INVALID_CURSOR_POSITION,
+ mPublisher->publishMotionEvent(1, InputEvent::nextId(), 0, 0,
+ ui::LogicalDisplayId::DEFAULT, INVALID_HMAC, 0, 0, 0, 0,
+ 0, 0, MotionClassification::NONE, identityTransform, 0,
+ 0, AMOTION_EVENT_INVALID_CURSOR_POSITION,
AMOTION_EVENT_INVALID_CURSOR_POSITION, identityTransform,
0, 0, pointerCount, pointerProperties, pointerCoords);
- ASSERT_EQ(BAD_VALUE, status)
- << "publisher publishMotionEvent should return BAD_VALUE";
+ ASSERT_EQ(BAD_VALUE, status) << "publisher publishMotionEvent should return BAD_VALUE";
}
TEST_F(InputPublisherAndConsumerTest, PublishMultipleEvents_EndToEnd) {
diff --git a/libs/input/tests/MotionPredictorMetricsManager_test.cpp b/libs/input/tests/MotionPredictorMetricsManager_test.cpp
index 31cc1459fc..cc41eeb5e7 100644
--- a/libs/input/tests/MotionPredictorMetricsManager_test.cpp
+++ b/libs/input/tests/MotionPredictorMetricsManager_test.cpp
@@ -238,14 +238,17 @@ TEST(MakeMotionEventTest, MakeLiftMotionEvent) {
// --- Ground-truth-generation helper functions. ---
+// Generates numPoints ground truth points with values equal to those of the given
+// GroundTruthPoint, and with consecutive timestamps separated by the given inputInterval.
std::vector<GroundTruthPoint> generateConstantGroundTruthPoints(
- const GroundTruthPoint& groundTruthPoint, size_t numPoints) {
+ const GroundTruthPoint& groundTruthPoint, size_t numPoints,
+ nsecs_t inputInterval = TEST_PREDICTION_INTERVAL_NANOS) {
std::vector<GroundTruthPoint> groundTruthPoints;
nsecs_t timestamp = groundTruthPoint.timestamp;
for (size_t i = 0; i < numPoints; ++i) {
groundTruthPoints.emplace_back(groundTruthPoint);
groundTruthPoints.back().timestamp = timestamp;
- timestamp += TEST_PREDICTION_INTERVAL_NANOS;
+ timestamp += inputInterval;
}
return groundTruthPoints;
}
@@ -280,7 +283,8 @@ TEST(GenerateConstantGroundTruthPointsTest, BasicTest) {
const GroundTruthPoint groundTruthPoint{{.position = Eigen::Vector2f(10, 20), .pressure = 0.3f},
.timestamp = TEST_INITIAL_TIMESTAMP};
const std::vector<GroundTruthPoint> groundTruthPoints =
- generateConstantGroundTruthPoints(groundTruthPoint, /*numPoints=*/3);
+ generateConstantGroundTruthPoints(groundTruthPoint, /*numPoints=*/3,
+ /*inputInterval=*/10);
ASSERT_EQ(3u, groundTruthPoints.size());
// First point.
@@ -290,11 +294,11 @@ TEST(GenerateConstantGroundTruthPointsTest, BasicTest) {
// Second point.
EXPECT_EQ(groundTruthPoints[1].position, groundTruthPoint.position);
EXPECT_EQ(groundTruthPoints[1].pressure, groundTruthPoint.pressure);
- EXPECT_GT(groundTruthPoints[1].timestamp, groundTruthPoints[0].timestamp);
+ EXPECT_EQ(groundTruthPoints[1].timestamp, groundTruthPoint.timestamp + 10);
// Third point.
EXPECT_EQ(groundTruthPoints[2].position, groundTruthPoint.position);
EXPECT_EQ(groundTruthPoints[2].pressure, groundTruthPoint.pressure);
- EXPECT_GT(groundTruthPoints[2].timestamp, groundTruthPoints[1].timestamp);
+ EXPECT_EQ(groundTruthPoints[2].timestamp, groundTruthPoint.timestamp + 20);
}
TEST(GenerateCircularArcGroundTruthTest, StraightLineUpwards) {
@@ -333,16 +337,19 @@ TEST(GenerateCircularArcGroundTruthTest, CounterclockwiseSquare) {
// --- Prediction-generation helper functions. ---
-// Creates a sequence of predictions with values equal to those of the given GroundTruthPoint.
-std::vector<PredictionPoint> generateConstantPredictions(const GroundTruthPoint& groundTruthPoint) {
+// Generates TEST_MAX_NUM_PREDICTIONS predictions with values equal to those of the given
+// GroundTruthPoint, and with consecutive timestamps separated by the given predictionInterval.
+std::vector<PredictionPoint> generateConstantPredictions(
+ const GroundTruthPoint& groundTruthPoint,
+ nsecs_t predictionInterval = TEST_PREDICTION_INTERVAL_NANOS) {
std::vector<PredictionPoint> predictions;
- nsecs_t predictionTimestamp = groundTruthPoint.timestamp + TEST_PREDICTION_INTERVAL_NANOS;
+ nsecs_t predictionTimestamp = groundTruthPoint.timestamp + predictionInterval;
for (size_t j = 0; j < TEST_MAX_NUM_PREDICTIONS; ++j) {
predictions.push_back(PredictionPoint{{.position = groundTruthPoint.position,
.pressure = groundTruthPoint.pressure},
.originTimestamp = groundTruthPoint.timestamp,
.targetTimestamp = predictionTimestamp});
- predictionTimestamp += TEST_PREDICTION_INTERVAL_NANOS;
+ predictionTimestamp += predictionInterval;
}
return predictions;
}
@@ -375,8 +382,9 @@ std::vector<PredictionPoint> generatePredictionsByLinearExtrapolation(
TEST(GeneratePredictionsTest, GenerateConstantPredictions) {
const GroundTruthPoint groundTruthPoint{{.position = Eigen::Vector2f(10, 20), .pressure = 0.3f},
.timestamp = TEST_INITIAL_TIMESTAMP};
+ const nsecs_t predictionInterval = 10;
const std::vector<PredictionPoint> predictionPoints =
- generateConstantPredictions(groundTruthPoint);
+ generateConstantPredictions(groundTruthPoint, predictionInterval);
ASSERT_EQ(TEST_MAX_NUM_PREDICTIONS, predictionPoints.size());
for (size_t i = 0; i < predictionPoints.size(); ++i) {
@@ -385,8 +393,7 @@ TEST(GeneratePredictionsTest, GenerateConstantPredictions) {
EXPECT_THAT(predictionPoints[i].pressure, FloatNear(groundTruthPoint.pressure, 1e-6));
EXPECT_EQ(predictionPoints[i].originTimestamp, groundTruthPoint.timestamp);
EXPECT_EQ(predictionPoints[i].targetTimestamp,
- groundTruthPoint.timestamp +
- static_cast<nsecs_t>(i + 1) * TEST_PREDICTION_INTERVAL_NANOS);
+ TEST_INITIAL_TIMESTAMP + static_cast<nsecs_t>(i + 1) * predictionInterval);
}
}
@@ -678,12 +685,9 @@ ReportAtomFunction createMockReportAtomFunction(std::vector<AtomFields>& reporte
// • groundTruthPoints: chronologically-ordered ground truth points, with at least 2 elements.
// • predictionPoints: the first index points to a vector of predictions corresponding to the
// source ground truth point with the same index.
-// - The first element should be empty, because there are not expected to be predictions until
-// we have received 2 ground truth points.
-// - The last element may be empty, because there will be no future ground truth points to
-// associate with those predictions (if not empty, it will be ignored).
+// - For empty prediction vectors, MetricsManager::onPredict will not be called.
// - To test all prediction buckets, there should be at least TEST_MAX_NUM_PREDICTIONS non-empty
-// prediction sets (that is, excluding the first and last). Thus, groundTruthPoints and
+// prediction vectors (that is, excluding the first and last). Thus, groundTruthPoints and
// predictionPoints should have size at least TEST_MAX_NUM_PREDICTIONS + 2.
//
// When the function returns, outReportedAtomFields will contain the reported AtomFields.
@@ -697,19 +701,12 @@ void runMetricsManager(const std::vector<GroundTruthPoint>& groundTruthPoints,
createMockReportAtomFunction(
outReportedAtomFields));
- // Validate structure of groundTruthPoints and predictionPoints.
- ASSERT_EQ(predictionPoints.size(), groundTruthPoints.size());
ASSERT_GE(groundTruthPoints.size(), 2u);
- ASSERT_EQ(predictionPoints[0].size(), 0u);
- for (size_t i = 1; i + 1 < predictionPoints.size(); ++i) {
- SCOPED_TRACE(testing::Message() << "i = " << i);
- ASSERT_EQ(predictionPoints[i].size(), TEST_MAX_NUM_PREDICTIONS);
- }
+ ASSERT_EQ(predictionPoints.size(), groundTruthPoints.size());
- // Pass ground truth points and predictions (for all except first and last ground truth).
for (size_t i = 0; i < groundTruthPoints.size(); ++i) {
metricsManager.onRecord(makeMotionEvent(groundTruthPoints[i]));
- if ((i > 0) && (i + 1 < predictionPoints.size())) {
+ if (!predictionPoints[i].empty()) {
metricsManager.onPredict(makeMotionEvent(predictionPoints[i]));
}
}
@@ -738,7 +735,7 @@ TEST(MotionPredictorMetricsManagerTest, NoPredictions) {
// Perfect predictions test:
// • Input: constant input events, perfect predictions matching the input events.
// • Expectation: all error metrics should be zero, or NO_DATA_SENTINEL for "unreported" metrics.
-// (For example, scale-invariant errors are only reported for the final time bucket.)
+// (For example, scale-invariant errors are only reported for the last time bucket.)
TEST(MotionPredictorMetricsManagerTest, ConstantGroundTruthPerfectPredictions) {
GroundTruthPoint groundTruthPoint{{.position = Eigen::Vector2f(10.0f, 20.0f), .pressure = 0.6f},
.timestamp = TEST_INITIAL_TIMESTAMP};
@@ -977,5 +974,35 @@ TEST(MotionPredictorMetricsManagerTest, CounterclockwiseOctagonGroundTruthLinear
}
}
+// Robustness test:
+// • Input: input events separated by a significantly greater time interval than the interval
+// between predictions.
+// • Expectation: the MetricsManager should not crash in this case. (No assertions are made about
+// the resulting metrics.)
+//
+// In practice, this scenario could arise either if the input and prediction intervals are
+// mismatched, or if input events are missing (dropped or skipped for some reason).
+TEST(MotionPredictorMetricsManagerTest, MismatchedInputAndPredictionInterval) {
+ // Create two ground truth points separated by MAX_NUM_PREDICTIONS * PREDICTION_INTERVAL,
+ // so that the second ground truth point corresponds to the last prediction bucket. This
+ // ensures that the scale-invariant error codepath will be run, giving full code coverage.
+ GroundTruthPoint groundTruthPoint{{.position = Eigen::Vector2f(0.0f, 0.0f), .pressure = 0.5f},
+ .timestamp = TEST_INITIAL_TIMESTAMP};
+ const nsecs_t inputInterval = TEST_MAX_NUM_PREDICTIONS * TEST_PREDICTION_INTERVAL_NANOS;
+ const std::vector<GroundTruthPoint> groundTruthPoints =
+ generateConstantGroundTruthPoints(groundTruthPoint, /*numPoints=*/2, inputInterval);
+
+ // Create predictions separated by the prediction interval.
+ std::vector<std::vector<PredictionPoint>> predictionPoints;
+ for (size_t i = 0; i < groundTruthPoints.size(); ++i) {
+ predictionPoints.push_back(
+ generateConstantPredictions(groundTruthPoints[i], TEST_PREDICTION_INTERVAL_NANOS));
+ }
+
+ // Test that we can run the MetricsManager without crashing.
+ std::vector<AtomFields> reportedAtomFields;
+ runMetricsManager(groundTruthPoints, predictionPoints, reportedAtomFields);
+}
+
} // namespace
} // namespace android
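In the MismatchedInputAndPredictionInterval test above, the two ground-truth points are separated by TEST_MAX_NUM_PREDICTIONS * TEST_PREDICTION_INTERVAL_NANOS, so among the predictions generated from the first point only the last one has a matching ground-truth sample, which is what exercises the last (scale-invariant) bucket. A small self-contained check of that arithmetic, with assumed stand-in values for the test constants:

    #include <cassert>
    #include <cstdint>

    int main() {
        // Assumed stand-ins; the real values are defined in the test file.
        constexpr int64_t kPredictionIntervalNanos = 4'000'000;  // TEST_PREDICTION_INTERVAL_NANOS
        constexpr int64_t kMaxNumPredictions = 5;                // TEST_MAX_NUM_PREDICTIONS
        constexpr int64_t t0 = 0;                                // first ground-truth timestamp

        const int64_t secondGroundTruth = t0 + kMaxNumPredictions * kPredictionIntervalNanos;
        for (int64_t j = 1; j <= kMaxNumPredictions; ++j) {
            const int64_t target = t0 + j * kPredictionIntervalNanos;  // j-th prediction target
            // Only the final prediction lines up with the second ground-truth sample,
            // so only the last prediction bucket receives an error measurement.
            assert((target == secondGroundTruth) == (j == kMaxNumPredictions));
        }
        return 0;
    }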
diff --git a/libs/input/tests/MotionPredictor_test.cpp b/libs/input/tests/MotionPredictor_test.cpp
index 33431146ea..d077760757 100644
--- a/libs/input/tests/MotionPredictor_test.cpp
+++ b/libs/input/tests/MotionPredictor_test.cpp
@@ -14,11 +14,14 @@
* limitations under the License.
*/
+// TODO(b/331815574): Decouple this test from assumed config values.
#include <chrono>
+#include <cmath>
+#include <com_android_input_flags.h>
+#include <flag_macros.h>
#include <gmock/gmock.h>
#include <gtest/gtest.h>
-#include <gui/constants.h>
#include <input/Input.h>
#include <input/MotionPredictor.h>
@@ -55,9 +58,10 @@ static MotionEvent getMotionEvent(int32_t action, float x, float y,
}
ui::Transform identityTransform;
- event.initialize(InputEvent::nextId(), deviceId, AINPUT_SOURCE_STYLUS, ADISPLAY_ID_DEFAULT, {0},
- action, /*actionButton=*/0, /*flags=*/0, AMOTION_EVENT_EDGE_FLAG_NONE,
- AMETA_NONE, /*buttonState=*/0, MotionClassification::NONE, identityTransform,
+ event.initialize(InputEvent::nextId(), deviceId, AINPUT_SOURCE_STYLUS,
+ ui::LogicalDisplayId::DEFAULT, {0}, action, /*actionButton=*/0, /*flags=*/0,
+ AMOTION_EVENT_EDGE_FLAG_NONE, AMETA_NONE, /*buttonState=*/0,
+ MotionClassification::NONE, identityTransform,
/*xPrecision=*/0.1,
/*yPrecision=*/0.2, /*xCursorPosition=*/280, /*yCursorPosition=*/540,
identityTransform, /*downTime=*/100, eventTime.count(), pointerCount,
@@ -65,6 +69,108 @@ static MotionEvent getMotionEvent(int32_t action, float x, float y,
return event;
}
+TEST(JerkTrackerTest, JerkReadiness) {
+ JerkTracker jerkTracker(true);
+ EXPECT_FALSE(jerkTracker.jerkMagnitude());
+ jerkTracker.pushSample(/*timestamp=*/0, 20, 50);
+ EXPECT_FALSE(jerkTracker.jerkMagnitude());
+ jerkTracker.pushSample(/*timestamp=*/1, 25, 53);
+ EXPECT_FALSE(jerkTracker.jerkMagnitude());
+ jerkTracker.pushSample(/*timestamp=*/2, 30, 60);
+ EXPECT_FALSE(jerkTracker.jerkMagnitude());
+ jerkTracker.pushSample(/*timestamp=*/3, 35, 70);
+ EXPECT_TRUE(jerkTracker.jerkMagnitude());
+ jerkTracker.reset();
+ EXPECT_FALSE(jerkTracker.jerkMagnitude());
+ jerkTracker.pushSample(/*timestamp=*/4, 30, 60);
+ EXPECT_FALSE(jerkTracker.jerkMagnitude());
+}
+
+TEST(JerkTrackerTest, JerkCalculationNormalizedDtTrue) {
+ JerkTracker jerkTracker(true);
+ jerkTracker.pushSample(/*timestamp=*/0, 20, 50);
+ jerkTracker.pushSample(/*timestamp=*/1, 25, 53);
+ jerkTracker.pushSample(/*timestamp=*/2, 30, 60);
+ jerkTracker.pushSample(/*timestamp=*/3, 45, 70);
+ /**
+ * Jerk derivative table
+ * x: 20 25 30 45
+ * x': 5 5 15
+ * x'': 0 10
+ * x''': 10
+ *
+ * y: 50 53 60 70
+ * y': 3 7 10
+ * y'': 4 3
+ * y''': -1
+ */
+ EXPECT_FLOAT_EQ(jerkTracker.jerkMagnitude().value(), std::hypot(10, -1));
+ jerkTracker.pushSample(/*timestamp=*/4, 20, 65);
+ /**
+ * (continuing from above table)
+ * x: 45 -> 20
+ * x': 15 -> -25
+ * x'': 10 -> -40
+ * x''': -50
+ *
+ * y: 70 -> 65
+ * y': 10 -> -5
+ * y'': 3 -> -15
+ * y''': -18
+ */
+ EXPECT_FLOAT_EQ(jerkTracker.jerkMagnitude().value(), std::hypot(-50, -18));
+}
+
+TEST(JerkTrackerTest, JerkCalculationNormalizedDtFalse) {
+ JerkTracker jerkTracker(false);
+ jerkTracker.pushSample(/*timestamp=*/0, 20, 50);
+ jerkTracker.pushSample(/*timestamp=*/10, 25, 53);
+ jerkTracker.pushSample(/*timestamp=*/20, 30, 60);
+ jerkTracker.pushSample(/*timestamp=*/30, 45, 70);
+ /**
+ * Jerk derivative table
+ * x: 20 25 30 45
+ * x': .5 .5 1.5
+ * x'': 0 .1
+ * x''': .01
+ *
+ * y: 50 53 60 70
+ * y': .3 .7 1
+ * y'': .04 .03
+ * y''': -.001
+ */
+ EXPECT_FLOAT_EQ(jerkTracker.jerkMagnitude().value(), std::hypot(.01, -.001));
+ jerkTracker.pushSample(/*timestamp=*/50, 20, 65);
+ /**
+ * (continuing from above table)
+ * x: 45 -> 20
+ * x': 1.5 -> -1.25 (delta above, divide by 20)
+ * x'': .1 -> -.275 (delta above, divide by 10)
+ * x''': -.0375 (delta above, divide by 10)
+ *
+ * y: 70 -> 65
+ * y': 1 -> -.25 (delta above, divide by 20)
+ * y'': .03 -> -.125 (delta above, divide by 10)
+ * y''': -.0155 (delta above, divide by 10)
+ */
+ EXPECT_FLOAT_EQ(jerkTracker.jerkMagnitude().value(), std::hypot(-.0375, -.0155));
+}
+
+TEST(JerkTrackerTest, JerkCalculationAfterReset) {
+ JerkTracker jerkTracker(true);
+ jerkTracker.pushSample(/*timestamp=*/0, 20, 50);
+ jerkTracker.pushSample(/*timestamp=*/1, 25, 53);
+ jerkTracker.pushSample(/*timestamp=*/2, 30, 60);
+ jerkTracker.pushSample(/*timestamp=*/3, 45, 70);
+ jerkTracker.pushSample(/*timestamp=*/4, 20, 65);
+ jerkTracker.reset();
+ jerkTracker.pushSample(/*timestamp=*/5, 20, 50);
+ jerkTracker.pushSample(/*timestamp=*/6, 25, 53);
+ jerkTracker.pushSample(/*timestamp=*/7, 30, 60);
+ jerkTracker.pushSample(/*timestamp=*/8, 45, 70);
+ EXPECT_FLOAT_EQ(jerkTracker.jerkMagnitude().value(), std::hypot(10, -1));
+}
+
TEST(MotionPredictorTest, IsPredictionAvailable) {
MotionPredictor predictor(/*predictionTimestampOffsetNanos=*/0,
[]() { return true /*enable prediction*/; });
@@ -94,18 +200,14 @@ TEST(MotionPredictorTest, Offset) {
TEST(MotionPredictorTest, FollowsGesture) {
MotionPredictor predictor(/*predictionTimestampOffsetNanos=*/0,
[]() { return true /*enable prediction*/; });
+ predictor.record(getMotionEvent(DOWN, 3.75, 3, 20ms));
+ predictor.record(getMotionEvent(MOVE, 4.8, 3, 30ms));
+ predictor.record(getMotionEvent(MOVE, 6.2, 3, 40ms));
+ predictor.record(getMotionEvent(MOVE, 8, 3, 50ms));
+ EXPECT_NE(nullptr, predictor.predict(90 * NSEC_PER_MSEC));
- // MOVE without a DOWN is ignored.
- predictor.record(getMotionEvent(MOVE, 1, 3, 10ms));
- EXPECT_EQ(nullptr, predictor.predict(20 * NSEC_PER_MSEC));
-
- predictor.record(getMotionEvent(DOWN, 2, 5, 20ms));
- predictor.record(getMotionEvent(MOVE, 2, 7, 30ms));
- predictor.record(getMotionEvent(MOVE, 3, 9, 40ms));
- EXPECT_NE(nullptr, predictor.predict(50 * NSEC_PER_MSEC));
-
- predictor.record(getMotionEvent(UP, 4, 11, 50ms));
- EXPECT_EQ(nullptr, predictor.predict(20 * NSEC_PER_MSEC));
+ predictor.record(getMotionEvent(UP, 10.25, 3, 60ms));
+ EXPECT_EQ(nullptr, predictor.predict(100 * NSEC_PER_MSEC));
}
TEST(MotionPredictorTest, MultipleDevicesNotSupported) {
@@ -147,6 +249,63 @@ TEST(MotionPredictorTest, FlagDisablesPrediction) {
ASSERT_FALSE(predictor.isPredictionAvailable(/*deviceId=*/1, AINPUT_SOURCE_TOUCHSCREEN));
}
+TEST_WITH_FLAGS(
+ MotionPredictorTest, LowJerkNoPruning,
+ REQUIRES_FLAGS_ENABLED(ACONFIG_FLAG(com::android::input::flags,
+ enable_prediction_pruning_via_jerk_thresholding))) {
+ MotionPredictor predictor(/*predictionTimestampOffsetNanos=*/0,
+ []() { return true /*enable prediction*/; });
+
+ // Jerk is low (0.05 normalized).
+ predictor.record(getMotionEvent(DOWN, 2, 7, 20ms));
+ predictor.record(getMotionEvent(MOVE, 2.75, 7, 30ms));
+ predictor.record(getMotionEvent(MOVE, 3.8, 7, 40ms));
+ predictor.record(getMotionEvent(MOVE, 5.2, 7, 50ms));
+ predictor.record(getMotionEvent(MOVE, 7, 7, 60ms));
+ std::unique_ptr<MotionEvent> predicted = predictor.predict(90 * NSEC_PER_MSEC);
+ EXPECT_NE(nullptr, predicted);
+ EXPECT_EQ(static_cast<size_t>(5), predicted->getHistorySize() + 1);
+}
+
+TEST_WITH_FLAGS(
+ MotionPredictorTest, HighJerkPredictionsPruned,
+ REQUIRES_FLAGS_ENABLED(ACONFIG_FLAG(com::android::input::flags,
+ enable_prediction_pruning_via_jerk_thresholding))) {
+ MotionPredictor predictor(/*predictionTimestampOffsetNanos=*/0,
+ []() { return true /*enable prediction*/; });
+
+ // Jerk is incredibly high.
+ predictor.record(getMotionEvent(DOWN, 0, 5, 20ms));
+ predictor.record(getMotionEvent(MOVE, 0, 70, 30ms));
+ predictor.record(getMotionEvent(MOVE, 0, 139, 40ms));
+ predictor.record(getMotionEvent(MOVE, 0, 1421, 50ms));
+ predictor.record(getMotionEvent(MOVE, 0, 41233, 60ms));
+ std::unique_ptr<MotionEvent> predicted = predictor.predict(90 * NSEC_PER_MSEC);
+ EXPECT_EQ(nullptr, predicted);
+}
+
+TEST_WITH_FLAGS(
+ MotionPredictorTest, MediumJerkPredictionsSomePruned,
+ REQUIRES_FLAGS_ENABLED(ACONFIG_FLAG(com::android::input::flags,
+ enable_prediction_pruning_via_jerk_thresholding))) {
+ MotionPredictor predictor(/*predictionTimestampOffsetNanos=*/0,
+ []() { return true /*enable prediction*/; });
+
+    // Jerk is medium (1.05 normalized, which is halfway between LOW_JERK and HIGH_JERK).
+ predictor.record(getMotionEvent(DOWN, 0, 5.2, 20ms));
+ predictor.record(getMotionEvent(MOVE, 0, 11.5, 30ms));
+ predictor.record(getMotionEvent(MOVE, 0, 22, 40ms));
+ predictor.record(getMotionEvent(MOVE, 0, 37.75, 50ms));
+ predictor.record(getMotionEvent(MOVE, 0, 59.8, 60ms));
+ std::unique_ptr<MotionEvent> predicted = predictor.predict(82 * NSEC_PER_MSEC);
+ EXPECT_NE(nullptr, predicted);
+    // Halfway between LOW_JERK and HIGH_JERK means that half of the predictions
+    // will be pruned. If the model prediction window is close enough to the
+    // predict() call time window, then half of the model predictions (5/2 -> 2)
+    // will be output.
+ EXPECT_EQ(static_cast<size_t>(3), predicted->getHistorySize() + 1);
+}
+
using AtomFields = MotionPredictorMetricsManager::AtomFields;
using ReportAtomFunction = MotionPredictorMetricsManager::ReportAtomFunction;
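The "Jerk derivative table" comments in the JerkTracker tests above are repeated forward differences of the last four samples: with normalized dt every difference uses dt = 1, and the reported magnitude is hypot(x''', y'''). A small self-contained sketch that reproduces the first table (illustrative only, not the JerkTracker API):

    #include <array>
    #include <cmath>
    #include <cstdio>

    // Repeated forward differences over the last four samples with dt normalized to 1,
    // mirroring the derivative tables in the comments above.
    int main() {
        std::array<float, 4> x = {20, 25, 30, 45};   // samples at t = 0, 1, 2, 3
        std::array<float, 4> y = {50, 53, 60, 70};
        auto thirdDiff = [](std::array<float, 4> v) {
            for (int order = 0; order < 3; ++order) {
                for (int i = 0; i + 1 < 4 - order; ++i) {
                    v[i] = v[i + 1] - v[i];          // dt == 1, so no division
                }
            }
            return v[0];                             // x''' (or y''')
        };
        const float jerk = std::hypot(thirdDiff(x), thirdDiff(y));
        std::printf("jerk magnitude = %f\n", jerk);  // hypot(10, -1), as the test expects
        return 0;
    }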
diff --git a/libs/input/tests/TestHelpers.h b/libs/input/tests/TestHelpers.h
deleted file mode 100644
index 343d81f917..0000000000
--- a/libs/input/tests/TestHelpers.h
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef TESTHELPERS_H
-#define TESTHELPERS_H
-
-#include <unistd.h>
-
-#include <utils/threads.h>
-
-namespace android {
-
-class Pipe {
-public:
- int sendFd;
- int receiveFd;
-
- Pipe() {
- int fds[2];
- ::pipe(fds);
-
- receiveFd = fds[0];
- sendFd = fds[1];
- }
-
- ~Pipe() {
- if (sendFd != -1) {
- ::close(sendFd);
- }
-
- if (receiveFd != -1) {
- ::close(receiveFd);
- }
- }
-
- status_t writeSignal() {
- ssize_t nWritten = ::write(sendFd, "*", 1);
- return nWritten == 1 ? 0 : -errno;
- }
-
- status_t readSignal() {
- char buf[1];
- ssize_t nRead = ::read(receiveFd, buf, 1);
- return nRead == 1 ? 0 : nRead == 0 ? -EPIPE : -errno;
- }
-};
-
-class DelayedTask : public Thread {
- int mDelayMillis;
-
-public:
- explicit DelayedTask(int delayMillis) : mDelayMillis(delayMillis) { }
-
-protected:
- virtual ~DelayedTask() { }
-
- virtual void doTask() = 0;
-
- virtual bool threadLoop() {
- usleep(mDelayMillis * 1000);
- doTask();
- return false;
- }
-};
-
-} // namespace android
-
-#endif // TESTHELPERS_H
diff --git a/libs/input/tests/TouchResampling_test.cpp b/libs/input/tests/TouchResampling_test.cpp
index 1cb7f7ba8c..8d8b5300c1 100644
--- a/libs/input/tests/TouchResampling_test.cpp
+++ b/libs/input/tests/TouchResampling_test.cpp
@@ -14,13 +14,12 @@
* limitations under the License.
*/
-#include "TestHelpers.h"
-
#include <chrono>
#include <vector>
#include <attestation/HmacKeyManager.h>
#include <gtest/gtest.h>
+#include <input/InputConsumer.h>
#include <input/InputTransport.h>
using namespace std::chrono_literals;
@@ -85,10 +84,11 @@ status_t TouchResamplingTest::publishSimpleMotionEventWithCoords(
ADD_FAILURE() << "Downtime should be equal to 0 (hardcoded for convenience)";
}
return mPublisher->publishMotionEvent(mSeq++, InputEvent::nextId(), /*deviceId=*/1,
- AINPUT_SOURCE_TOUCHSCREEN, /*displayId=*/0, INVALID_HMAC,
- action, /*actionButton=*/0, /*flags=*/0, /*edgeFlags=*/0,
- AMETA_NONE, /*buttonState=*/0, MotionClassification::NONE,
- identityTransform, /*xPrecision=*/0, /*yPrecision=*/0,
+ AINPUT_SOURCE_TOUCHSCREEN, ui::LogicalDisplayId::DEFAULT,
+ INVALID_HMAC, action, /*actionButton=*/0, /*flags=*/0,
+ /*edgeFlags=*/0, AMETA_NONE, /*buttonState=*/0,
+ MotionClassification::NONE, identityTransform,
+ /*xPrecision=*/0, /*yPrecision=*/0,
AMOTION_EVENT_INVALID_CURSOR_POSITION,
AMOTION_EVENT_INVALID_CURSOR_POSITION, identityTransform,
downTime, eventTime, properties.size(), properties.data(),
@@ -297,10 +297,9 @@ TEST_F(TouchResamplingTest, EventIsResampledWithDifferentId) {
}
/**
- * Stylus pointer coordinates are not resampled, but an event is still generated for the batch with
- * a resampled timestamp and should be marked as such.
+ * Stylus pointer coordinates are resampled.
*/
-TEST_F(TouchResamplingTest, StylusCoordinatesNotResampledFor) {
+TEST_F(TouchResamplingTest, StylusEventIsResampled) {
std::chrono::nanoseconds frameTime;
std::vector<InputEventEntry> entries, expectedEntries;
@@ -330,15 +329,91 @@ TEST_F(TouchResamplingTest, StylusCoordinatesNotResampledFor) {
// id x y
{10ms, {{0, 20, 30, .toolType = ToolType::STYLUS}}, AMOTION_EVENT_ACTION_MOVE},
{20ms, {{0, 30, 30, .toolType = ToolType::STYLUS}}, AMOTION_EVENT_ACTION_MOVE},
- // A resampled event is generated, but the stylus coordinates are not resampled.
{25ms,
- {{0, 30, 30, .toolType = ToolType::STYLUS, .isResampled = true}},
+ {{0, 35, 30, .toolType = ToolType::STYLUS, .isResampled = true}},
AMOTION_EVENT_ACTION_MOVE},
};
consumeInputEventEntries(expectedEntries, frameTime);
}
/**
+ * Mouse pointer coordinates are resampled.
+ */
+TEST_F(TouchResamplingTest, MouseEventIsResampled) {
+ std::chrono::nanoseconds frameTime;
+ std::vector<InputEventEntry> entries, expectedEntries;
+
+    // Initial ACTION_DOWN should be separate, because the first consume call will only return
+    // an InputEvent with a single action.
+ entries = {
+ // id x y
+ {0ms, {{0, 10, 20, .toolType = ToolType::MOUSE}}, AMOTION_EVENT_ACTION_DOWN},
+ };
+ publishInputEventEntries(entries);
+ frameTime = 5ms;
+ expectedEntries = {
+ // id x y
+ {0ms, {{0, 10, 20, .toolType = ToolType::MOUSE}}, AMOTION_EVENT_ACTION_DOWN},
+ };
+ consumeInputEventEntries(expectedEntries, frameTime);
+
+    // Two ACTION_MOVE events 10 ms apart that move in the X direction and stay still in Y
+ entries = {
+ // id x y
+ {10ms, {{0, 20, 30, .toolType = ToolType::MOUSE}}, AMOTION_EVENT_ACTION_MOVE},
+ {20ms, {{0, 30, 30, .toolType = ToolType::MOUSE}}, AMOTION_EVENT_ACTION_MOVE},
+ };
+ publishInputEventEntries(entries);
+ frameTime = 35ms;
+ expectedEntries = {
+ // id x y
+ {10ms, {{0, 20, 30, .toolType = ToolType::MOUSE}}, AMOTION_EVENT_ACTION_MOVE},
+ {20ms, {{0, 30, 30, .toolType = ToolType::MOUSE}}, AMOTION_EVENT_ACTION_MOVE},
+ {25ms,
+ {{0, 35, 30, .toolType = ToolType::MOUSE, .isResampled = true}},
+ AMOTION_EVENT_ACTION_MOVE},
+ };
+ consumeInputEventEntries(expectedEntries, frameTime);
+}
+
+/**
+ * Motion events with palm tool type are not resampled.
+ */
+TEST_F(TouchResamplingTest, PalmEventIsNotResampled) {
+ std::chrono::nanoseconds frameTime;
+ std::vector<InputEventEntry> entries, expectedEntries;
+
+    // Initial ACTION_DOWN should be separate, because the first consume call will only return
+    // an InputEvent with a single action.
+ entries = {
+ // id x y
+ {0ms, {{0, 10, 20, .toolType = ToolType::PALM}}, AMOTION_EVENT_ACTION_DOWN},
+ };
+ publishInputEventEntries(entries);
+ frameTime = 5ms;
+ expectedEntries = {
+ // id x y
+ {0ms, {{0, 10, 20, .toolType = ToolType::PALM}}, AMOTION_EVENT_ACTION_DOWN},
+ };
+ consumeInputEventEntries(expectedEntries, frameTime);
+
+    // Two ACTION_MOVE events 10 ms apart that move in the X direction and stay still in Y
+ entries = {
+ // id x y
+ {10ms, {{0, 20, 30, .toolType = ToolType::PALM}}, AMOTION_EVENT_ACTION_MOVE},
+ {20ms, {{0, 30, 30, .toolType = ToolType::PALM}}, AMOTION_EVENT_ACTION_MOVE},
+ };
+ publishInputEventEntries(entries);
+ frameTime = 35ms;
+ expectedEntries = {
+ // id x y
+ {10ms, {{0, 20, 30, .toolType = ToolType::PALM}}, AMOTION_EVENT_ACTION_MOVE},
+ {20ms, {{0, 30, 30, .toolType = ToolType::PALM}}, AMOTION_EVENT_ACTION_MOVE},
+ };
+ consumeInputEventEntries(expectedEntries, frameTime);
+}
+
+/**
* Event should not be resampled when sample time is equal to event time.
*/
TEST_F(TouchResamplingTest, SampleTimeEqualsEventTime) {
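The expected 25 ms MOVE entries in the stylus and mouse resampling tests above follow from straight-line extrapolation of the last two samples; the resample time is assumed here to be the frame time minus a small latency, capped at half the last sample interval past the newest sample (the exact constants and clamping live in the resampler). A small self-contained sketch of the arithmetic:

    #include <cstdio>

    // Linear extrapolation used to derive the expected MOVE at 25 ms in the tests above
    // (a sketch of the arithmetic, not the resampler implementation).
    int main() {
        // Last two published samples: (t = 10 ms, x = 20) and (t = 20 ms, x = 30); y stays at 30.
        const float t0 = 10, x0 = 20;
        const float t1 = 20, x1 = 30;
        const float tResample = 25;  // assumed: frame time (35 ms) minus latency, clamped to
                                     // half the sample interval past the newest sample
        const float alpha = (tResample - t1) / (t1 - t0);
        const float x = x1 + alpha * (x1 - x0);  // 30 + 0.5 * 10 = 35
        std::printf("resampled x at %.0f ms = %.0f\n", tResample, x);
        return 0;
    }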
diff --git a/libs/input/tests/VelocityTracker_test.cpp b/libs/input/tests/VelocityTracker_test.cpp
index f9ca28083d..f50a3cdb99 100644
--- a/libs/input/tests/VelocityTracker_test.cpp
+++ b/libs/input/tests/VelocityTracker_test.cpp
@@ -24,7 +24,6 @@
#include <android-base/stringprintf.h>
#include <attestation/HmacKeyManager.h>
#include <gtest/gtest.h>
-#include <gui/constants.h>
#include <input/VelocityTracker.h>
using std::literals::chrono_literals::operator""ms;
@@ -34,7 +33,7 @@ using android::base::StringPrintf;
namespace android {
-constexpr int32_t DISPLAY_ID = ADISPLAY_ID_DEFAULT; // default display id
+constexpr ui::LogicalDisplayId DISPLAY_ID = ui::LogicalDisplayId::DEFAULT; // default display id
constexpr int32_t DEFAULT_POINTER_ID = 0; // pointer ID used for manually defined tests
@@ -156,7 +155,7 @@ static std::vector<MotionEvent> createAxisScrollMotionEventStream(
MotionEvent event;
ui::Transform identityTransform;
event.initialize(InputEvent::nextId(), /*deviceId=*/5, AINPUT_SOURCE_ROTARY_ENCODER,
- ADISPLAY_ID_NONE, INVALID_HMAC, AMOTION_EVENT_ACTION_SCROLL,
+ ui::LogicalDisplayId::INVALID, INVALID_HMAC, AMOTION_EVENT_ACTION_SCROLL,
/*actionButton=*/0, /*flags=*/0, AMOTION_EVENT_EDGE_FLAG_NONE, AMETA_NONE,
/*buttonState=*/0, MotionClassification::NONE, identityTransform,
/*xPrecision=*/0, /*yPrecision=*/0, AMOTION_EVENT_INVALID_CURSOR_POSITION,
diff --git a/libs/input/tests/VerifiedInputEvent_test.cpp b/libs/input/tests/VerifiedInputEvent_test.cpp
index 277d74dd1c..df5fe9d2d0 100644
--- a/libs/input/tests/VerifiedInputEvent_test.cpp
+++ b/libs/input/tests/VerifiedInputEvent_test.cpp
@@ -16,7 +16,6 @@
#include <attestation/HmacKeyManager.h>
#include <gtest/gtest.h>
-#include <gui/constants.h>
#include <input/Input.h>
namespace android {
@@ -24,7 +23,7 @@ namespace android {
static KeyEvent getKeyEventWithFlags(int32_t flags) {
KeyEvent event;
event.initialize(InputEvent::nextId(), /*deviceId=*/2, AINPUT_SOURCE_GAMEPAD,
- ADISPLAY_ID_DEFAULT, INVALID_HMAC, AKEY_EVENT_ACTION_DOWN, flags,
+ ui::LogicalDisplayId::DEFAULT, INVALID_HMAC, AKEY_EVENT_ACTION_DOWN, flags,
AKEYCODE_BUTTON_X, /*scanCode=*/121, AMETA_ALT_ON, /*repeatCount=*/1,
/*downTime=*/1000, /*eventTime=*/2000);
return event;
@@ -44,10 +43,11 @@ static MotionEvent getMotionEventWithFlags(int32_t flags) {
ui::Transform transform;
transform.set({2, 0, 4, 0, 3, 5, 0, 0, 1});
ui::Transform identity;
- event.initialize(InputEvent::nextId(), /*deviceId=*/0, AINPUT_SOURCE_MOUSE, ADISPLAY_ID_DEFAULT,
- INVALID_HMAC, AMOTION_EVENT_ACTION_DOWN, /*actionButton=*/0, flags,
- AMOTION_EVENT_EDGE_FLAG_NONE, AMETA_NONE, /*buttonState=*/0,
- MotionClassification::NONE, transform, /*xPrecision=*/0.1, /*yPrecision=*/0.2,
+ event.initialize(InputEvent::nextId(), /*deviceId=*/0, AINPUT_SOURCE_MOUSE,
+ ui::LogicalDisplayId::DEFAULT, INVALID_HMAC, AMOTION_EVENT_ACTION_DOWN,
+ /*actionButton=*/0, flags, AMOTION_EVENT_EDGE_FLAG_NONE, AMETA_NONE,
+ /*buttonState=*/0, MotionClassification::NONE, transform, /*xPrecision=*/0.1,
+ /*yPrecision=*/0.2,
/*xCursorPosition=*/280, /*yCursorPosition=*/540, identity, /*downTime=*/100,
/*eventTime=*/200, pointerCount, pointerProperties, pointerCoords);
return event;