Diffstat (limited to 'media')
-rw-r--r--  media/Android.bp | 43
-rw-r--r--  media/OWNERS | 1
-rw-r--r--  media/aidl/android/media/audio/common/AudioContentType.aidl | 53
-rw-r--r--  media/aidl/android/media/audio/common/AudioSource.aidl | 90
-rw-r--r--  media/aidl/android/media/audio/common/AudioUsage.aidl | 109
-rw-r--r--  media/aidl_api/android.media.audio.common.types/current/android/media/audio/common/AudioContentType.aidl | 43
-rw-r--r--  media/aidl_api/android.media.audio.common.types/current/android/media/audio/common/AudioSource.aidl | 53
-rw-r--r--  media/aidl_api/android.media.audio.common.types/current/android/media/audio/common/AudioUsage.aidl | 61
-rw-r--r--  media/java/Android.bp | 2
-rw-r--r--  media/java/android/media/AudioAttributes.java | 4
-rw-r--r--  media/java/android/media/AudioDeviceInfo.java | 14
-rw-r--r--  media/java/android/media/AudioDevicePort.java | 3
-rw-r--r--  media/java/android/media/AudioManager.java | 201
-rw-r--r--  media/java/android/media/AudioSystem.java | 55
-rw-r--r--  media/java/android/media/BluetoothProfileConnectionInfo.aidl | 20
-rw-r--r--  media/java/android/media/BluetoothProfileConnectionInfo.java | 159
-rwxr-xr-x  media/java/android/media/IAudioService.aidl | 12
-rw-r--r--  media/java/android/media/Image.java | 8
-rw-r--r--  media/java/android/media/JetPlayer.java | 59
-rw-r--r--  media/java/android/media/MediaActionSound.java | 25
-rw-r--r--  media/java/android/media/MediaCodec.java | 66
-rw-r--r--  media/java/android/media/MediaCodecInfo.java | 142
-rw-r--r--  media/java/android/media/MediaFormat.java | 70
-rw-r--r--  media/java/android/media/MediaMetadataRetriever.java | 4
-rw-r--r--  media/java/android/media/MediaRoute2Info.java | 10
-rw-r--r--  media/java/android/media/Ringtone.java | 82
-rw-r--r--  media/java/android/media/session/MediaController.java | 2
-rw-r--r--  media/java/android/media/tv/ITvInputManager.aidl | 2
-rwxr-xr-x  media/java/android/media/tv/ITvInputService.aidl | 23
-rw-r--r--  media/java/android/media/tv/OWNERS | 3
-rw-r--r--  media/java/android/media/tv/TvContract.java | 81
-rw-r--r--  media/java/android/media/tv/TvInputManager.java | 87
-rwxr-xr-x  media/java/android/media/tv/TvInputService.java | 76
-rw-r--r--  media/java/android/media/tv/tunerresourcemanager/OWNER | 5
-rwxr-xr-x  media/java/android/mtp/MtpDatabase.java | 10
-rw-r--r--  media/java/android/mtp/MtpStorage.java | 15
-rw-r--r--  media/java/android/mtp/MtpStorageManager.java | 46
-rw-r--r--  media/jni/Android.bp | 4
-rw-r--r--  media/jni/OWNERS | 2
-rw-r--r--  media/jni/android_media_MediaPlayer.cpp | 2
-rw-r--r--  media/jni/android_media_Utils.cpp | 1
-rw-r--r--  media/jni/audioeffect/android_media_AudioEffect.cpp | 50
-rw-r--r--  media/jni/audioeffect/android_media_Visualizer.cpp | 92
-rw-r--r--  media/jni/soundpool/SoundPool.cpp | 2
-rw-r--r--  media/jni/soundpool/Stream.cpp | 202
-rw-r--r--  media/jni/soundpool/Stream.h | 9
-rw-r--r--  media/jni/soundpool/StreamManager.cpp | 13
-rw-r--r--  media/native/midi/amidi.cpp | 10
-rw-r--r--  media/tests/EffectsTest/AndroidManifest.xml | 4
-rw-r--r--  media/tests/EffectsTest/res/layout/bassboosttest.xml | 5
-rw-r--r--  media/tests/EffectsTest/res/layout/visualizertest.xml | 5
-rw-r--r--  media/tests/EffectsTest/res/values/strings.xml | 2
-rw-r--r--  media/tests/EffectsTest/src/com/android/effectstest/BassBoostTest.java | 80
-rw-r--r--  media/tests/EffectsTest/src/com/android/effectstest/VisualizerTest.java | 60
-rw-r--r--  media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/unit/BluetoothProfileConnectionInfoTest.java | 72
-rw-r--r--  media/tests/MtpTests/src/android/mtp/MtpStorageManagerTest.java | 7
-rw-r--r--  media/tests/TunerTest/OWNERS | 4
57 files changed, 1949 insertions, 416 deletions
diff --git a/media/Android.bp b/media/Android.bp
index a66236e6f4ea..b7c3a9c112d1 100644
--- a/media/Android.bp
+++ b/media/Android.bp
@@ -20,7 +20,9 @@ aidl_interface {
"aidl/android/media/audio/common/AudioFormat.aidl",
"aidl/android/media/audio/common/AudioOffloadInfo.aidl",
"aidl/android/media/audio/common/AudioStreamType.aidl",
- "aidl/android/media/audio/common/AudioUsage.aidl",
+ ],
+ imports: [
+ "android.media.audio.common.types",
],
}
@@ -67,3 +69,42 @@ aidl_interface {
"media_permission-aidl",
],
}
+
+aidl_interface {
+ name: "android.media.audio.common.types",
+ vendor_available: true,
+ host_supported: true,
+ double_loadable: true,
+ flags: [
+ "-Werror",
+ "-Weverything",
+ ],
+ local_include_dir: "aidl",
+ srcs: [
+ "aidl/android/media/audio/common/AudioContentType.aidl",
+ "aidl/android/media/audio/common/AudioSource.aidl",
+ "aidl/android/media/audio/common/AudioUsage.aidl",
+ ],
+ stability: "vintf",
+ backend: {
+ cpp: {
+ min_sdk_version: "29",
+ apex_available: [
+ "//apex_available:platform",
+ "com.android.media",
+ ],
+ },
+ java: {
+ },
+ ndk: {
+ vndk: {
+ enabled: true,
+ },
+ min_sdk_version: "29",
+ apex_available: [
+ "//apex_available:platform",
+ "com.android.bluetooth",
+ ],
+ },
+ },
+}
diff --git a/media/OWNERS b/media/OWNERS
index 0aff43e00671..5f501372666b 100644
--- a/media/OWNERS
+++ b/media/OWNERS
@@ -3,7 +3,6 @@ elaurent@google.com
essick@google.com
etalvala@google.com
hdmoon@google.com
-hkuang@google.com
hunga@google.com
insun@google.com
jaewan@google.com
diff --git a/media/aidl/android/media/audio/common/AudioContentType.aidl b/media/aidl/android/media/audio/common/AudioContentType.aidl
new file mode 100644
index 000000000000..50ac181adcb2
--- /dev/null
+++ b/media/aidl/android/media/audio/common/AudioContentType.aidl
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media.audio.common;
+
+/**
+ * Content type specifies "what" is playing. The content type expresses the
+ * general category of the content: speech, music, movie audio, etc.
+ * This enum corresponds to AudioAttributes.CONTENT_TYPE_* constants in the SDK.
+ *
+ * {@hide}
+ */
+@Backing(type="int")
+@VintfStability
+enum AudioContentType {
+ /**
+ * Content type value to use when the content type is unknown, or other than
+ * the ones defined.
+ */
+ UNKNOWN = 0,
+ /**
+ * Content type value to use when the content type is speech.
+ */
+ SPEECH = 1,
+ /**
+ * Content type value to use when the content type is music.
+ */
+ MUSIC = 2,
+ /**
+ * Content type value to use when the content type is a soundtrack,
+ * typically accompanying a movie or TV program.
+ */
+ MOVIE = 3,
+ /**
+ * Content type value to use when the content type is a sound used to
+ * accompany a user action, such as a beep or sound effect expressing a key
+ * click, or event, such as the type of a sound for a bonus being received
+ * in a game. These sounds are mostly synthesized or short Foley sounds.
+ */
+ SONIFICATION = 4,
+}
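
The comment above notes that this enum corresponds to the AudioAttributes.CONTENT_TYPE_* SDK constants, and the numeric values line up one-to-one. A minimal sketch of that correspondence in Java (the helper class and method names are hypothetical and not part of this change):

    import android.media.AudioAttributes;

    final class AudioContentTypeAidl {
        /** Hypothetical helper: maps an SDK content type to the AIDL AudioContentType value. */
        static int fromSdk(int sdkContentType) {
            switch (sdkContentType) {
                case AudioAttributes.CONTENT_TYPE_SPEECH:       return 1; // SPEECH
                case AudioAttributes.CONTENT_TYPE_MUSIC:        return 2; // MUSIC
                case AudioAttributes.CONTENT_TYPE_MOVIE:        return 3; // MOVIE
                case AudioAttributes.CONTENT_TYPE_SONIFICATION: return 4; // SONIFICATION
                case AudioAttributes.CONTENT_TYPE_UNKNOWN:
                default:                                        return 0; // UNKNOWN
            }
        }
    }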
diff --git a/media/aidl/android/media/audio/common/AudioSource.aidl b/media/aidl/android/media/audio/common/AudioSource.aidl
new file mode 100644
index 000000000000..527ee39b3267
--- /dev/null
+++ b/media/aidl/android/media/audio/common/AudioSource.aidl
@@ -0,0 +1,90 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media.audio.common;
+
+/**
+ * Defines the audio source. An audio source defines both a default physical
+ * source of audio signal, and a recording configuration. This enum corresponds
+ * to MediaRecorder.AudioSource.* constants in the SDK.
+ *
+ * {@hide}
+ */
+@Backing(type="int")
+@VintfStability
+enum AudioSource {
+ /**
+ * Used as default value in parcelables to indicate that a value was not
+ * set. Should never be considered a valid setting, except for backward
+ * compatibility scenarios.
+ */
+ SYS_RESERVED_INVALID = -1,
+ /** Default audio source. */
+ DEFAULT = 0,
+ /** Microphone audio source. */
+ MIC = 1,
+ /** Voice call uplink (Tx) audio source. */
+ VOICE_UPLINK = 2,
+ /** Voice call downlink (Rx) audio source. */
+ VOICE_DOWNLINK = 3,
+ /** Voice call uplink + downlink (duplex) audio source. */
+ VOICE_CALL = 4,
+ /**
+ * Microphone audio source tuned for video recording, with the same
+ * orientation as the camera if available.
+ */
+ CAMCORDER = 5,
+ /** Microphone audio source tuned for voice recognition. */
+ VOICE_RECOGNITION = 6,
+ /**
+ * Microphone audio source tuned for voice communications such as VoIP. It
+ * will for instance take advantage of echo cancellation or automatic gain
+ * control if available.
+ */
+ VOICE_COMMUNICATION = 7,
+ /**
+ * Audio source for a submix of audio streams to be presented remotely. An
+ * application can use this audio source to capture a mix of audio streams
+ * that should be transmitted to a remote receiver such as a Wifi display.
+ * While recording is active, these audio streams are redirected to the
+ * remote submix instead of being played on the device speaker or headset.
+ */
+ REMOTE_SUBMIX = 8,
+ /**
+ * Microphone audio source tuned for unprocessed (raw) sound if available,
+ * behaves like DEFAULT otherwise.
+ */
+ UNPROCESSED = 9,
+ /**
+ * Source for capturing audio meant to be processed in real time and played
+ * back for live performance (e.g. karaoke). The capture path will minimize
+ * latency and coupling with playback path.
+ */
+ VOICE_PERFORMANCE = 10,
+ /**
+ * Source for an echo canceller to capture the reference signal to be
+ * canceled. The echo reference signal will be captured as close as
+ * possible to the DAC in order to include all post processing applied to
+ * the playback path.
+ */
+ ECHO_REFERENCE = 1997,
+ /** Audio source for capturing broadcast FM tuner output. */
+ FM_TUNER = 1998,
+ /**
+ * A low-priority, preemptible audio source for background software
+ * hotword detection. Same tuning as VOICE_RECOGNITION.
+ */
+ HOTWORD = 1999,
+}
diff --git a/media/aidl/android/media/audio/common/AudioUsage.aidl b/media/aidl/android/media/audio/common/AudioUsage.aidl
index ef348165b22c..bb0cc1e91757 100644
--- a/media/aidl/android/media/audio/common/AudioUsage.aidl
+++ b/media/aidl/android/media/audio/common/AudioUsage.aidl
@@ -14,27 +14,128 @@
* limitations under the License.
*/
- // This file has been semi-automatically generated using hidl2aidl from its counterpart in
- // hardware/interfaces/audio/common/5.0/types.hal
-
package android.media.audio.common;
/**
* {@hide}
*/
+@VintfStability
@Backing(type="int")
enum AudioUsage {
+ /**
+ * Used as default value in parcelables to indicate that a value was not
+ * set. Should never be considered a valid setting, except for backward
+ * compatibility scenarios.
+ */
+ INVALID = -1,
+ /**
+ * Usage value to use when the usage is unknown.
+ */
UNKNOWN = 0,
+ /**
+ * Usage value to use when the usage is media, such as music, or movie
+ * soundtracks.
+ */
MEDIA = 1,
+ /**
+ * Usage value to use when the usage is voice communications, such as
+ * telephony or VoIP.
+ */
VOICE_COMMUNICATION = 2,
+ /**
+ * Usage value to use when the usage is in-call signalling, such as with
+ * a "busy" beep, or DTMF tones.
+ */
VOICE_COMMUNICATION_SIGNALLING = 3,
+ /**
+ * Usage value to use when the usage is an alarm (e.g. wake-up alarm).
+ */
ALARM = 4,
+ /**
+ * Usage value to use when the usage is notification. See other notification
+ * usages for more specialized uses.
+ */
NOTIFICATION = 5,
+ /**
+ * Usage value to use when the usage is telephony ringtone.
+ */
NOTIFICATION_TELEPHONY_RINGTONE = 6,
+ /**
+ * Usage value to use when the usage is a request to enter/end a
+ * communication, such as a VoIP communication or video-conference.
+ *
+ * Value reserved for system use only. HALs must never return this value to
+ * the system or accept it from the system.
+ */
+ SYS_RESERVED_NOTIFICATION_COMMUNICATION_REQUEST = 7,
+ /**
+ * Usage value to use when the usage is notification for an "instant"
+ * communication such as a chat, or SMS.
+ *
+ * Value reserved for system use only. HALs must never return this value to
+ * the system or accept it from the system.
+ */
+ SYS_RESERVED_NOTIFICATION_COMMUNICATION_INSTANT = 8,
+ /**
+ * Usage value to use when the usage is notification for a
+ * non-immediate type of communication such as e-mail.
+ *
+ * Value reserved for system use only. HALs must never return this value to
+ * the system or accept it from the system.
+ */
+ SYS_RESERVED_NOTIFICATION_COMMUNICATION_DELAYED = 9,
+ /**
+ * Usage value to use when the usage is to attract the user's attention,
+ * such as a reminder or low battery warning.
+ */
+ NOTIFICATION_EVENT = 10,
+ /**
+ * Usage value to use when the usage is for accessibility, such as with
+ * a screen reader.
+ */
ASSISTANCE_ACCESSIBILITY = 11,
+ /**
+ * Usage value to use when the usage is driving or navigation directions.
+ */
ASSISTANCE_NAVIGATION_GUIDANCE = 12,
+ /**
+ * Usage value to use when the usage is sonification, such as with user
+ * interface sounds.
+ */
ASSISTANCE_SONIFICATION = 13,
+ /**
+ * Usage value to use when the usage is for game audio.
+ */
GAME = 14,
+ /**
+ * Usage value to use when feeding audio to the platform and replacing
+ * "traditional" audio source, such as audio capture devices.
+ */
VIRTUAL_SOURCE = 15,
+ /**
+ * Usage value to use for audio responses to user queries, audio
+ * instructions or help utterances.
+ */
ASSISTANT = 16,
-}
+ /**
+ * Usage value to use for assistant voice interaction with remote caller on
+ * Cell and VoIP calls.
+ */
+ CALL_ASSISTANT = 17,
+ /**
+ * Usage value to use when the usage is an emergency.
+ */
+ EMERGENCY = 1000,
+ /**
+ * Usage value to use when the usage is a safety sound.
+ */
+ SAFETY = 1001,
+ /**
+ * Usage value to use when the usage is a vehicle status.
+ */
+ VEHICLE_STATUS = 1002,
+ /**
+ * Usage value to use when the usage is an announcement.
+ */
+ ANNOUNCEMENT = 1003,
+} \ No newline at end of file
diff --git a/media/aidl_api/android.media.audio.common.types/current/android/media/audio/common/AudioContentType.aidl b/media/aidl_api/android.media.audio.common.types/current/android/media/audio/common/AudioContentType.aidl
new file mode 100644
index 000000000000..3798b8253766
--- /dev/null
+++ b/media/aidl_api/android.media.audio.common.types/current/android/media/audio/common/AudioContentType.aidl
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE. //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+// the interface (from the latest frozen version), the build system will
+// prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.media.audio.common;
+/* @hide */
+@Backing(type="int") @VintfStability
+enum AudioContentType {
+ UNKNOWN = 0,
+ SPEECH = 1,
+ MUSIC = 2,
+ MOVIE = 3,
+ SONIFICATION = 4,
+}
diff --git a/media/aidl_api/android.media.audio.common.types/current/android/media/audio/common/AudioSource.aidl b/media/aidl_api/android.media.audio.common.types/current/android/media/audio/common/AudioSource.aidl
new file mode 100644
index 000000000000..d1dfe416d692
--- /dev/null
+++ b/media/aidl_api/android.media.audio.common.types/current/android/media/audio/common/AudioSource.aidl
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE. //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+// the interface (from the latest frozen version), the build system will
+// prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.media.audio.common;
+/* @hide */
+@Backing(type="int") @VintfStability
+enum AudioSource {
+ SYS_RESERVED_INVALID = -1,
+ DEFAULT = 0,
+ MIC = 1,
+ VOICE_UPLINK = 2,
+ VOICE_DOWNLINK = 3,
+ VOICE_CALL = 4,
+ CAMCORDER = 5,
+ VOICE_RECOGNITION = 6,
+ VOICE_COMMUNICATION = 7,
+ REMOTE_SUBMIX = 8,
+ UNPROCESSED = 9,
+ VOICE_PERFORMANCE = 10,
+ ECHO_REFERENCE = 1997,
+ FM_TUNER = 1998,
+ HOTWORD = 1999,
+}
diff --git a/media/aidl_api/android.media.audio.common.types/current/android/media/audio/common/AudioUsage.aidl b/media/aidl_api/android.media.audio.common.types/current/android/media/audio/common/AudioUsage.aidl
new file mode 100644
index 000000000000..4c7245543cfd
--- /dev/null
+++ b/media/aidl_api/android.media.audio.common.types/current/android/media/audio/common/AudioUsage.aidl
@@ -0,0 +1,61 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE. //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+// the interface (from the latest frozen version), the build system will
+// prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.media.audio.common;
+/* @hide */
+@Backing(type="int") @VintfStability
+enum AudioUsage {
+ INVALID = -1,
+ UNKNOWN = 0,
+ MEDIA = 1,
+ VOICE_COMMUNICATION = 2,
+ VOICE_COMMUNICATION_SIGNALLING = 3,
+ ALARM = 4,
+ NOTIFICATION = 5,
+ NOTIFICATION_TELEPHONY_RINGTONE = 6,
+ SYS_RESERVED_NOTIFICATION_COMMUNICATION_REQUEST = 7,
+ SYS_RESERVED_NOTIFICATION_COMMUNICATION_INSTANT = 8,
+ SYS_RESERVED_NOTIFICATION_COMMUNICATION_DELAYED = 9,
+ NOTIFICATION_EVENT = 10,
+ ASSISTANCE_ACCESSIBILITY = 11,
+ ASSISTANCE_NAVIGATION_GUIDANCE = 12,
+ ASSISTANCE_SONIFICATION = 13,
+ GAME = 14,
+ VIRTUAL_SOURCE = 15,
+ ASSISTANT = 16,
+ CALL_ASSISTANT = 17,
+ EMERGENCY = 1000,
+ SAFETY = 1001,
+ VEHICLE_STATUS = 1002,
+ ANNOUNCEMENT = 1003,
+}
diff --git a/media/java/Android.bp b/media/java/Android.bp
index eeaf6e9015ac..c7c1d54cd3c6 100644
--- a/media/java/Android.bp
+++ b/media/java/Android.bp
@@ -8,7 +8,7 @@ package {
}
filegroup {
- name: "framework-media-sources",
+ name: "framework-media-non-updatable-sources",
srcs: [
"**/*.java",
"**/*.aidl",
diff --git a/media/java/android/media/AudioAttributes.java b/media/java/android/media/AudioAttributes.java
index c22ab9463736..b27a00c67d06 100644
--- a/media/java/android/media/AudioAttributes.java
+++ b/media/java/android/media/AudioAttributes.java
@@ -335,6 +335,10 @@ public final class AudioAttributes implements Parcelable {
/**
* Flag defining a behavior where the audibility of the sound will be ensured by the system.
+ * To ensure sound audibility, the system only uses built-in speakers or wired headphones
+ * and specifically excludes wireless audio devices.
+ * <p>Note this flag should only be used for sounds subject to regulatory behaviors in some
+ * countries, such as for camera shutter sound, and not for routing behaviors.
*/
public final static int FLAG_AUDIBILITY_ENFORCED = 0x1 << 0;
/**
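
The expanded FLAG_AUDIBILITY_ENFORCED note above describes a regulatory-only flag whose sound is routed to built-in speakers or wired headphones. A minimal sketch of how an application could build attributes for such a sound (the usage and content-type choices here are illustrative, not mandated by this change):

    import android.media.AudioAttributes;

    // Attributes for a regulatory sound such as a camera shutter: the system
    // enforces audibility on the built-in speaker or wired headphones only.
    AudioAttributes shutterAttrs = new AudioAttributes.Builder()
            .setUsage(AudioAttributes.USAGE_ASSISTANCE_SONIFICATION)
            .setContentType(AudioAttributes.CONTENT_TYPE_SONIFICATION)
            .setFlags(AudioAttributes.FLAG_AUDIBILITY_ENFORCED)
            .build();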
diff --git a/media/java/android/media/AudioDeviceInfo.java b/media/java/android/media/AudioDeviceInfo.java
index a186566aec0b..3edb10108497 100644
--- a/media/java/android/media/AudioDeviceInfo.java
+++ b/media/java/android/media/AudioDeviceInfo.java
@@ -177,6 +177,11 @@ public final class AudioDeviceInfo {
*/
public static final int TYPE_HDMI_EARC = 29;
+ /**
+ * A device type describing a Bluetooth Low Energy (BLE) broadcast group.
+ */
+ public static final int TYPE_BLE_BROADCAST = 30;
+
/** @hide */
@IntDef(flag = false, prefix = "TYPE", value = {
TYPE_BUILTIN_EARPIECE,
@@ -207,7 +212,8 @@ public final class AudioDeviceInfo {
TYPE_REMOTE_SUBMIX,
TYPE_BLE_HEADSET,
TYPE_BLE_SPEAKER,
- TYPE_ECHO_REFERENCE}
+ TYPE_ECHO_REFERENCE,
+ TYPE_BLE_BROADCAST}
)
@Retention(RetentionPolicy.SOURCE)
public @interface AudioDeviceType {}
@@ -264,7 +270,8 @@ public final class AudioDeviceInfo {
TYPE_HEARING_AID,
TYPE_BUILTIN_SPEAKER_SAFE,
TYPE_BLE_HEADSET,
- TYPE_BLE_SPEAKER}
+ TYPE_BLE_SPEAKER,
+ TYPE_BLE_BROADCAST}
)
@Retention(RetentionPolicy.SOURCE)
public @interface AudioDeviceTypeOut {}
@@ -296,6 +303,7 @@ public final class AudioDeviceInfo {
case TYPE_BUILTIN_SPEAKER_SAFE:
case TYPE_BLE_HEADSET:
case TYPE_BLE_SPEAKER:
+ case TYPE_BLE_BROADCAST:
return true;
default:
return false;
@@ -627,6 +635,7 @@ public final class AudioDeviceInfo {
INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_OUT_REMOTE_SUBMIX, TYPE_REMOTE_SUBMIX);
INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_OUT_BLE_HEADSET, TYPE_BLE_HEADSET);
INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_OUT_BLE_SPEAKER, TYPE_BLE_SPEAKER);
+ INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_OUT_BLE_BROADCAST, TYPE_BLE_BROADCAST);
INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_IN_BUILTIN_MIC, TYPE_BUILTIN_MIC);
INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_IN_BLUETOOTH_SCO_HEADSET, TYPE_BLUETOOTH_SCO);
@@ -684,6 +693,7 @@ public final class AudioDeviceInfo {
EXT_TO_INT_DEVICE_MAPPING.put(TYPE_REMOTE_SUBMIX, AudioSystem.DEVICE_OUT_REMOTE_SUBMIX);
EXT_TO_INT_DEVICE_MAPPING.put(TYPE_BLE_HEADSET, AudioSystem.DEVICE_OUT_BLE_HEADSET);
EXT_TO_INT_DEVICE_MAPPING.put(TYPE_BLE_SPEAKER, AudioSystem.DEVICE_OUT_BLE_SPEAKER);
+ EXT_TO_INT_DEVICE_MAPPING.put(TYPE_BLE_BROADCAST, AudioSystem.DEVICE_OUT_BLE_BROADCAST);
// privileges mapping to input device
EXT_TO_INT_INPUT_DEVICE_MAPPING = new SparseIntArray();
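
Because TYPE_BLE_BROADCAST is added to the output-device type sets and to the AudioSystem mappings above, applications can detect it through the existing device enumeration API. A minimal sketch, assuming an available Context named context:

    import android.media.AudioDeviceInfo;
    import android.media.AudioManager;

    AudioManager am = context.getSystemService(AudioManager.class);
    for (AudioDeviceInfo device : am.getDevices(AudioManager.GET_DEVICES_OUTPUTS)) {
        if (device.getType() == AudioDeviceInfo.TYPE_BLE_BROADCAST) {
            // A BLE broadcast group is currently available as an output device.
        }
    }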
diff --git a/media/java/android/media/AudioDevicePort.java b/media/java/android/media/AudioDevicePort.java
index ebe08822a0c9..9211c53a17bc 100644
--- a/media/java/android/media/AudioDevicePort.java
+++ b/media/java/android/media/AudioDevicePort.java
@@ -90,7 +90,8 @@ public class AudioDevicePort extends AudioPort {
* {@link AudioManager#DEVICE_OUT_BLE_HEADSET}, {@link AudioManager#DEVICE_OUT_BLE_SPEAKER})
* use the MAC address of the bluetooth device in the form "00:11:22:AA:BB:CC" as reported by
* {@link BluetoothDevice#getAddress()}.
- * - Deivces that do not have an address will indicate an empty string "".
+ * - Bluetooth LE broadcast groups ({@link AudioManager#DEVICE_OUT_BLE_BROADCAST}) use the group number.
+ * - Devices that do not have an address will indicate an empty string "".
*/
public String address() {
return mAddress;
diff --git a/media/java/android/media/AudioManager.java b/media/java/android/media/AudioManager.java
index 38f9607c9529..d9783e681a25 100644
--- a/media/java/android/media/AudioManager.java
+++ b/media/java/android/media/AudioManager.java
@@ -32,7 +32,7 @@ import android.app.NotificationManager;
import android.app.PendingIntent;
import android.bluetooth.BluetoothCodecConfig;
import android.bluetooth.BluetoothDevice;
-import android.bluetooth.BluetoothProfile;
+import android.bluetooth.BluetoothLeAudioCodecConfig;
import android.compat.annotation.UnsupportedAppUsage;
import android.content.ComponentName;
import android.content.Context;
@@ -1172,7 +1172,8 @@ public class AudioManager {
*
* @hide
*/
- @UnsupportedAppUsage
+ @SystemApi
+ @RequiresPermission("android.permission.QUERY_AUDIO_STATE")
public int getLastAudibleStreamVolume(int streamType) {
final IAudioService service = getService();
try {
@@ -3268,6 +3269,54 @@ public class AudioManager {
}
/**
+ * @hide
+ */
+ @SystemApi(client = SystemApi.Client.MODULE_LIBRARIES)
+ @RequiresPermission(android.Manifest.permission.BLUETOOTH_STACK)
+ public void setHfpEnabled(boolean enable) {
+ AudioSystem.setParameters("hfp_enable=" + enable);
+ }
+
+ /**
+ * @hide
+ */
+ @SystemApi(client = SystemApi.Client.MODULE_LIBRARIES)
+ @RequiresPermission(android.Manifest.permission.BLUETOOTH_STACK)
+ public void setHfpVolume(int volume) {
+ AudioSystem.setParameters("hfp_volume=" + volume);
+ }
+
+ /**
+ * @hide
+ */
+ @SystemApi(client = SystemApi.Client.MODULE_LIBRARIES)
+ @RequiresPermission(android.Manifest.permission.BLUETOOTH_STACK)
+ public void setHfpSamplingRate(int rate) {
+ AudioSystem.setParameters("hfp_set_sampling_rate=" + rate);
+ }
+
+ /**
+ * @hide
+ */
+ @SystemApi(client = SystemApi.Client.MODULE_LIBRARIES)
+ @RequiresPermission(android.Manifest.permission.BLUETOOTH_STACK)
+ public void setBluetoothHeadsetProperties(@NonNull String name, boolean hasNrecEnabled,
+ boolean hasWbsEnabled) {
+ AudioSystem.setParameters("bt_headset_name=" + name
+ + ";bt_headset_nrec=" + (hasNrecEnabled ? "on" : "off")
+ + ";bt_wbs=" + (hasWbsEnabled ? "on" : "off"));
+ }
+
+ /**
+ * @hide
+ */
+ @SystemApi(client = SystemApi.Client.MODULE_LIBRARIES)
+ @RequiresPermission(android.Manifest.permission.BLUETOOTH_STACK)
+ public void setA2dpSuspended(boolean enable) {
+ AudioSystem.setParameters("A2dpSuspended=" + enable);
+ }
+
+ /**
* Gets a variable number of parameter values from audio hardware.
*
* @param keys list of parameters
@@ -5216,21 +5265,6 @@ public class AudioManager {
}
}
- /**
- * @hide
- * Notifies AudioService that it is connected to an A2DP device that supports absolute volume,
- * so that AudioService can send volume change events to the A2DP device, rather than handling
- * them.
- */
- public void avrcpSupportsAbsoluteVolume(String address, boolean support) {
- final IAudioService service = getService();
- try {
- service.avrcpSupportsAbsoluteVolume(address, support);
- } catch (RemoteException e) {
- throw e.rethrowFromSystemServer();
- }
- }
-
/**
* {@hide}
*/
@@ -5383,6 +5417,10 @@ public class AudioManager {
*/
public static final int DEVICE_OUT_BLE_SPEAKER = AudioSystem.DEVICE_OUT_BLE_SPEAKER;
/** @hide
+ * The audio output device code for a BLE audio broadcast group.
+ */
+ public static final int DEVICE_OUT_BLE_BROADCAST = AudioSystem.DEVICE_OUT_BLE_BROADCAST;
+ /** @hide
* This is not used as a returned value from {@link #getDevicesForStream}, but could be
* used in the future in a set method to select whatever default device is chosen by the
* platform-specific implementation.
@@ -5776,78 +5814,26 @@ public class AudioManager {
}
}
- /**
- * Indicate Hearing Aid connection state change and eventually suppress
- * the {@link AudioManager.ACTION_AUDIO_BECOMING_NOISY} intent.
- * This operation is asynchronous but its execution will still be sequentially scheduled
- * relative to calls to {@link #setBluetoothA2dpDeviceConnectionStateSuppressNoisyIntent(
- * * BluetoothDevice, int, int, boolean, int)} and
- * and {@link #handleBluetoothA2dpDeviceConfigChange(BluetoothDevice)}.
- * @param device Bluetooth device connected/disconnected
- * @param state new connection state (BluetoothProfile.STATE_xxx)
- * @param musicDevice Default get system volume for the connecting device.
- * (either {@link android.bluetooth.BluetoothProfile.hearingaid} or
- * {@link android.bluetooth.BluetoothProfile.HEARING_AID})
- * @param suppressNoisyIntent if true the
- * {@link AudioManager.ACTION_AUDIO_BECOMING_NOISY} intent will not be sent.
- * {@hide}
- */
- public void setBluetoothHearingAidDeviceConnectionState(
- BluetoothDevice device, int state, boolean suppressNoisyIntent,
- int musicDevice) {
- final IAudioService service = getService();
- try {
- service.setBluetoothHearingAidDeviceConnectionState(device,
- state, suppressNoisyIntent, musicDevice);
- } catch (RemoteException e) {
- throw e.rethrowFromSystemServer();
- }
- }
-
- /**
- * Indicate A2DP source or sink connection state change and eventually suppress
- * the {@link AudioManager.ACTION_AUDIO_BECOMING_NOISY} intent.
- * This operation is asynchronous but its execution will still be sequentially scheduled
- * relative to calls to {@link #setBluetoothHearingAidDeviceConnectionState(BluetoothDevice,
- * int, boolean, int)} and
- * {@link #handleBluetoothA2dpDeviceConfigChange(BluetoothDevice)}.
- * @param device Bluetooth device connected/disconnected
- * @param state new connection state, {@link BluetoothProfile#STATE_CONNECTED}
- * or {@link BluetoothProfile#STATE_DISCONNECTED}
- * @param profile profile for the A2DP device
- * @param a2dpVolume New volume for the connecting device. Does nothing if disconnecting.
- * (either {@link android.bluetooth.BluetoothProfile.A2DP} or
- * {@link android.bluetooth.BluetoothProfile.A2DP_SINK})
- * @param suppressNoisyIntent if true the
- * {@link AudioManager.ACTION_AUDIO_BECOMING_NOISY} intent will not be sent.
- * {@hide}
- */
- public void setBluetoothA2dpDeviceConnectionStateSuppressNoisyIntent(
- BluetoothDevice device, int state,
- int profile, boolean suppressNoisyIntent, int a2dpVolume) {
- final IAudioService service = getService();
- try {
- service.setBluetoothA2dpDeviceConnectionStateSuppressNoisyIntent(device,
- state, profile, suppressNoisyIntent, a2dpVolume);
- } catch (RemoteException e) {
- throw e.rethrowFromSystemServer();
- }
- }
-
- /**
- * Indicate A2DP device configuration has changed.
- * This operation is asynchronous but its execution will still be sequentially scheduled
- * relative to calls to
- * {@link #setBluetoothA2dpDeviceConnectionStateSuppressNoisyIntent(BluetoothDevice, int, int,
- * boolean, int)} and
- * {@link #setBluetoothHearingAidDeviceConnectionState(BluetoothDevice, int, boolean, int)}
- * @param device Bluetooth device whose configuration has changed.
+ /**
+ * Indicate Bluetooth profile connection state change.
+ * Configuration changes for A2DP are indicated by having the same <code>newDevice</code> and
+ * <code>previousDevice</code>.
+ * This operation is asynchronous.
+ *
+ * @param newDevice Bluetooth device connected, or null if there is no new device
+ * @param previousDevice Bluetooth device disconnected, or null if there is no disconnected
+ * device
+ * @param info contains all information related to the device; see {@link BluetoothProfileConnectionInfo}
* {@hide}
*/
- public void handleBluetoothA2dpDeviceConfigChange(BluetoothDevice device) {
+ @SystemApi(client = SystemApi.Client.MODULE_LIBRARIES)
+ @RequiresPermission(android.Manifest.permission.BLUETOOTH_STACK)
+ public void handleBluetoothActiveDeviceChanged(@Nullable BluetoothDevice newDevice,
+ @Nullable BluetoothDevice previousDevice,
+ @NonNull BluetoothProfileConnectionInfo info) {
final IAudioService service = getService();
try {
- service.handleBluetoothA2dpDeviceConfigChange(device);
+ service.handleBluetoothActiveDeviceChanged(newDevice, previousDevice, info);
} catch (RemoteException e) {
throw e.rethrowFromSystemServer();
}
@@ -6830,11 +6816,13 @@ public class AudioManager {
* supported for offload A2DP playback
* @hide
*/
- public List<BluetoothCodecConfig> getHwOffloadEncodingFormatsSupportedForA2DP() {
- ArrayList<Integer> formatsList = new ArrayList<Integer>();
- ArrayList<BluetoothCodecConfig> codecConfigList = new ArrayList<BluetoothCodecConfig>();
+ @SystemApi(client = SystemApi.Client.MODULE_LIBRARIES)
+ public @NonNull List<BluetoothCodecConfig> getHwOffloadFormatsSupportedForA2dp() {
+ ArrayList<Integer> formatsList = new ArrayList<>();
+ ArrayList<BluetoothCodecConfig> codecConfigList = new ArrayList<>();
- int status = AudioSystem.getHwOffloadEncodingFormatsSupportedForA2DP(formatsList);
+ int status = AudioSystem.getHwOffloadFormatsSupportedForBluetoothMedia(
+ AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP, formatsList);
if (status != AudioManager.SUCCESS) {
Log.e(TAG, "getHwOffloadEncodingFormatsSupportedForA2DP failed:" + status);
return codecConfigList;
@@ -6842,14 +6830,45 @@ public class AudioManager {
for (Integer format : formatsList) {
int btSourceCodec = AudioSystem.audioFormatToBluetoothSourceCodec(format);
- if (btSourceCodec
- != BluetoothCodecConfig.SOURCE_CODEC_TYPE_INVALID) {
+ if (btSourceCodec != BluetoothCodecConfig.SOURCE_CODEC_TYPE_INVALID) {
codecConfigList.add(new BluetoothCodecConfig(btSourceCodec));
}
}
return codecConfigList;
}
+ /**
+ * Returns a list of audio formats that correspond to encoding formats
+ * supported on the offload path for LE Audio playback.
+ *
+ * @return a list of {@link BluetoothLeAudioCodecConfig} objects containing encoding formats
+ * supported for offload LE Audio playback
+ * @hide
+ */
+ @SystemApi(client = SystemApi.Client.MODULE_LIBRARIES)
+ @NonNull
+ public List<BluetoothLeAudioCodecConfig> getHwOffloadFormatsSupportedForLeAudio() {
+ ArrayList<Integer> formatsList = new ArrayList<>();
+ ArrayList<BluetoothLeAudioCodecConfig> leAudioCodecConfigList = new ArrayList<>();
+
+ int status = AudioSystem.getHwOffloadFormatsSupportedForBluetoothMedia(
+ AudioSystem.DEVICE_OUT_BLE_HEADSET, formatsList);
+ if (status != AudioManager.SUCCESS) {
+ Log.e(TAG, "getHwOffloadEncodingFormatsSupportedForLeAudio failed:" + status);
+ return leAudioCodecConfigList;
+ }
+
+ for (Integer format : formatsList) {
+ int btLeAudioCodec = AudioSystem.audioFormatToBluetoothLeAudioSourceCodec(format);
+ if (btLeAudioCodec != BluetoothLeAudioCodecConfig.SOURCE_CODEC_TYPE_INVALID) {
+ leAudioCodecConfigList.add(new BluetoothLeAudioCodecConfig.Builder()
+ .setCodecType(btLeAudioCodec)
+ .build());
+ }
+ }
+ return leAudioCodecConfigList;
+ }
+
// Since we need to calculate the changes since THE LAST NOTIFICATION, and not since the
// (unpredictable) last time updateAudioPortCache() was called by someone, keep a list
// of the ports that exist at the time of the last notification.
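
The new getHwOffloadFormatsSupportedForLeAudio() mirrors the renamed A2DP query, and both funnel into the shared getHwOffloadFormatsSupportedForBluetoothMedia() native call. A minimal sketch of how a module-library caller (e.g. the Bluetooth stack; ordinary apps cannot call this system API) might check for LC3 offload support, assuming an AudioManager instance named audioManager:

    import android.bluetooth.BluetoothLeAudioCodecConfig;
    import java.util.List;

    List<BluetoothLeAudioCodecConfig> offloadCodecs =
            audioManager.getHwOffloadFormatsSupportedForLeAudio();
    boolean lc3Offloaded = false;
    for (BluetoothLeAudioCodecConfig config : offloadCodecs) {
        if (config.getCodecType() == BluetoothLeAudioCodecConfig.SOURCE_CODEC_TYPE_LC3) {
            lc3Offloaded = true; // LE Audio LC3 encoding can be offloaded to hardware.
        }
    }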
diff --git a/media/java/android/media/AudioSystem.java b/media/java/android/media/AudioSystem.java
index 6ff551a68c18..f6a5ec4f02e6 100644
--- a/media/java/android/media/AudioSystem.java
+++ b/media/java/android/media/AudioSystem.java
@@ -22,6 +22,7 @@ import android.annotation.Nullable;
import android.annotation.RequiresPermission;
import android.annotation.TestApi;
import android.bluetooth.BluetoothCodecConfig;
+import android.bluetooth.BluetoothLeAudioCodecConfig;
import android.compat.annotation.UnsupportedAppUsage;
import android.content.Context;
import android.content.pm.PackageManager;
@@ -230,6 +231,9 @@ public class AudioSystem
public static final int AUDIO_FORMAT_APTX_HD = 0x21000000;
/** @hide */
public static final int AUDIO_FORMAT_LDAC = 0x23000000;
+ /** @hide */
+ public static final int AUDIO_FORMAT_LC3 = 0x2B000000;
+
/** @hide */
@IntDef(flag = false, prefix = "AUDIO_FORMAT_", value = {
@@ -239,11 +243,27 @@ public class AudioSystem
AUDIO_FORMAT_SBC,
AUDIO_FORMAT_APTX,
AUDIO_FORMAT_APTX_HD,
- AUDIO_FORMAT_LDAC }
+ AUDIO_FORMAT_LDAC,
+ AUDIO_FORMAT_LC3}
)
@Retention(RetentionPolicy.SOURCE)
public @interface AudioFormatNativeEnumForBtCodec {}
+ /** @hide */
+ @IntDef(flag = false, prefix = "AUDIO_FORMAT_", value = {
+ AUDIO_FORMAT_LC3}
+ )
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface AudioFormatNativeEnumForBtLeAudioCodec {}
+
+ /** @hide */
+ @IntDef(flag = false, prefix = "DEVICE_", value = {
+ DEVICE_OUT_BLUETOOTH_A2DP,
+ DEVICE_OUT_BLE_HEADSET}
+ )
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface DeviceType {}
+
/**
* @hide
* Convert audio format enum values to Bluetooth codec values
@@ -256,6 +276,7 @@ public class AudioSystem
case AUDIO_FORMAT_APTX: return BluetoothCodecConfig.SOURCE_CODEC_TYPE_APTX;
case AUDIO_FORMAT_APTX_HD: return BluetoothCodecConfig.SOURCE_CODEC_TYPE_APTX_HD;
case AUDIO_FORMAT_LDAC: return BluetoothCodecConfig.SOURCE_CODEC_TYPE_LDAC;
+ case AUDIO_FORMAT_LC3: return BluetoothCodecConfig.SOURCE_CODEC_TYPE_LC3;
default:
Log.e(TAG, "Unknown audio format 0x" + Integer.toHexString(audioFormat)
+ " for conversion to BT codec");
@@ -265,6 +286,21 @@ public class AudioSystem
/**
* @hide
+ * Convert audio format enum values to Bluetooth LE audio codec values
+ */
+ public static int audioFormatToBluetoothLeAudioSourceCodec(
+ @AudioFormatNativeEnumForBtLeAudioCodec int audioFormat) {
+ switch (audioFormat) {
+ case AUDIO_FORMAT_LC3: return BluetoothLeAudioCodecConfig.SOURCE_CODEC_TYPE_LC3;
+ default:
+ Log.e(TAG, "Unknown audio format 0x" + Integer.toHexString(audioFormat)
+ + " for conversion to BT LE audio codec");
+ return BluetoothLeAudioCodecConfig.SOURCE_CODEC_TYPE_INVALID;
+ }
+ }
+
+ /**
+ * @hide
* Convert a Bluetooth codec to an audio format enum
* @param btCodec the codec to convert.
* @return the audio format, or {@link #AUDIO_FORMAT_DEFAULT} if unknown
@@ -281,6 +317,8 @@ public class AudioSystem
return AudioSystem.AUDIO_FORMAT_APTX_HD;
case BluetoothCodecConfig.SOURCE_CODEC_TYPE_LDAC:
return AudioSystem.AUDIO_FORMAT_LDAC;
+ case BluetoothCodecConfig.SOURCE_CODEC_TYPE_LC3:
+ return AudioSystem.AUDIO_FORMAT_LC3;
default:
Log.e(TAG, "Unknown BT codec 0x" + Integer.toHexString(btCodec)
+ " for conversion to audio format");
@@ -381,6 +419,8 @@ public class AudioSystem
return "AUDIO_FORMAT_LHDC_LL";
case /* AUDIO_FORMAT_APTX_TWSP */ 0x2A000000:
return "AUDIO_FORMAT_APTX_TWSP";
+ case /* AUDIO_FORMAT_LC3 */ 0x2B000000:
+ return "AUDIO_FORMAT_LC3";
/* Aliases */
case /* AUDIO_FORMAT_PCM_16_BIT */ 0x1:
@@ -943,6 +983,8 @@ public class AudioSystem
public static final int DEVICE_OUT_BLE_HEADSET = 0x20000000;
/** @hide */
public static final int DEVICE_OUT_BLE_SPEAKER = 0x20000001;
+ /** @hide */
+ public static final int DEVICE_OUT_BLE_BROADCAST = 0x20000002;
/** @hide */
public static final int DEVICE_OUT_DEFAULT = DEVICE_BIT_DEFAULT;
@@ -1003,6 +1045,7 @@ public class AudioSystem
DEVICE_OUT_ALL_SET.add(DEVICE_OUT_ECHO_CANCELLER);
DEVICE_OUT_ALL_SET.add(DEVICE_OUT_BLE_HEADSET);
DEVICE_OUT_ALL_SET.add(DEVICE_OUT_BLE_SPEAKER);
+ DEVICE_OUT_ALL_SET.add(DEVICE_OUT_BLE_BROADCAST);
DEVICE_OUT_ALL_SET.add(DEVICE_OUT_DEFAULT);
DEVICE_OUT_ALL_A2DP_SET = new HashSet<>();
@@ -1033,6 +1076,7 @@ public class AudioSystem
DEVICE_OUT_ALL_BLE_SET = new HashSet<>();
DEVICE_OUT_ALL_BLE_SET.add(DEVICE_OUT_BLE_HEADSET);
DEVICE_OUT_ALL_BLE_SET.add(DEVICE_OUT_BLE_SPEAKER);
+ DEVICE_OUT_ALL_BLE_SET.add(DEVICE_OUT_BLE_BROADCAST);
}
// input devices
@@ -1216,6 +1260,7 @@ public class AudioSystem
/** @hide */ public static final String DEVICE_OUT_ECHO_CANCELLER_NAME = "echo_canceller";
/** @hide */ public static final String DEVICE_OUT_BLE_HEADSET_NAME = "ble_headset";
/** @hide */ public static final String DEVICE_OUT_BLE_SPEAKER_NAME = "ble_speaker";
+ /** @hide */ public static final String DEVICE_OUT_BLE_BROADCAST_NAME = "ble_broadcast";
/** @hide */ public static final String DEVICE_IN_COMMUNICATION_NAME = "communication";
/** @hide */ public static final String DEVICE_IN_AMBIENT_NAME = "ambient";
@@ -1315,6 +1360,8 @@ public class AudioSystem
return DEVICE_OUT_BLE_HEADSET_NAME;
case DEVICE_OUT_BLE_SPEAKER:
return DEVICE_OUT_BLE_SPEAKER_NAME;
+ case DEVICE_OUT_BLE_BROADCAST:
+ return DEVICE_OUT_BLE_BROADCAST_NAME;
case DEVICE_OUT_DEFAULT:
default:
return "0x" + Integer.toHexString(device);
@@ -1755,10 +1802,10 @@ public class AudioSystem
/**
* @hide
- * Returns a list of audio formats (codec) supported on the A2DP offload path.
+ * Returns a list of audio formats (codec) supported on the A2DP and LE audio offload path.
*/
- public static native int getHwOffloadEncodingFormatsSupportedForA2DP(
- ArrayList<Integer> formatList);
+ public static native int getHwOffloadFormatsSupportedForBluetoothMedia(
+ @DeviceType int deviceType, ArrayList<Integer> formatList);
/** @hide */
public static native int setSurroundFormatEnabled(int audioFormat, boolean enabled);
diff --git a/media/java/android/media/BluetoothProfileConnectionInfo.aidl b/media/java/android/media/BluetoothProfileConnectionInfo.aidl
new file mode 100644
index 000000000000..0617084fd826
--- /dev/null
+++ b/media/java/android/media/BluetoothProfileConnectionInfo.aidl
@@ -0,0 +1,20 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+parcelable BluetoothProfileConnectionInfo;
+
diff --git a/media/java/android/media/BluetoothProfileConnectionInfo.java b/media/java/android/media/BluetoothProfileConnectionInfo.java
new file mode 100644
index 000000000000..c14884657ddd
--- /dev/null
+++ b/media/java/android/media/BluetoothProfileConnectionInfo.java
@@ -0,0 +1,159 @@
+/*
+ * Copyright 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+import android.annotation.NonNull;
+import android.annotation.SystemApi;
+import android.bluetooth.BluetoothProfile;
+import android.os.Parcel;
+import android.os.Parcelable;
+
+/**
+ * Contains information about a Bluetooth profile connection state change.
+ * {@hide}
+ */
+@SystemApi(client = SystemApi.Client.MODULE_LIBRARIES)
+public final class BluetoothProfileConnectionInfo implements Parcelable {
+ private final int mProfile;
+ private final boolean mSupprNoisy;
+ private final int mVolume;
+ private final boolean mIsLeOutput;
+
+ private BluetoothProfileConnectionInfo(int profile, boolean suppressNoisyIntent,
+ int volume, boolean isLeOutput) {
+ mProfile = profile;
+ mSupprNoisy = suppressNoisyIntent;
+ mVolume = volume;
+ mIsLeOutput = isLeOutput;
+ }
+
+ /**
+ * Constructor used by BtHelper when a profile is connected
+ * {@hide}
+ */
+ public BluetoothProfileConnectionInfo(int profile) {
+ this(profile, false, -1, false);
+ }
+
+ public static final @NonNull Parcelable.Creator<BluetoothProfileConnectionInfo> CREATOR =
+ new Parcelable.Creator<BluetoothProfileConnectionInfo>() {
+ @Override
+ public BluetoothProfileConnectionInfo createFromParcel(Parcel source) {
+ return new BluetoothProfileConnectionInfo(source.readInt(),
+ source.readBoolean(), source.readInt(), source.readBoolean());
+ }
+
+ @Override
+ public BluetoothProfileConnectionInfo[] newArray(int size) {
+ return new BluetoothProfileConnectionInfo[size];
+ }
+ };
+
+ @Override
+ public void writeToParcel(@NonNull Parcel dest, @WriteFlags int flags) {
+ dest.writeInt(mProfile);
+ dest.writeBoolean(mSupprNoisy);
+ dest.writeInt(mVolume);
+ dest.writeBoolean(mIsLeOutput);
+ }
+
+ @Override
+ public int describeContents() {
+ return 0;
+ }
+
+ /**
+ * Constructor for A2dp info
+ *
+ * @param suppressNoisyIntent if true the {@link AudioManager.ACTION_AUDIO_BECOMING_NOISY}
+ * intent will not be sent.
+ *
+ * @param volume volume of the device, or -1 to ignore the value
+ */
+ public static @NonNull BluetoothProfileConnectionInfo createA2dpInfo(
+ boolean suppressNoisyIntent, int volume) {
+ return new BluetoothProfileConnectionInfo(BluetoothProfile.A2DP, suppressNoisyIntent,
+ volume, false);
+ }
+
+ /**
+ * Constructor for A2dp sink info
+ * The {@link AudioManager.ACTION_AUDIO_BECOMING_NOISY} intent will not be sent.
+ *
+ * @param volume volume of the device, or -1 to ignore the value
+ */
+ public static @NonNull BluetoothProfileConnectionInfo createA2dpSinkInfo(int volume) {
+ return new BluetoothProfileConnectionInfo(BluetoothProfile.A2DP_SINK, true, volume, false);
+ }
+
+ /**
+ * Constructor for hearing aid info
+ *
+ * @param suppressNoisyIntent if true the {@link AudioManager.ACTION_AUDIO_BECOMING_NOISY}
+ * intent will not be sent.
+ */
+ public static @NonNull BluetoothProfileConnectionInfo createHearingAidInfo(
+ boolean suppressNoisyIntent) {
+ return new BluetoothProfileConnectionInfo(BluetoothProfile.HEARING_AID, suppressNoisyIntent,
+ -1, false);
+ }
+
+ /**
+ * Constructor for LE audio info
+ *
+ * @param suppressNoisyIntent if true the {@link AudioManager.ACTION_AUDIO_BECOMING_NOISY}
+ * intent will not be sent.
+ *
+ * @param isLeOutput if true, the device is an output device; if false, it is an input device
+ */
+ public static @NonNull BluetoothProfileConnectionInfo createLeAudioInfo(
+ boolean suppressNoisyIntent, boolean isLeOutput) {
+ return new BluetoothProfileConnectionInfo(BluetoothProfile.LE_AUDIO, suppressNoisyIntent,
+ -1, isLeOutput);
+ }
+
+ /**
+ * @return The profile connection
+ */
+ public int getProfile() {
+ return mProfile;
+ }
+
+ /**
+ * @return {@code true} if {@link AudioManager.ACTION_AUDIO_BECOMING_NOISY} intent will not be
+ * sent
+ */
+ public boolean isSuppressNoisyIntent() {
+ return mSupprNoisy;
+ }
+
+ /**
+ * Only for {@link BluetoothProfile.A2DP} profile
+ * @return the volume of the connection or -1 if the value is ignored
+ */
+ public int getVolume() {
+ return mVolume;
+ }
+
+ /**
+ * Only for {@link BluetoothProfile.LE_AUDIO} profile
+ * @return {@code true} if the LE device is an output device, {@code false} if it's an input
+ * device
+ */
+ public boolean isLeOutput() {
+ return mIsLeOutput;
+ }
+}
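
Taken together with the AudioManager change above, these factory methods replace the per-profile connection calls that this patch removes. A minimal migration sketch for an A2DP connection event, assuming an AudioManager am, a BluetoothDevice device, and an int volume (names are illustrative; callers need the BLUETOOTH_STACK permission):

    // Previously:
    //   am.setBluetoothA2dpDeviceConnectionStateSuppressNoisyIntent(device,
    //           BluetoothProfile.STATE_CONNECTED, BluetoothProfile.A2DP,
    //           /* suppressNoisyIntent= */ true, volume);
    // With this change:
    am.handleBluetoothActiveDeviceChanged(
            /* newDevice= */ device,
            /* previousDevice= */ null,
            BluetoothProfileConnectionInfo.createA2dpInfo(
                    /* suppressNoisyIntent= */ true, volume));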
diff --git a/media/java/android/media/IAudioService.aidl b/media/java/android/media/IAudioService.aidl
index abd067cfe2f3..ea48e4f745b5 100755
--- a/media/java/android/media/IAudioService.aidl
+++ b/media/java/android/media/IAudioService.aidl
@@ -25,6 +25,7 @@ import android.media.AudioFocusInfo;
import android.media.AudioPlaybackConfiguration;
import android.media.AudioRecordingConfiguration;
import android.media.AudioRoutesInfo;
+import android.media.BluetoothProfileConnectionInfo;
import android.media.IAudioFocusDispatcher;
import android.media.IAudioModeDispatcher;
import android.media.IAudioRoutesObserver;
@@ -176,8 +177,6 @@ interface IAudioService {
int getEncodedSurroundMode(int targetSdkVersion);
- oneway void avrcpSupportsAbsoluteVolume(String address, boolean support);
-
void setSpeakerphoneOn(IBinder cb, boolean on);
boolean isSpeakerphoneOn();
@@ -214,8 +213,6 @@ interface IAudioService {
void setWiredDeviceConnectionState(int type, int state, String address, String name,
String caller);
- void handleBluetoothA2dpDeviceConfigChange(in BluetoothDevice device);
-
@UnsupportedAppUsage
AudioRoutesInfo startWatchingRoutes(in IAudioRoutesObserver observer);
@@ -275,11 +272,8 @@ interface IAudioService {
oneway void playerHasOpPlayAudio(in int piid, in boolean hasOpPlayAudio);
- void setBluetoothHearingAidDeviceConnectionState(in BluetoothDevice device,
- int state, boolean suppressNoisyIntent, int musicDevice);
-
- void setBluetoothA2dpDeviceConnectionStateSuppressNoisyIntent(in BluetoothDevice device,
- int state, int profile, boolean suppressNoisyIntent, int a2dpVolume);
+ void handleBluetoothActiveDeviceChanged(in BluetoothDevice newDevice,
+ in BluetoothDevice previousDevice, in BluetoothProfileConnectionInfo info);
oneway void setFocusRequestResultFromExtPolicy(in AudioFocusInfo afi, int requestResult,
in IAudioPolicyCallback pcb);
diff --git a/media/java/android/media/Image.java b/media/java/android/media/Image.java
index 1616c03112a8..54e19db8e7e9 100644
--- a/media/java/android/media/Image.java
+++ b/media/java/android/media/Image.java
@@ -163,6 +163,14 @@ public abstract class Image implements AutoCloseable {
* {@link android.graphics.BitmapFactory#decodeByteArray BitmapFactory#decodeByteArray}.
* </td>
* </tr>
+ * <tr>
+ * <td>{@link android.graphics.ImageFormat#YCBCR_P010 YCBCR_P010}</td>
+ * <td>1</td>
+ * <td>P010 is a 4:2:0 YCbCr semiplanar format comprised of a WxH Y plane
+ * followed by a Wx(H/2) CbCr plane. Each sample is represented by a 16-bit
+ * little-endian value, with the lower 6 bits set to zero.
+ * </td>
+ * </tr>
* </table>
*
* @see android.graphics.ImageFormat
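
The new table row fully determines the buffer size for a P010 image: a WxH Y plane plus a Wx(H/2) interleaved CbCr plane at 2 bytes per sample. A small sketch of that arithmetic (the helper name is illustrative):

    // Bytes required for a width x height YCBCR_P010 image, per the table above.
    static long p010BufferSize(int width, int height) {
        long ySize = (long) width * height * 2;            // 16-bit Y samples
        long cbcrSize = (long) width * (height / 2) * 2;   // 16-bit interleaved Cb/Cr samples
        return ySize + cbcrSize;                           // = 3 * width * height for even height
    }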
diff --git a/media/java/android/media/JetPlayer.java b/media/java/android/media/JetPlayer.java
index 875c6f52d9b2..77495646ea0c 100644
--- a/media/java/android/media/JetPlayer.java
+++ b/media/java/android/media/JetPlayer.java
@@ -31,35 +31,33 @@ import java.lang.ref.WeakReference;
/**
* JetPlayer provides access to JET content playback and control.
- *
- * <p>Please refer to the JET Creator User Manual for a presentation of the JET interactive
- * music concept and how to use the JetCreator tool to create content to be player by JetPlayer.
- *
+ *
+ * <p>Please refer to the
+ * <a href="https://developer.android.com/guide/topics/media/jet/jetcreator_manual">JET Creator User
+ * Manual</a> for a presentation of the JET interactive music concept and how to use the JetCreator
+ * tool to create content to be played by JetPlayer.
+ *
* <p>Use of the JetPlayer class is based around the playback of a number of JET segments
* sequentially added to a playback FIFO queue. The rendering of the MIDI content stored in each
* segment can be dynamically affected by two mechanisms:
* <ul>
- * <li>tracks in a segment can be muted or unmuted at any moment, individually or through
- * a mask (to change the mute state of multiple tracks at once)</li>
- * <li>parts of tracks in a segment can be played at predefined points in the segment, in order
- * to maintain synchronization with the other tracks in the segment. This is achieved through
- * the notion of "clips", which can be triggered at any time, but that will play only at the
- * right time, as authored in the corresponding JET file.</li>
+ * <li>Tracks in a segment can be muted or unmuted at any moment, individually or through a mask
+ * (to change the mute state of multiple tracks at once).
+ * <li>Parts of tracks in a segment can be played at predefined points in the segment, in order to
+ * maintain synchronization with the other tracks in the segment. This is achieved through the
+ * notion of "clips", which can be triggered at any time, but that will play only at the right
+ * time, as authored in the corresponding JET file.
* </ul>
- * As a result of the rendering and playback of the JET segments, the user of the JetPlayer instance
- * can receive notifications from the JET engine relative to:
+ *
+ * <p>As a result of the rendering and playback of the JET segments, the user of the JetPlayer
+ * instance can receive notifications from the JET engine relative to:
* <ul>
- * <li>the playback state,</li>
- * <li>the number of segments left to play in the queue,</li>
- * <li>application controller events (CC80-83) to mark points in the MIDI segments.</li>
+ * <li>Playback state
+ * <li>Number of segments left to play in the queue
+ * <li>Application controller events (CC80-83) to mark points in the MIDI segments
* </ul>
- * Use {@link #getJetPlayer()} to construct a JetPlayer instance. JetPlayer is a singleton class.
- * </p>
*
- * <div class="special reference">
- * <h3>Developer Guides</h3>
- * <p>For more information about how to use JetPlayer, read the
- * <a href="{@docRoot}guide/topics/media/jetplayer.html">JetPlayer</a> developer guide.</p></div>
+ * <p>Use {@link #getJetPlayer()} to construct a JetPlayer instance. JetPlayer is a singleton class.
*/
public class JetPlayer
{
@@ -140,7 +138,7 @@ public class JetPlayer
//------------------------
/**
* Factory method for the JetPlayer class.
- * @return the singleton JetPlayer instance
+ * @return the singleton JetPlayer instance.
*/
public static JetPlayer getJetPlayer() {
if (singletonRef == null) {
@@ -203,7 +201,8 @@ public class JetPlayer
// Getters
//------------------------
/**
- * Returns the maximum number of simultaneous MIDI tracks supported by JetPlayer
+ * Gets the maximum number of simultaneous MIDI tracks supported by JetPlayer.
+ * @return the maximum number of simultaneous MIDI tracks supported by JetPlayer.
*/
public static int getMaxTracks() {
return JetPlayer.MAXTRACKS;
@@ -459,10 +458,9 @@ public class JetPlayer
//------------------------
/**
* Sets the listener JetPlayer notifies when a JET event is generated by the rendering and
- * playback engine.
- * Notifications will be received in the same thread as the one in which the JetPlayer
- * instance was created.
- * @param listener
+ * playback engine. Notifications are received in the same thread as the one in which the
+ * JetPlayer instance was created.
+ * @param listener the listener that will be notified when a JET event is generated.
*/
public void setEventListener(OnJetEventListener listener) {
setEventListener(listener, null);
@@ -470,10 +468,9 @@ public class JetPlayer
/**
* Sets the listener JetPlayer notifies when a JET event is generated by the rendering and
- * playback engine.
- * Use this method to receive JET events in the Handler associated with another
- * thread than the one in which you created the JetPlayer instance.
- * @param listener
+ * playback engine. Use this method to receive JET events in the Handler associated with
+ * a thread other than the one in which you created the JetPlayer instance.
+ * @param listener the listener that will be notified when a JET event is generated.
* @param handler the Handler that will receive the event notification messages.
*/
public void setEventListener(OnJetEventListener listener, Handler handler) {
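
For illustration, a minimal sketch of the Handler-based registration documented above; the
JetEventBridge class and thread name are made up, and the listener methods only show the
callback signatures:

    import android.media.JetPlayer;
    import android.os.Handler;
    import android.os.HandlerThread;

    class JetEventBridge {
        private final HandlerThread mJetThread = new HandlerThread("jet-events");

        // Routes JET engine notifications to a dedicated worker thread instead of
        // the thread that created the JetPlayer instance.
        void attach(JetPlayer jet) {
            mJetThread.start();
            Handler jetHandler = new Handler(mJetThread.getLooper());
            jet.setEventListener(new JetPlayer.OnJetEventListener() {
                @Override
                public void onJetEvent(JetPlayer player, short segment, byte track,
                        byte channel, byte controller, byte value) {
                    // React here to the authored CC80-83 controller events.
                }
                @Override
                public void onJetUserIdUpdate(JetPlayer player, int userId, int repeatCount) { }
                @Override
                public void onJetNumQueuedSegmentUpdate(JetPlayer player, int nbSegments) { }
                @Override
                public void onJetPauseUpdate(JetPlayer player, int paused) { }
            }, jetHandler);
        }
    }
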
diff --git a/media/java/android/media/MediaActionSound.java b/media/java/android/media/MediaActionSound.java
index dcd4dce5f3eb..ec56d614f2b5 100644
--- a/media/java/android/media/MediaActionSound.java
+++ b/media/java/android/media/MediaActionSound.java
@@ -16,8 +16,11 @@
package android.media;
-import android.media.AudioManager;
+import android.content.Context;
import android.media.SoundPool;
+import android.os.IBinder;
+import android.os.RemoteException;
+import android.os.ServiceManager;
import android.util.Log;
/**
@@ -104,6 +107,26 @@ public class MediaActionSound {
private static final int STATE_LOADING_PLAY_REQUESTED = 2;
private static final int STATE_LOADED = 3;
+ /**
+ * <p>Returns true if the application must play the shutter sound in accordance
+ * with certain regional restrictions.</p>
+ *
+ * <p>If this method returns true, applications are strongly recommended to call
+ * MediaActionSound.play(SHUTTER_CLICK) or MediaActionSound.play(START_VIDEO_RECORDING)
+ * whenever they capture images or video to storage or send them over the network.</p>
+ */
+ public static boolean mustPlayShutterSound() {
+ boolean result = false;
+ IBinder b = ServiceManager.getService(Context.AUDIO_SERVICE);
+ IAudioService audioService = IAudioService.Stub.asInterface(b);
+ try {
+ result = audioService.isCameraSoundForced();
+ } catch (RemoteException e) {
+ Log.e(TAG, "audio service is unavailable for queries, defaulting to false");
+ }
+ return result;
+ }
+
private class SoundState {
public final int name;
public int id;
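
For illustration, a minimal sketch of the recommendation in the javadoc above: a capture path
consults the new mustPlayShutterSound() and plays SHUTTER_CLICK when the sound is forced. The
helper class and the userWantsSound flag are made up for the example:

    import android.media.MediaActionSound;

    class ShutterSoundPolicy {
        private final MediaActionSound mSound = new MediaActionSound();

        // Play the still-capture sound when the user has it enabled, or whenever
        // regional policy forces it regardless of the user setting.
        void onPictureTaken(boolean userWantsSound) {
            if (userWantsSound || MediaActionSound.mustPlayShutterSound()) {
                mSound.play(MediaActionSound.SHUTTER_CLICK);
            }
        }

        void release() {
            mSound.release();
        }
    }
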
diff --git a/media/java/android/media/MediaCodec.java b/media/java/android/media/MediaCodec.java
index 8b9153621165..72dd2bd43eb6 100644
--- a/media/java/android/media/MediaCodec.java
+++ b/media/java/android/media/MediaCodec.java
@@ -60,10 +60,10 @@ import java.util.concurrent.locks.ReentrantLock;
with {@link MediaExtractor}, {@link MediaSync}, {@link MediaMuxer}, {@link MediaCrypto},
{@link MediaDrm}, {@link Image}, {@link Surface}, and {@link AudioTrack}.)
<p>
- <center><object style="width: 540px; height: 205px;" type="image/svg+xml"
- data="../../../images/media/mediacodec_buffers.svg"><img
- src="../../../images/media/mediacodec_buffers.png" style="width: 540px; height: 205px"
- alt="MediaCodec buffer flow diagram"></object></center>
+ <center>
+ <img src="../../../images/media/mediacodec_buffers.svg" style="width: 540px; height: 205px"
+ alt="MediaCodec buffer flow diagram">
+ </center>
<p>
In broad terms, a codec processes input data to generate output data. It processes data
asynchronously and uses a set of input and output buffers. At a simplistic level, you request
@@ -268,10 +268,10 @@ import java.util.concurrent.locks.ReentrantLock;
Uninitialized, Configured and Error, whereas the Executing state conceptually progresses through
three sub-states: Flushed, Running and End-of-Stream.
<p>
- <center><object style="width: 516px; height: 353px;" type="image/svg+xml"
- data="../../../images/media/mediacodec_states.svg"><img
- src="../../../images/media/mediacodec_states.png" style="width: 519px; height: 356px"
- alt="MediaCodec state diagram"></object></center>
+ <center>
+ <img src="../../../images/media/mediacodec_states.svg" style="width: 519px; height: 356px"
+ alt="MediaCodec state diagram">
+ </center>
<p>
When you create a codec using one of the factory methods, the codec is in the Uninitialized
state. First, you need to configure it via {@link #configure configure(&hellip;)}, which brings
@@ -513,10 +513,10 @@ import java.util.concurrent.locks.ReentrantLock;
Similarly, upon an initial call to {@code start} the codec will move directly to the Running
sub-state and start passing available input buffers via the callback.
<p>
- <center><object style="width: 516px; height: 353px;" type="image/svg+xml"
- data="../../../images/media/mediacodec_async_states.svg"><img
- src="../../../images/media/mediacodec_async_states.png" style="width: 516px; height: 353px"
- alt="MediaCodec state diagram for asynchronous operation"></object></center>
+ <center>
+ <img src="../../../images/media/mediacodec_async_states.svg" style="width: 516px; height: 353px"
+ alt="MediaCodec state diagram for asynchronous operation">
+ </center>
<p>
MediaCodec is typically used like this in asynchronous mode:
<pre class=prettyprint>
@@ -5107,7 +5107,6 @@ final public class MediaCodec {
public MediaImage(
@NonNull ByteBuffer buffer, @NonNull ByteBuffer info, boolean readOnly,
long timestamp, int xOffset, int yOffset, @Nullable Rect cropRect) {
- mFormat = ImageFormat.YUV_420_888;
mTimestamp = timestamp;
mIsImageValid = true;
mIsReadOnly = buffer.isReadOnly();
@@ -5120,6 +5119,11 @@ final public class MediaCodec {
mBufferContext = 0;
+ int cbPlaneOffset = -1;
+ int crPlaneOffset = -1;
+ int planeOffsetInc = -1;
+ int pixelStride = -1;
+
// read media-info. See MediaImage2
if (info.remaining() == 104) {
int type = info.getInt();
@@ -5137,14 +5141,27 @@ final public class MediaCodec {
"unsupported size: " + mWidth + "x" + mHeight);
}
int bitDepth = info.getInt();
- if (bitDepth != 8) {
+ if (bitDepth != 8 && bitDepth != 10) {
throw new UnsupportedOperationException("unsupported bit depth: " + bitDepth);
}
int bitDepthAllocated = info.getInt();
- if (bitDepthAllocated != 8) {
+ if (bitDepthAllocated != 8 && bitDepthAllocated != 16) {
throw new UnsupportedOperationException(
"unsupported allocated bit depth: " + bitDepthAllocated);
}
+ if (bitDepth == 8 && bitDepthAllocated == 8) {
+ mFormat = ImageFormat.YUV_420_888;
+ planeOffsetInc = 1;
+ pixelStride = 2;
+ } else if (bitDepth == 10 && bitDepthAllocated == 16) {
+ mFormat = ImageFormat.YCBCR_P010;
+ planeOffsetInc = 2;
+ pixelStride = 4;
+ } else {
+ throw new UnsupportedOperationException("couldn't infer ImageFormat"
+ + " bitDepth: " + bitDepth + " bitDepthAllocated: " + bitDepthAllocated);
+ }
+
mPlanes = new MediaPlane[numPlanes];
for (int ix = 0; ix < numPlanes; ix++) {
int planeOffset = info.getInt();
@@ -5166,12 +5183,31 @@ final public class MediaCodec {
buffer.limit(buffer.position() + Utils.divUp(bitDepth, 8)
+ (mHeight / vert - 1) * rowInc + (mWidth / horiz - 1) * colInc);
mPlanes[ix] = new MediaPlane(buffer.slice(), rowInc, colInc);
+ if ((mFormat == ImageFormat.YUV_420_888 || mFormat == ImageFormat.YCBCR_P010)
+ && ix == 1) {
+ cbPlaneOffset = planeOffset;
+ } else if ((mFormat == ImageFormat.YUV_420_888
+ || mFormat == ImageFormat.YCBCR_P010) && ix == 2) {
+ crPlaneOffset = planeOffset;
+ }
}
} else {
throw new UnsupportedOperationException(
"unsupported info length: " + info.remaining());
}
+ // Validate the semiplanar chroma layout.
+ if (mFormat == ImageFormat.YCBCR_P010) {
+ if (crPlaneOffset != cbPlaneOffset + planeOffsetInc) {
+ throw new UnsupportedOperationException("Invalid plane offsets"
+ + " cbPlaneOffset: " + cbPlaneOffset + " crPlaneOffset: " + crPlaneOffset);
+ }
+ if (mPlanes[1].getPixelStride() != pixelStride
+ || mPlanes[2].getPixelStride() != pixelStride) {
+ throw new UnsupportedOperationException("Invalid pixelStride");
+ }
+ }
+
if (cropRect == null) {
cropRect = new Rect(0, 0, mWidth, mHeight);
}
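
For illustration, a minimal sketch of consuming the 10-bit output images enabled by the change
above; the inspector class is made up, and it only logs the chroma plane layout that the P010
validation above guarantees (interleaved Cb/Cr, 4-byte pixel stride):

    import android.graphics.ImageFormat;
    import android.media.Image;
    import android.media.MediaCodec;
    import android.util.Log;

    class TenBitOutputInspector {
        static void inspect(MediaCodec decoder, int outputIndex) {
            Image image = decoder.getOutputImage(outputIndex);
            if (image == null) {
                return;
            }
            try {
                if (image.getFormat() == ImageFormat.YCBCR_P010) {
                    // Each chroma sample occupies 2 bytes (10 MSBs valid) and Cb/Cr
                    // are interleaved, hence the 4-byte pixel stride checked above.
                    Image.Plane cb = image.getPlanes()[1];
                    Log.d("TenBitOutputInspector", "P010 chroma pixelStride="
                            + cb.getPixelStride() + " rowStride=" + cb.getRowStride());
                }
            } finally {
                image.close();
            }
        }
    }
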
diff --git a/media/java/android/media/MediaCodecInfo.java b/media/java/android/media/MediaCodecInfo.java
index c3b1bca8e143..77709d7d3823 100644
--- a/media/java/android/media/MediaCodecInfo.java
+++ b/media/java/android/media/MediaCodecInfo.java
@@ -28,6 +28,7 @@ import android.compat.annotation.UnsupportedAppUsage;
import android.os.Build;
import android.os.Process;
import android.os.SystemProperties;
+import android.sysprop.MediaProperties;
import android.util.Log;
import android.util.Pair;
import android.util.Range;
@@ -181,10 +182,15 @@ public final class MediaCodecInfo {
public String mName;
public int mValue;
public boolean mDefault;
+ public boolean mInternal;
public Feature(String name, int value, boolean def) {
+ this(name, value, def, false /* internal */);
+ }
+ public Feature(String name, int value, boolean def, boolean internal) {
mName = name;
mValue = value;
mDefault = def;
+ mInternal = internal;
}
}
@@ -196,13 +202,20 @@ public final class MediaCodecInfo {
private static final Range<Rational> POSITIVE_RATIONALS =
Range.create(new Rational(1, Integer.MAX_VALUE),
new Rational(Integer.MAX_VALUE, 1));
- private static final Range<Integer> SIZE_RANGE =
- Process.is64Bit() ? Range.create(1, 32768) : Range.create(1, 4096);
private static final Range<Integer> FRAME_RATE_RANGE = Range.create(0, 960);
private static final Range<Integer> BITRATE_RANGE = Range.create(0, 500000000);
private static final int DEFAULT_MAX_SUPPORTED_INSTANCES = 32;
private static final int MAX_SUPPORTED_INSTANCES_LIMIT = 256;
+ private static final class LazyHolder {
+ private static final Range<Integer> SIZE_RANGE = Process.is64Bit()
+ ? Range.create(1, 32768)
+ : Range.create(1, MediaProperties.resolution_limit_32bit().orElse(4096));
+ }
+ private static Range<Integer> getSizeRange() {
+ return LazyHolder.SIZE_RANGE;
+ }
+
// found stuff that is not supported by framework (=> this should not happen)
private static final int ERROR_UNRECOGNIZED = (1 << 0);
// found profile/level for which we don't have capability estimates
@@ -413,6 +426,32 @@ public final class MediaCodecInfo {
/** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */
public static final int COLOR_Format24BitABGR6666 = 43;
+ /**
+ * P010 is a 10-bit-per-component 4:2:0 YCbCr semiplanar format.
+ * <p>
+ * This format uses 24 allocated bits per pixel with 15 bits of
+ * data per pixel. Chroma planes are subsampled by 2 both
+ * horizontally and vertically. Each chroma and luma component
+ * has 16 allocated bits in little-endian configuration with 10
+ * MSB of actual data.
+ *
+ * <pre>
+ * byte byte
+ * <--------- i --------> | <------ i + 1 ------>
+ * +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
+ * | UNUSED | Y/Cb/Cr |
+ * +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
+ * 0 5 6 7 0 7
+ * bit
+ * </pre>
+ *
+ * Use this format with {@link Image}. This format corresponds
+ * to {@link android.graphics.ImageFormat#YCBCR_P010}.
+ * <p>
+ */
+ @SuppressLint("AllUpper")
+ public static final int COLOR_FormatYUVP010 = 54;
+
/** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */
public static final int COLOR_TI_FormatYUV420PackedSemiPlanar = 0x7f000100;
// COLOR_FormatSurface indicates that the data will be a GraphicBuffer metadata reference.
@@ -420,6 +459,25 @@ public final class MediaCodecInfo {
public static final int COLOR_FormatSurface = 0x7F000789;
/**
+ * 64 bits per pixel RGBA color format, with 16-bit signed
+ * floating point red, green, blue, and alpha components.
+ * <p>
+ *
+ * <pre>
+ * byte byte byte byte
+ * <-- i -->|<- i+1 ->|<- i+2 ->|<- i+3 ->|<- i+4 ->|<- i+5 ->|<- i+6 ->|<- i+7 ->
+ * +---------+---------+-------------------+---------+---------+---------+---------+
+ * | RED | GREEN | BLUE | ALPHA |
+ * +---------+---------+-------------------+---------+---------+---------+---------+
+ * 0 7 0 7 0 7 0 7 0 7 0 7 0 7 0 7
+ * </pre>
+ *
+ * This corresponds to {@link android.graphics.PixelFormat#RGBA_F16}.
+ */
+ @SuppressLint("AllUpper")
+ public static final int COLOR_Format64bitABGRFloat = 0x7F000F16;
+
+ /**
* 32 bits per pixel RGBA color format, with 8-bit red, green, blue, and alpha components.
* <p>
* Using 32-bit little-endian representation, colors stored as Red 7:0, Green 15:8,
@@ -437,6 +495,26 @@ public final class MediaCodecInfo {
public static final int COLOR_Format32bitABGR8888 = 0x7F00A000;
/**
+ * 32 bits per pixel RGBA color format, with 10-bit red, green,
+ * blue, and 2-bit alpha components.
+ * <p>
+ * Using 32-bit little-endian representation, colors stored as
+ * Red 9:0, Green 19:10, Blue 29:20, and Alpha 31:30.
+ * <pre>
+ * byte byte byte byte
+ * <------ i -----> | <---- i+1 ----> | <---- i+2 ----> | <---- i+3 ----->
+ * +-----------------+---+-------------+-------+---------+-----------+-----+
+ * | RED | GREEN | BLUE |ALPHA|
+ * +-----------------+---+-------------+-------+---------+-----------+-----+
+ * 0 7 0 1 2 7 0 3 4 7 0 5 6 7
+ * </pre>
+ *
+ * This corresponds to {@link android.graphics.PixelFormat#RGBA_1010102}.
+ */
+ @SuppressLint("AllUpper")
+ public static final int COLOR_Format32bitABGR2101010 = 0x7F00AAA2;
+
+ /**
* Flexible 12 bits per pixel, subsampled YUV color format with 8-bit chroma and luma
* components.
* <p>
@@ -571,6 +649,11 @@ public final class MediaCodecInfo {
public static final String FEATURE_LowLatency = "low-latency";
/**
+ * Do not include in the REGULAR_CODECS list in MediaCodecList.
+ */
+ private static final String FEATURE_SpecialCodec = "special-codec";
+
+ /**
* <b>video encoder only</b>: codec supports quantization parameter bounds.
* @see MediaFormat#KEY_VIDEO_QP_MAX
* @see MediaFormat#KEY_VIDEO_QP_MIN
@@ -579,6 +662,18 @@ public final class MediaCodecInfo {
public static final String FEATURE_QpBounds = "qp-bounds";
/**
+ * <b>video encoder only</b>: codec supports exporting encoding statistics.
+ * Encoders with this feature can provide app clients with per-frame encoding
+ * statistics information.
+ * The scope of the encoding statistics is controlled by
+ * {@link MediaFormat#KEY_VIDEO_ENCODING_STATISTICS_LEVEL}.
+ *
+ * @see MediaFormat#KEY_VIDEO_ENCODING_STATISTICS_LEVEL
+ */
+ @SuppressLint("AllUpper") // for consistency with other FEATURE_* constants
+ public static final String FEATURE_EncodingStatistics = "encoding-statistics";
+
+ /**
* Query codec feature capabilities.
* <p>
* These features are supported to be used by the codec. These
@@ -608,6 +703,8 @@ public final class MediaCodecInfo {
new Feature(FEATURE_MultipleFrames, (1 << 5), false),
new Feature(FEATURE_DynamicTimestamp, (1 << 6), false),
new Feature(FEATURE_LowLatency, (1 << 7), true),
+ // feature to exclude codec from REGULAR codec list
+ new Feature(FEATURE_SpecialCodec, (1 << 30), false, true),
};
private static final Feature[] encoderFeatures = {
@@ -615,6 +712,9 @@ public final class MediaCodecInfo {
new Feature(FEATURE_MultipleFrames, (1 << 1), false),
new Feature(FEATURE_DynamicTimestamp, (1 << 2), false),
new Feature(FEATURE_QpBounds, (1 << 3), false),
+ new Feature(FEATURE_EncodingStatistics, (1 << 4), false),
+ // feature to exclude codec from REGULAR codec list
+ new Feature(FEATURE_SpecialCodec, (1 << 30), false, true),
};
/** @hide */
@@ -622,7 +722,9 @@ public final class MediaCodecInfo {
Feature[] features = getValidFeatures();
String[] res = new String[features.length];
for (int i = 0; i < res.length; i++) {
- res[i] = features[i].mName;
+ if (!features[i].mInternal) {
+ res[i] = features[i].mName;
+ }
}
return res;
}
@@ -770,6 +872,10 @@ public final class MediaCodecInfo {
// check feature support
for (Feature feat: getValidFeatures()) {
+ if (feat.mInternal) {
+ continue;
+ }
+
Integer yesNo = (Integer)map.get(MediaFormat.KEY_FEATURE_ + feat.mName);
if (yesNo == null) {
continue;
@@ -1083,7 +1189,9 @@ public final class MediaCodecInfo {
mFlagsRequired |= feat.mValue;
}
mFlagsSupported |= feat.mValue;
- mDefaultFormat.setInteger(key, 1);
+ if (!feat.mInternal) {
+ mDefaultFormat.setInteger(key, 1);
+ }
// TODO restrict features by mFlagsVerified once all codecs reliably verify them
}
}
@@ -2234,12 +2342,12 @@ public final class MediaCodecInfo {
private void initWithPlatformLimits() {
mBitrateRange = BITRATE_RANGE;
- mWidthRange = SIZE_RANGE;
- mHeightRange = SIZE_RANGE;
+ mWidthRange = getSizeRange();
+ mHeightRange = getSizeRange();
mFrameRateRange = FRAME_RATE_RANGE;
- mHorizontalBlockRange = SIZE_RANGE;
- mVerticalBlockRange = SIZE_RANGE;
+ mHorizontalBlockRange = getSizeRange();
+ mVerticalBlockRange = getSizeRange();
// full positive ranges are supported as these get calculated
mBlockCountRange = POSITIVE_INTEGERS;
@@ -2253,7 +2361,7 @@ public final class MediaCodecInfo {
mHeightAlignment = 2;
mBlockWidth = 2;
mBlockHeight = 2;
- mSmallerDimensionUpperLimit = SIZE_RANGE.getUpper();
+ mSmallerDimensionUpperLimit = getSizeRange().getUpper();
}
private @Nullable List<PerformancePoint> getPerformancePoints(Map<String, Object> map) {
@@ -2281,12 +2389,6 @@ public final class MediaCodecInfo {
if (size == null || size.getWidth() * size.getHeight() <= 0) {
continue;
}
- if (size.getWidth() > SIZE_RANGE.getUpper()
- || size.getHeight() > SIZE_RANGE.getUpper()) {
- size = new Size(
- Math.min(size.getWidth(), SIZE_RANGE.getUpper()),
- Math.min(size.getHeight(), SIZE_RANGE.getUpper()));
- }
Range<Long> range = Utils.parseLongRange(map.get(key), null);
if (range == null || range.getLower() < 0 || range.getUpper() < 0) {
continue;
@@ -2500,10 +2602,10 @@ public final class MediaCodecInfo {
// codec supports profiles that we don't know.
// Use supplied values clipped to platform limits
if (widths != null) {
- mWidthRange = SIZE_RANGE.intersect(widths);
+ mWidthRange = getSizeRange().intersect(widths);
}
if (heights != null) {
- mHeightRange = SIZE_RANGE.intersect(heights);
+ mHeightRange = getSizeRange().intersect(heights);
}
if (counts != null) {
mBlockCountRange = POSITIVE_INTEGERS.intersect(
@@ -3923,6 +4025,12 @@ public final class MediaCodecInfo {
public static final int DolbyVisionLevelUhd30 = 0x40;
public static final int DolbyVisionLevelUhd48 = 0x80;
public static final int DolbyVisionLevelUhd60 = 0x100;
+ @SuppressLint("AllUpper")
+ public static final int DolbyVisionLevelUhd120 = 0x200;
+ @SuppressLint("AllUpper")
+ public static final int DolbyVisionLevel8k30 = 0x400;
+ @SuppressLint("AllUpper")
+ public static final int DolbyVisionLevel8k60 = 0x800;
// Profiles and levels for AV1 Codec, corresponding to the definitions in
// "AV1 Bitstream & Decoding Process Specification", Annex A
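
For illustration, a minimal sketch that probes the constants added above through the public
capabilities API; the probe class is made up, and it only reports whether AVC encoders
advertise FEATURE_EncodingStatistics and list COLOR_FormatYUVP010:

    import android.media.MediaCodecInfo;
    import android.media.MediaCodecList;
    import android.media.MediaFormat;
    import android.util.Log;

    class CapabilityProbe {
        static void dump() {
            MediaCodecList list = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
            for (MediaCodecInfo info : list.getCodecInfos()) {
                if (!info.isEncoder()) {
                    continue;
                }
                for (String type : info.getSupportedTypes()) {
                    if (!MediaFormat.MIMETYPE_VIDEO_AVC.equals(type)) {
                        continue;
                    }
                    MediaCodecInfo.CodecCapabilities caps = info.getCapabilitiesForType(type);
                    boolean stats = caps.isFeatureSupported(
                            MediaCodecInfo.CodecCapabilities.FEATURE_EncodingStatistics);
                    boolean p010 = false;
                    for (int color : caps.colorFormats) {
                        if (color == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUVP010) {
                            p010 = true;
                        }
                    }
                    Log.d("CapabilityProbe", info.getName()
                            + " encoding-statistics=" + stats + " P010=" + p010);
                }
            }
        }
    }
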
diff --git a/media/java/android/media/MediaFormat.java b/media/java/android/media/MediaFormat.java
index 0847be34d90f..522b0212e9d0 100644
--- a/media/java/android/media/MediaFormat.java
+++ b/media/java/android/media/MediaFormat.java
@@ -1115,6 +1115,76 @@ public final class MediaFormat {
public static final String KEY_VIDEO_QP_B_MIN = "video-qp-b-min";
/**
+ * A key describing the level of encoding statistics information emitted from the video encoder.
+ *
+ * The associated value is an integer.
+ */
+ public static final String KEY_VIDEO_ENCODING_STATISTICS_LEVEL =
+ "video-encoding-statistics-level";
+
+ /**
+ * Encoding Statistics Level None.
+ * The encoder generates no encoding statistics information.
+ */
+ public static final int VIDEO_ENCODING_STATISTICS_LEVEL_NONE = 0;
+
+ /**
+ * Encoding Statistics Level 1.
+ * Encoder generates {@link MediaFormat#KEY_PICTURE_TYPE} and
+ * {@link MediaFormat#KEY_VIDEO_QP_AVERAGE} for each frame.
+ */
+ public static final int VIDEO_ENCODING_STATISTICS_LEVEL_1 = 1;
+
+ /** @hide */
+ @IntDef({
+ VIDEO_ENCODING_STATISTICS_LEVEL_NONE,
+ VIDEO_ENCODING_STATISTICS_LEVEL_1,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface VideoEncodingStatisticsLevel {}
+
+ /**
+ * A key describing the per-frame average block QP (Quantization Parameter).
+ * This is a part of a video 'Encoding Statistics' export feature.
+ * This value is emitted by the video encoder for each video frame.
+ * The average value is rounded down (using floor()) to an integer value.
+ *
+ * The associated value is an integer.
+ */
+ public static final String KEY_VIDEO_QP_AVERAGE = "video-qp-average";
+
+ /**
+ * A key describing the picture type of the encoded frame.
+ * This is a part of a video 'Encoding Statistics' export feature.
+ * This value is emitted by the video encoder for each video frame.
+ *
+ * The associated value is an integer.
+ */
+ public static final String KEY_PICTURE_TYPE = "picture-type";
+
+ /** Picture Type is unknown. */
+ public static final int PICTURE_TYPE_UNKNOWN = 0;
+
+ /** Picture Type is I Frame. */
+ public static final int PICTURE_TYPE_I = 1;
+
+ /** Picture Type is P Frame. */
+ public static final int PICTURE_TYPE_P = 2;
+
+ /** Picture Type is B Frame. */
+ public static final int PICTURE_TYPE_B = 3;
+
+ /** @hide */
+ @IntDef({
+ PICTURE_TYPE_UNKNOWN,
+ PICTURE_TYPE_I,
+ PICTURE_TYPE_P,
+ PICTURE_TYPE_B,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface PictureType {}
+
+ /**
* A key describing the audio session ID of the AudioTrack associated
* to a tunneled video codec.
* The associated value is an integer.
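
For illustration, a minimal sketch of using the statistics keys added above; the helper class
is made up, and reading the per-frame values from the per-buffer output format is an
assumption about how a given encoder surfaces them:

    import android.media.MediaCodec;
    import android.media.MediaFormat;
    import android.util.Log;

    class EncodingStatsExample {
        // Request level-1 statistics before configure().
        static void requestStats(MediaFormat format) {
            format.setInteger(MediaFormat.KEY_VIDEO_ENCODING_STATISTICS_LEVEL,
                    MediaFormat.VIDEO_ENCODING_STATISTICS_LEVEL_1);
        }

        // Read the per-frame values back for a dequeued output buffer.
        static void logStats(MediaCodec encoder, int outputIndex) {
            MediaFormat out = encoder.getOutputFormat(outputIndex);
            if (out.containsKey(MediaFormat.KEY_PICTURE_TYPE)
                    && out.containsKey(MediaFormat.KEY_VIDEO_QP_AVERAGE)) {
                Log.d("EncodingStats", "picture-type=" + out.getInteger(MediaFormat.KEY_PICTURE_TYPE)
                        + " avg-qp=" + out.getInteger(MediaFormat.KEY_VIDEO_QP_AVERAGE));
            }
        }
    }
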
diff --git a/media/java/android/media/MediaMetadataRetriever.java b/media/java/android/media/MediaMetadataRetriever.java
index a15529e99d15..458821ee25f1 100644
--- a/media/java/android/media/MediaMetadataRetriever.java
+++ b/media/java/android/media/MediaMetadataRetriever.java
@@ -928,7 +928,7 @@ public class MediaMetadataRetriever implements AutoCloseable {
private @NonNull List<Bitmap> getFramesAtIndexInternal(
int frameIndex, int numFrames, @Nullable BitmapParams params) {
if (!"yes".equals(extractMetadata(MediaMetadataRetriever.METADATA_KEY_HAS_VIDEO))) {
- throw new IllegalStateException("Does not contail video or image sequences");
+ throw new IllegalStateException("Does not contain video or image sequences");
}
int frameCount = Integer.parseInt(
extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_FRAME_COUNT));
@@ -1046,7 +1046,7 @@ public class MediaMetadataRetriever implements AutoCloseable {
private Bitmap getImageAtIndexInternal(int imageIndex, @Nullable BitmapParams params) {
if (!"yes".equals(extractMetadata(MediaMetadataRetriever.METADATA_KEY_HAS_IMAGE))) {
- throw new IllegalStateException("Does not contail still images");
+ throw new IllegalStateException("Does not contain still images");
}
String imageCount = extractMetadata(MediaMetadataRetriever.METADATA_KEY_IMAGE_COUNT);
diff --git a/media/java/android/media/MediaRoute2Info.java b/media/java/android/media/MediaRoute2Info.java
index 7e9d2d809fdd..9c9e83b0987d 100644
--- a/media/java/android/media/MediaRoute2Info.java
+++ b/media/java/android/media/MediaRoute2Info.java
@@ -106,7 +106,7 @@ public final class MediaRoute2Info implements Parcelable {
@IntDef({
TYPE_UNKNOWN, TYPE_BUILTIN_SPEAKER, TYPE_WIRED_HEADSET,
TYPE_WIRED_HEADPHONES, TYPE_BLUETOOTH_A2DP, TYPE_HDMI, TYPE_USB_DEVICE,
- TYPE_USB_ACCESSORY, TYPE_DOCK, TYPE_USB_HEADSET, TYPE_HEARING_AID,
+ TYPE_USB_ACCESSORY, TYPE_DOCK, TYPE_USB_HEADSET, TYPE_HEARING_AID, TYPE_BLE_HEADSET,
TYPE_REMOTE_TV, TYPE_REMOTE_SPEAKER, TYPE_GROUP})
@Retention(RetentionPolicy.SOURCE)
public @interface Type {}
@@ -202,6 +202,14 @@ public final class MediaRoute2Info implements Parcelable {
public static final int TYPE_HEARING_AID = AudioDeviceInfo.TYPE_HEARING_AID;
/**
+ * A route type describing a BLE headset.
+ *
+ * @see #getType
+ * @hide
+ */
+ public static final int TYPE_BLE_HEADSET = AudioDeviceInfo.TYPE_BLE_HEADSET;
+
+ /**
* A route type indicating the presentation of the media is happening on a TV.
*
* @see #getType
diff --git a/media/java/android/media/Ringtone.java b/media/java/android/media/Ringtone.java
index 3cf03417334b..86a94a9e0662 100644
--- a/media/java/android/media/Ringtone.java
+++ b/media/java/android/media/Ringtone.java
@@ -504,49 +504,51 @@ public class Ringtone {
}
private boolean playFallbackRingtone() {
- if (mAudioManager.getStreamVolume(AudioAttributes.toLegacyStreamType(mAudioAttributes))
- != 0) {
- int ringtoneType = RingtoneManager.getDefaultType(mUri);
- if (ringtoneType == -1 ||
- RingtoneManager.getActualDefaultRingtoneUri(mContext, ringtoneType) != null) {
- // Default ringtone, try fallback ringtone.
- try {
- AssetFileDescriptor afd = mContext.getResources().openRawResourceFd(
- com.android.internal.R.raw.fallbackring);
- if (afd != null) {
- mLocalPlayer = new MediaPlayer();
- if (afd.getDeclaredLength() < 0) {
- mLocalPlayer.setDataSource(afd.getFileDescriptor());
- } else {
- mLocalPlayer.setDataSource(afd.getFileDescriptor(),
- afd.getStartOffset(),
- afd.getDeclaredLength());
- }
- mLocalPlayer.setAudioAttributes(mAudioAttributes);
- synchronized (mPlaybackSettingsLock) {
- applyPlaybackProperties_sync();
- }
- if (mVolumeShaperConfig != null) {
- mVolumeShaper = mLocalPlayer.createVolumeShaper(mVolumeShaperConfig);
- }
- mLocalPlayer.prepare();
- startLocalPlayer();
- afd.close();
- return true;
- } else {
- Log.e(TAG, "Could not load fallback ringtone");
- }
- } catch (IOException ioe) {
- destroyLocalPlayer();
- Log.e(TAG, "Failed to open fallback ringtone");
- } catch (NotFoundException nfe) {
- Log.e(TAG, "Fallback ringtone does not exist");
- }
+ int streamType = AudioAttributes.toLegacyStreamType(mAudioAttributes);
+ if (mAudioManager.getStreamVolume(streamType) == 0) {
+ return false;
+ }
+ int ringtoneType = RingtoneManager.getDefaultType(mUri);
+ if (ringtoneType != -1 &&
+ RingtoneManager.getActualDefaultRingtoneUri(mContext, ringtoneType) == null) {
+ Log.w(TAG, "not playing fallback for " + mUri);
+ return false;
+ }
+ // Default ringtone, try fallback ringtone.
+ try {
+ AssetFileDescriptor afd = mContext.getResources().openRawResourceFd(
+ com.android.internal.R.raw.fallbackring);
+ if (afd == null) {
+ Log.e(TAG, "Could not load fallback ringtone");
+ return false;
+ }
+ mLocalPlayer = new MediaPlayer();
+ if (afd.getDeclaredLength() < 0) {
+ mLocalPlayer.setDataSource(afd.getFileDescriptor());
} else {
- Log.w(TAG, "not playing fallback for " + mUri);
+ mLocalPlayer.setDataSource(afd.getFileDescriptor(),
+ afd.getStartOffset(),
+ afd.getDeclaredLength());
}
+ mLocalPlayer.setAudioAttributes(mAudioAttributes);
+ synchronized (mPlaybackSettingsLock) {
+ applyPlaybackProperties_sync();
+ }
+ if (mVolumeShaperConfig != null) {
+ mVolumeShaper = mLocalPlayer.createVolumeShaper(mVolumeShaperConfig);
+ }
+ mLocalPlayer.prepare();
+ startLocalPlayer();
+ afd.close();
+ } catch (IOException ioe) {
+ destroyLocalPlayer();
+ Log.e(TAG, "Failed to open fallback ringtone");
+ return false;
+ } catch (NotFoundException nfe) {
+ Log.e(TAG, "Fallback ringtone does not exist");
+ return false;
}
- return false;
+ return true;
}
void setTitle(String title) {
diff --git a/media/java/android/media/session/MediaController.java b/media/java/android/media/session/MediaController.java
index 1da41fb87b40..33bc84636624 100644
--- a/media/java/android/media/session/MediaController.java
+++ b/media/java/android/media/session/MediaController.java
@@ -407,7 +407,7 @@ public final class MediaController {
/**
* Get the session owner's package name.
*
- * @return The package name of of the session owner.
+ * @return The package name of the session owner.
*/
public String getPackageName() {
if (mPackageName == null) {
diff --git a/media/java/android/media/tv/ITvInputManager.aidl b/media/java/android/media/tv/ITvInputManager.aidl
index d8d1ba1341bb..a0827acbaf4f 100644
--- a/media/java/android/media/tv/ITvInputManager.aidl
+++ b/media/java/android/media/tv/ITvInputManager.aidl
@@ -45,6 +45,8 @@ interface ITvInputManager {
TvInputInfo getTvInputInfo(in String inputId, int userId);
void updateTvInputInfo(in TvInputInfo inputInfo, int userId);
int getTvInputState(in String inputId, int userId);
+ List<String> getAvailableExtensionInterfaceNames(in String inputId, int userId);
+ IBinder getExtensionInterface(in String inputId, in String name, int userId);
List<TvContentRatingSystemInfo> getTvContentRatingSystemList(int userId);
diff --git a/media/java/android/media/tv/ITvInputService.aidl b/media/java/android/media/tv/ITvInputService.aidl
index 8ccf13ae2d72..64a23a2323f6 100755
--- a/media/java/android/media/tv/ITvInputService.aidl
+++ b/media/java/android/media/tv/ITvInputService.aidl
@@ -26,18 +26,21 @@ import android.view.InputChannel;
* Top-level interface to a TV input component (implemented in a Service).
* @hide
*/
-oneway interface ITvInputService {
- void registerCallback(in ITvInputServiceCallback callback);
- void unregisterCallback(in ITvInputServiceCallback callback);
- void createSession(in InputChannel channel, in ITvInputSessionCallback callback,
+interface ITvInputService {
+ oneway void registerCallback(in ITvInputServiceCallback callback);
+ oneway void unregisterCallback(in ITvInputServiceCallback callback);
+ oneway void createSession(in InputChannel channel, in ITvInputSessionCallback callback,
in String inputId, in String sessionId);
- void createRecordingSession(in ITvInputSessionCallback callback, in String inputId,
+ oneway void createRecordingSession(in ITvInputSessionCallback callback, in String inputId,
in String sessionId);
+ List<String> getAvailableExtensionInterfaceNames();
+ IBinder getExtensionInterface(in String name);
+ String getExtensionInterfacePermission(in String name);
// For hardware TvInputService
- void notifyHardwareAdded(in TvInputHardwareInfo hardwareInfo);
- void notifyHardwareRemoved(in TvInputHardwareInfo hardwareInfo);
- void notifyHdmiDeviceAdded(in HdmiDeviceInfo deviceInfo);
- void notifyHdmiDeviceRemoved(in HdmiDeviceInfo deviceInfo);
- void notifyHdmiDeviceUpdated(in HdmiDeviceInfo deviceInfo);
+ oneway void notifyHardwareAdded(in TvInputHardwareInfo hardwareInfo);
+ oneway void notifyHardwareRemoved(in TvInputHardwareInfo hardwareInfo);
+ oneway void notifyHdmiDeviceAdded(in HdmiDeviceInfo deviceInfo);
+ oneway void notifyHdmiDeviceRemoved(in HdmiDeviceInfo deviceInfo);
+ oneway void notifyHdmiDeviceUpdated(in HdmiDeviceInfo deviceInfo);
}
diff --git a/media/java/android/media/tv/OWNERS b/media/java/android/media/tv/OWNERS
index 8bccc9a9db30..fa0429350a25 100644
--- a/media/java/android/media/tv/OWNERS
+++ b/media/java/android/media/tv/OWNERS
@@ -1,5 +1,6 @@
-nchalko@google.com
quxiangfang@google.com
+shubang@google.com
+hgchen@google.com
# For android remote service
per-file ITvRemoteServiceInput.aidl = file:/media/lib/tvremote/OWNERS
diff --git a/media/java/android/media/tv/TvContract.java b/media/java/android/media/tv/TvContract.java
index 30a14c84b72e..9147c123c6f3 100644
--- a/media/java/android/media/tv/TvContract.java
+++ b/media/java/android/media/tv/TvContract.java
@@ -1658,6 +1658,25 @@ public final class TvContract {
*/
String COLUMN_CONTENT_ID = "content_id";
+ /**
+ * The start time of this TV program, in milliseconds since the epoch.
+ *
+ * <p>Should be empty if this program is not live.
+ *
+ * <p>Type: INTEGER (long)
+ * @see #COLUMN_LIVE
+ */
+ String COLUMN_START_TIME_UTC_MILLIS = "start_time_utc_millis";
+
+ /**
+ * The end time of this TV program, in milliseconds since the epoch.
+ *
+ * <p>Should be empty if this program is not live.
+ *
+ * <p>Type: INTEGER (long)
+ * @see #COLUMN_LIVE
+ */
+ String COLUMN_END_TIME_UTC_MILLIS = "end_time_utc_millis";
}
/** Column definitions for the TV channels table. */
@@ -2701,6 +2720,42 @@ public final class TvContract {
*/
public static final String COLUMN_GLOBAL_CONTENT_ID = "global_content_id";
+ /**
+ * The flag indicating whether this TV program is scrambled or not.
+ *
+ * <p>Use the same coding for scrambled in the underlying broadcast standard
+ * if {@code free_ca_mode} in EIT is defined there (e.g. ETSI EN 300 468).
+ *
+ * <p>Type: INTEGER (boolean)
+ */
+ public static final String COLUMN_SCRAMBLED = "scrambled";
+
+ /**
+ * The comma-separated series IDs of this TV program for episodic TV shows.
+ *
+ * <p>This is used to indicate the series IDs.
+ * Programs in the same series share a series ID.
+ * Use this instead of {@link #COLUMN_SERIES_ID} if more than one series ID
+ * is assigned to the TV program.
+ *
+ * <p>Can be empty.
+ *
+ * <p>Type: TEXT
+ */
+ public static final String COLUMN_MULTI_SERIES_ID = "multi_series_id";
+
+ /**
+ * The internal ID used by individual TV input services.
+ *
+ * <p>This is internal to the provider that inserted it, and should not be decoded by other
+ * apps.
+ *
+ * <p>Can be empty.
+ *
+ * <p>Type: TEXT
+ */
+ public static final String COLUMN_INTERNAL_PROVIDER_ID = "internal_provider_id";
+
private Programs() {}
/** Canonical genres for TV programs. */
@@ -3033,6 +3088,32 @@ public final class TvContract {
public static final String COLUMN_RECORDING_EXPIRE_TIME_UTC_MILLIS =
"recording_expire_time_utc_millis";
+ /**
+ * The comma-separated series IDs of this TV program for episodic TV shows.
+ *
+ * <p>This is used to indicate the series IDs.
+ * Programs in the same series share a series ID.
+ * Use this instead of {@link #COLUMN_SERIES_ID} if more than one series ID
+ * is assigned to the TV program.
+ *
+ * <p>Can be empty.
+ *
+ * <p>Type: TEXT
+ */
+ public static final String COLUMN_MULTI_SERIES_ID = "multi_series_id";
+
+ /**
+ * The internal ID used by individual TV input services.
+ *
+ * <p>This is internal to the provider that inserted it, and should not be decoded by other
+ * apps.
+ *
+ * <p>Can be empty.
+ *
+ * <p>Type: TEXT
+ */
+ public static final String COLUMN_INTERNAL_PROVIDER_ID = "internal_provider_id";
+
private RecordedPrograms() {}
}
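
For illustration, a minimal sketch of populating the new program columns; the inserter class,
channel id, title, and provider id values are made up:

    import android.content.ContentValues;
    import android.content.Context;
    import android.media.tv.TvContract;

    class ProgramInserter {
        static void insert(Context context, long channelId) {
            ContentValues values = new ContentValues();
            values.put(TvContract.Programs.COLUMN_CHANNEL_ID, channelId);
            values.put(TvContract.Programs.COLUMN_TITLE, "Evening News");
            // New columns from this change.
            values.put(TvContract.Programs.COLUMN_SCRAMBLED, 0);
            values.put(TvContract.Programs.COLUMN_MULTI_SERIES_ID, "series-1,series-2");
            values.put(TvContract.Programs.COLUMN_INTERNAL_PROVIDER_ID, "provider-item-42");
            context.getContentResolver().insert(TvContract.Programs.CONTENT_URI, values);
        }
    }
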
diff --git a/media/java/android/media/tv/TvInputManager.java b/media/java/android/media/tv/TvInputManager.java
index 34e4609a71f7..420f4ef95cd5 100644
--- a/media/java/android/media/tv/TvInputManager.java
+++ b/media/java/android/media/tv/TvInputManager.java
@@ -18,6 +18,7 @@ package android.media.tv;
import android.annotation.CallbackExecutor;
import android.annotation.IntDef;
+import android.annotation.IntRange;
import android.annotation.NonNull;
import android.annotation.Nullable;
import android.annotation.RequiresPermission;
@@ -27,6 +28,8 @@ import android.annotation.TestApi;
import android.content.Context;
import android.content.Intent;
import android.graphics.Rect;
+import android.media.AudioDeviceInfo;
+import android.media.AudioFormat.Encoding;
import android.media.PlaybackParams;
import android.net.Uri;
import android.os.Binder;
@@ -59,6 +62,7 @@ import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
+import java.util.Objects;
import java.util.concurrent.Executor;
/**
@@ -1390,6 +1394,57 @@ public final class TvInputManager {
}
/**
+ * Returns available extension interfaces of a given hardware TV input. This can be used to
+ * provide domain-specific features that are only known between certain hardware TV inputs
+ * and their clients.
+ *
+ * @param inputId The ID of the TV input.
+ * @return a non-null list of extension interface names available to the caller. An empty
+ * list indicates the given TV input is not found, or the given TV input is not a
+ * hardware TV input, or the given TV input doesn't support any extension
+ * interfaces, or the caller doesn't hold the required permission for the extension
+ * interfaces supported by the given TV input.
+ * @see #getExtensionInterface
+ * @hide
+ */
+ @SystemApi
+ @NonNull
+ public List<String> getAvailableExtensionInterfaceNames(@NonNull String inputId) {
+ Preconditions.checkNotNull(inputId);
+ try {
+ return mService.getAvailableExtensionInterfaceNames(inputId, mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Returns an extension interface of a given hardware TV input. This can be used to provide
+ * domain-specific features that are only known between certain hardware TV inputs and
+ * their clients.
+ *
+ * @param inputId The ID of the TV input.
+ * @param name The extension interface name.
+ * @return an {@link IBinder} for the given extension interface, {@code null} if the given TV
+ * input is not found, or if the given TV input is not a hardware TV input, or if the
+ * given TV input doesn't support the given extension interface, or if the caller
+ * doesn't hold the required permission for the given extension interface.
+ * @see #getAvailableExtensionInterfaceNames
+ * @hide
+ */
+ @SystemApi
+ @Nullable
+ public IBinder getExtensionInterface(@NonNull String inputId, @NonNull String name) {
+ Preconditions.checkNotNull(inputId);
+ Preconditions.checkNotNull(name);
+ try {
+ return mService.getExtensionInterface(inputId, name, mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
* Registers a {@link TvInputCallback}.
*
* @param callback A callback used to monitor status of the TV inputs.
@@ -2947,6 +3002,16 @@ public final class TvInputManager {
return false;
}
+ /**
+ * Override default audio sink from audio policy.
+ *
+ * @param audioType device type of the audio sink to override with.
+ * @param audioAddress device address of the audio sink to override with.
+ * @param samplingRate desired sampling rate. Use default when it's 0.
+ * @param channelMask desired channel mask. Use default when it's
+ * AudioFormat.CHANNEL_OUT_DEFAULT.
+ * @param format desired format. Use default when it's AudioFormat.ENCODING_DEFAULT.
+ */
public void overrideAudioSink(int audioType, String audioAddress, int samplingRate,
int channelMask, int format) {
try {
@@ -2956,5 +3021,27 @@ public final class TvInputManager {
throw new RuntimeException(e);
}
}
+
+ /**
+ * Override default audio sink from audio policy.
+ *
+ * @param device {@link android.media.AudioDeviceInfo} to use.
+ * @param samplingRate desired sampling rate. Use default when it's 0.
+ * @param channelMask desired channel mask. Use default when it's
+ * AudioFormat.CHANNEL_OUT_DEFAULT.
+ * @param format desired format. Use default when it's AudioFormat.ENCODING_DEFAULT.
+ */
+ public void overrideAudioSink(@NonNull AudioDeviceInfo device,
+ @IntRange(from = 0) int samplingRate,
+ int channelMask, @Encoding int format) {
+ Objects.requireNonNull(device);
+ try {
+ mInterface.overrideAudioSink(
+ AudioDeviceInfo.convertDeviceTypeToInternalDevice(device.getType()),
+ device.getAddress(), samplingRate, channelMask, format);
+ } catch (RemoteException e) {
+ throw new RuntimeException(e);
+ }
+ }
}
}
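
For illustration, a minimal sketch of the client side of the extension interface APIs added
above; the interface name is hypothetical, and as system APIs these calls are only available
to privileged callers holding the permissions the input requires:

    import android.media.tv.TvInputManager;
    import android.os.IBinder;

    class ExtensionInterfaceClient {
        // Enumerates the interfaces a hardware TV input exposes and binds one of them.
        static IBinder findExtension(TvInputManager manager, String inputId) {
            for (String name : manager.getAvailableExtensionInterfaceNames(inputId)) {
                if ("com.example.tv.IPictureTuning".equals(name)) {
                    return manager.getExtensionInterface(inputId, name);
                }
            }
            return null;
        }
    }
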
diff --git a/media/java/android/media/tv/TvInputService.java b/media/java/android/media/tv/TvInputService.java
index 77fb2b236698..4a429fb551d7 100755
--- a/media/java/android/media/tv/TvInputService.java
+++ b/media/java/android/media/tv/TvInputService.java
@@ -201,6 +201,21 @@ public abstract class TvInputService extends Service {
}
@Override
+ public List<String> getAvailableExtensionInterfaceNames() {
+ return TvInputService.this.getAvailableExtensionInterfaceNames();
+ }
+
+ @Override
+ public IBinder getExtensionInterface(String name) {
+ return TvInputService.this.getExtensionInterface(name);
+ }
+
+ @Override
+ public String getExtensionInterfacePermission(String name) {
+ return TvInputService.this.getExtensionInterfacePermission(name);
+ }
+
+ @Override
public void notifyHardwareAdded(TvInputHardwareInfo hardwareInfo) {
mServiceHandler.obtainMessage(ServiceHandler.DO_ADD_HARDWARE_INPUT,
hardwareInfo).sendToTarget();
@@ -253,6 +268,67 @@ public abstract class TvInputService extends Service {
}
/**
+ * Returns available extension interfaces. This can be used to provide domain-specific
+ * features that are only known between certain hardware TV inputs and their clients.
+ *
+ * <p>Note that this service-level extension interface mechanism is only for hardware
+ * TV inputs that are bound even when sessions are not created.
+ *
+ * @return a non-null list of available extension interface names. An empty list
+ * indicates the TV input doesn't support any extension interfaces.
+ * @see #getExtensionInterface
+ * @see #getExtensionInterfacePermission
+ * @hide
+ */
+ @NonNull
+ @SystemApi
+ public List<String> getAvailableExtensionInterfaceNames() {
+ return new ArrayList<>();
+ }
+
+ /**
+ * Returns an extension interface. This can be used to provide domain-specific features
+ * that are only known between certain hardware TV inputs and their clients.
+ *
+ * <p>Note that this service-level extension interface mechanism is only for hardware
+ * TV inputs that are bound even when sessions are not created.
+ *
+ * @param name The extension interface name.
+ * @return an {@link IBinder} for the given extension interface, {@code null} if the TV input
+ * doesn't support the given extension interface.
+ * @see #getAvailableExtensionInterfaceNames
+ * @see #getExtensionInterfacePermission
+ * @hide
+ */
+ @Nullable
+ @SystemApi
+ public IBinder getExtensionInterface(@NonNull String name) {
+ return null;
+ }
+
+ /**
+ * Returns a permission for the given extension interface. This can be used to provide
+ * domain-specific features that are only known between certain hardware TV inputs and their
+ * clients.
+ *
+ * <p>Note that this service-level extension interface mechanism is only for hardware
+ * TV inputs that are bound even when sessions are not created.
+ *
+ * @param name The extension interface name.
+ * @return a name of the permission being checked for the given extension interface,
+ * {@code null} if there is no required permission, or if the TV input doesn't
+ * support the given extension interface.
+ * @see #getAvailableExtensionInterfaceNames
+ * @see #getExtensionInterface
+ * @hide
+ */
+ @Nullable
+ @SystemApi
+ public String getExtensionInterfacePermission(@NonNull String name) {
+ return null;
+ }
+
+ /**
* Returns a concrete implementation of {@link Session}.
*
* <p>May return {@code null} if this TV input service fails to create a session for some
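
For illustration, a minimal sketch of a hardware TV input service implementing the three new
overrides; the interface name and permission string are hypothetical, and because these are
hidden system APIs the overrides are only visible to code built against platform sources:

    import android.media.tv.TvInputService;
    import android.os.Binder;
    import android.os.IBinder;
    import java.util.ArrayList;
    import java.util.List;

    public class ExampleHardwareTvInputService extends TvInputService {
        private static final String EXT_NAME = "com.example.tv.IPictureTuning";
        private final IBinder mExtBinder = new Binder();

        @Override
        public List<String> getAvailableExtensionInterfaceNames() {
            List<String> names = new ArrayList<>();
            names.add(EXT_NAME);
            return names;
        }

        @Override
        public IBinder getExtensionInterface(String name) {
            return EXT_NAME.equals(name) ? mExtBinder : null;
        }

        @Override
        public String getExtensionInterfacePermission(String name) {
            // Callers must hold this permission to obtain the binder above.
            return EXT_NAME.equals(name) ? "com.example.permission.TUNE_PICTURE" : null;
        }

        @Override
        public Session onCreateSession(String inputId) {
            return null; // Session creation elided for brevity.
        }
    }
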
diff --git a/media/java/android/media/tv/tunerresourcemanager/OWNER b/media/java/android/media/tv/tunerresourcemanager/OWNER
index 76b84d98046a..0eb1c311695c 100644
--- a/media/java/android/media/tv/tunerresourcemanager/OWNER
+++ b/media/java/android/media/tv/tunerresourcemanager/OWNER
@@ -1,4 +1,3 @@
-amyjojo@google.com
-nchalko@google.com
quxiangfang@google.com
-shubang@google.com \ No newline at end of file
+shubang@google.com
+kemiyagi@google.com \ No newline at end of file
diff --git a/media/java/android/mtp/MtpDatabase.java b/media/java/android/mtp/MtpDatabase.java
index d8f48c2cf0c6..20d711cf4c54 100755
--- a/media/java/android/mtp/MtpDatabase.java
+++ b/media/java/android/mtp/MtpDatabase.java
@@ -103,6 +103,7 @@ public class MtpDatabase implements AutoCloseable {
private int mDeviceType;
private String mHostType;
private boolean mSkipThumbForHost = false;
+ private volatile boolean mHostIsWindows = false;
private MtpServer mServer;
private MtpStorageManager mManager;
@@ -358,7 +359,7 @@ public class MtpDatabase implements AutoCloseable {
}
public void addStorage(StorageVolume storage) {
- MtpStorage mtpStorage = mManager.addMtpStorage(storage);
+ MtpStorage mtpStorage = mManager.addMtpStorage(storage, () -> mHostIsWindows);
mStorageMap.put(storage.getPath(), mtpStorage);
if (mServer != null) {
mServer.addStorage(mtpStorage);
@@ -413,6 +414,7 @@ public class MtpDatabase implements AutoCloseable {
}
mHostType = "";
mSkipThumbForHost = false;
+ mHostIsWindows = false;
}
@VisibleForNative
@@ -736,10 +738,12 @@ public class MtpDatabase implements AutoCloseable {
: MtpConstants.RESPONSE_GENERAL_ERROR);
case MtpConstants.DEVICE_PROPERTY_SESSION_INITIATOR_VERSION_INFO:
mHostType = stringValue;
+ Log.d(TAG, "setDeviceProperty." + Integer.toHexString(property)
+ + "=" + stringValue);
if (stringValue.startsWith("Android/")) {
- Log.d(TAG, "setDeviceProperty." + Integer.toHexString(property)
- + "=" + stringValue);
mSkipThumbForHost = true;
+ } else if (stringValue.startsWith("Windows/")) {
+ mHostIsWindows = true;
}
return MtpConstants.RESPONSE_OK;
}
diff --git a/media/java/android/mtp/MtpStorage.java b/media/java/android/mtp/MtpStorage.java
index 88c32a3ea72b..a3754e90a875 100644
--- a/media/java/android/mtp/MtpStorage.java
+++ b/media/java/android/mtp/MtpStorage.java
@@ -19,6 +19,8 @@ package android.mtp;
import android.compat.annotation.UnsupportedAppUsage;
import android.os.storage.StorageVolume;
+import java.util.function.Supplier;
+
/**
* This class represents a storage unit on an MTP device.
* Used only for MTP support in USB responder mode.
@@ -33,14 +35,16 @@ public class MtpStorage {
private final boolean mRemovable;
private final long mMaxFileSize;
private final String mVolumeName;
+ private final Supplier<Boolean> mIsHostWindows;
- public MtpStorage(StorageVolume volume, int storageId) {
+ public MtpStorage(StorageVolume volume, int storageId, Supplier<Boolean> isHostWindows) {
mStorageId = storageId;
mPath = volume.getPath();
mDescription = volume.getDescription(null);
mRemovable = volume.isRemovable();
mMaxFileSize = volume.getMaxFileSize();
mVolumeName = volume.getMediaStoreVolumeName();
+ mIsHostWindows = isHostWindows;
}
/**
@@ -93,4 +97,13 @@ public class MtpStorage {
public String getVolumeName() {
return mVolumeName;
}
+
+ /**
+ * Returns true if the MTP host of this storage is Windows.
+ *
+ * @return true if the host is Windows
+ */
+ public boolean isHostWindows() {
+ return mIsHostWindows.get();
+ }
}
diff --git a/media/java/android/mtp/MtpStorageManager.java b/media/java/android/mtp/MtpStorageManager.java
index 0bede0dccbed..e9426cf2ce31 100644
--- a/media/java/android/mtp/MtpStorageManager.java
+++ b/media/java/android/mtp/MtpStorageManager.java
@@ -18,7 +18,11 @@ package android.mtp;
import android.media.MediaFile;
import android.os.FileObserver;
+import android.os.SystemProperties;
import android.os.storage.StorageVolume;
+import android.system.ErrnoException;
+import android.system.Os;
+import android.system.StructStat;
import android.util.Log;
import com.android.internal.util.Preconditions;
@@ -35,6 +39,7 @@ import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
+import java.util.function.Supplier;
/**
* MtpStorageManager provides functionality for listing, tracking, and notifying MtpServer of
@@ -199,7 +204,38 @@ public class MtpStorageManager {
}
public long getSize() {
- return mIsDir ? 0 : getPath().toFile().length();
+ return mIsDir ? 0 : maybeApplyTranscodeLengthWorkaround(getPath().toFile().length());
+ }
+
+ private long maybeApplyTranscodeLengthWorkaround(long length) {
+ // Windows truncates transferred files to the size advertised in the object property.
+ if (mStorage.isHostWindows() && isTranscodeMtpEnabled() && isFileTranscodeSupported()) {
+ // If the file supports transcoding, we double the returned size to accommodate
+ // the increase in size from transcoding to AVC. This is the same heuristic
+ // applied in the FUSE daemon (MediaProvider).
+ return length * 2;
+ }
+ return length;
+ }
+
+ private boolean isTranscodeMtpEnabled() {
+ return SystemProperties.getBoolean("sys.fuse.transcode_mtp", false);
+ }
+
+ private boolean isFileTranscodeSupported() {
+ // Check if the file supports transcoding by reading the |st_nlink| struct stat
+ // field, which is > 1 if the file supports transcoding. The FUSE daemon sets the
+ // field accordingly so that the MTP stack can work around a Windows MTP client
+ // bug where the size returned as part of getting the MTP object is ignored; see
+ // MtpServer#doGetObject.
+ final Path path = getPath();
+ try {
+ StructStat stat = Os.stat(path.toString());
+ return stat.st_nlink > 1;
+ } catch (ErrnoException e) {
+ Log.w(TAG, "Failed to stat path: " + getPath() + ". Ignoring transcoding.");
+ return false;
+ }
}
public Path getPath() {
@@ -420,10 +456,12 @@ public class MtpStorageManager {
* @param volume Storage to add.
* @return the associated MtpStorage
*/
- public synchronized MtpStorage addMtpStorage(StorageVolume volume) {
+ public synchronized MtpStorage addMtpStorage(StorageVolume volume,
+ Supplier<Boolean> isHostWindows) {
int storageId = ((getNextStorageId() & 0x0000FFFF) << 16) + 1;
- MtpStorage storage = new MtpStorage(volume, storageId);
- MtpObject root = new MtpObject(storage.getPath(), storageId, storage, null, true);
+ MtpStorage storage = new MtpStorage(volume, storageId, isHostWindows);
+ MtpObject root = new MtpObject(storage.getPath(), storageId, storage, /* parent= */ null,
+ /* isDir= */ true);
mRoots.put(storageId, root);
return storage;
}
diff --git a/media/jni/Android.bp b/media/jni/Android.bp
index bc73f6ad1ad2..c775b6f7afc2 100644
--- a/media/jni/Android.bp
+++ b/media/jni/Android.bp
@@ -183,8 +183,8 @@ cc_library_shared {
"libmedia",
"libnativehelper",
"libutils",
- "tv_tuner_aidl_interface-ndk_platform",
- "tv_tuner_resource_manager_aidl_interface-ndk_platform",
+ "tv_tuner_aidl_interface-ndk",
+ "tv_tuner_resource_manager_aidl_interface-ndk",
],
static_libs: [
diff --git a/media/jni/OWNERS b/media/jni/OWNERS
index f1b0237d9008..445672b7ca13 100644
--- a/media/jni/OWNERS
+++ b/media/jni/OWNERS
@@ -2,4 +2,4 @@
per-file android_mtp_*.cpp=marcone@google.com,jsharkey@android.com,jameswei@google.com,rmojumder@google.com
# extra for TV related files
-per-file android_media_tv_*=nchalko@google.com,quxiangfang@google.com
+per-file android_media_tv_*=hgchen@google.com,quxiangfang@google.com
diff --git a/media/jni/android_media_MediaPlayer.cpp b/media/jni/android_media_MediaPlayer.cpp
index 2636ab227646..8dcdc989ec8f 100644
--- a/media/jni/android_media_MediaPlayer.cpp
+++ b/media/jni/android_media_MediaPlayer.cpp
@@ -953,7 +953,7 @@ android_media_MediaPlayer_native_setup(JNIEnv *env, jobject thiz, jobject weak_t
Parcel* parcel = parcelForJavaObject(env, jAttributionSource);
android::content::AttributionSourceState attributionSource;
attributionSource.readFromParcel(parcel);
- sp<MediaPlayer> mp = new MediaPlayer(attributionSource);
+ sp<MediaPlayer> mp = sp<MediaPlayer>::make(attributionSource);
if (mp == NULL) {
jniThrowException(env, "java/lang/RuntimeException", "Out of memory");
return;
diff --git a/media/jni/android_media_Utils.cpp b/media/jni/android_media_Utils.cpp
index ecd9cc1cd098..39b560b393a9 100644
--- a/media/jni/android_media_Utils.cpp
+++ b/media/jni/android_media_Utils.cpp
@@ -65,6 +65,7 @@ bool isPossiblyYUV(PixelFormat format) {
case HAL_PIXEL_FORMAT_Y8:
case HAL_PIXEL_FORMAT_Y16:
case HAL_PIXEL_FORMAT_RAW16:
+ case HAL_PIXEL_FORMAT_RAW12:
case HAL_PIXEL_FORMAT_RAW10:
case HAL_PIXEL_FORMAT_RAW_OPAQUE:
case HAL_PIXEL_FORMAT_BLOB:
diff --git a/media/jni/audioeffect/android_media_AudioEffect.cpp b/media/jni/audioeffect/android_media_AudioEffect.cpp
index 3a8decdad18f..ada8901861ef 100644
--- a/media/jni/audioeffect/android_media_AudioEffect.cpp
+++ b/media/jni/audioeffect/android_media_AudioEffect.cpp
@@ -14,8 +14,8 @@
* limitations under the License.
*/
-
#include <stdio.h>
+#include <unordered_set>
//#define LOG_NDEBUG 0
#define LOG_TAG "AudioEffects-JNI"
@@ -61,22 +61,15 @@ static fields_t fields;
struct effect_callback_cookie {
jclass audioEffect_class; // AudioEffect class
jobject audioEffect_ref; // AudioEffect object instance
- };
+ bool busy;
+ Condition cond;
+};
// ----------------------------------------------------------------------------
-class AudioEffectJniStorage {
- public:
- effect_callback_cookie mCallbackData;
-
- AudioEffectJniStorage() {
- }
-
- ~AudioEffectJniStorage() {
- }
-
+struct AudioEffectJniStorage {
+ effect_callback_cookie mCallbackData{};
};
-
jint AudioEffectJni::translateNativeErrorToJava(int code) {
switch(code) {
case NO_ERROR:
@@ -104,6 +97,7 @@ jint AudioEffectJni::translateNativeErrorToJava(int code) {
}
static Mutex sLock;
+static std::unordered_set<effect_callback_cookie*> sAudioEffectCallBackCookies;
// ----------------------------------------------------------------------------
static void effectCallback(int event, void* user, void *info) {
@@ -124,7 +118,13 @@ static void effectCallback(int event, void* user, void *info) {
ALOGW("effectCallback error user %p, env %p", user, env);
return;
}
-
+ {
+ Mutex::Autolock l(sLock);
+ if (sAudioEffectCallBackCookies.count(callbackInfo) == 0) {
+ return;
+ }
+ callbackInfo->busy = true;
+ }
ALOGV("effectCallback: callbackInfo %p, audioEffect_ref %p audioEffect_class %p",
callbackInfo,
callbackInfo->audioEffect_ref,
@@ -191,6 +191,11 @@ effectCallback_Exit:
env->ExceptionDescribe();
env->ExceptionClear();
}
+ {
+ Mutex::Autolock l(sLock);
+ callbackInfo->busy = false;
+ callbackInfo->cond.broadcast();
+ }
}
// ----------------------------------------------------------------------------
@@ -401,6 +406,10 @@ android_media_AudioEffect_native_setup(JNIEnv *env, jobject thiz, jobject weak_t
setAudioEffect(env, thiz, lpAudioEffect);
}
+ {
+ Mutex::Autolock l(sLock);
+ sAudioEffectCallBackCookies.insert(&lpJniStorage->mCallbackData);
+ }
env->SetLongField(thiz, fields.fidJniData, (jlong)lpJniStorage);
return (jint) AUDIOEFFECT_SUCCESS;
@@ -432,6 +441,7 @@ setup_failure:
// ----------------------------------------------------------------------------
+#define CALLBACK_COND_WAIT_TIMEOUT_MS 1000
static void android_media_AudioEffect_native_release(JNIEnv *env, jobject thiz) {
sp<AudioEffect> lpAudioEffect = setAudioEffect(env, thiz, 0);
if (lpAudioEffect == 0) {
@@ -447,7 +457,17 @@ static void android_media_AudioEffect_native_release(JNIEnv *env, jobject thiz)
env->SetLongField(thiz, fields.fidJniData, 0);
if (lpJniStorage) {
- ALOGV("deleting pJniStorage: %p\n", lpJniStorage);
+ Mutex::Autolock l(sLock);
+ effect_callback_cookie *lpCookie = &lpJniStorage->mCallbackData;
+ ALOGV("deleting lpJniStorage: %p\n", lpJniStorage);
+ sAudioEffectCallBackCookies.erase(lpCookie);
+ while (lpCookie->busy) {
+ if (lpCookie->cond.waitRelative(sLock,
+ milliseconds(CALLBACK_COND_WAIT_TIMEOUT_MS)) !=
+ NO_ERROR) {
+ break;
+ }
+ }
env->DeleteGlobalRef(lpJniStorage->mCallbackData.audioEffect_class);
env->DeleteGlobalRef(lpJniStorage->mCallbackData.audioEffect_ref);
delete lpJniStorage;
diff --git a/media/jni/audioeffect/android_media_Visualizer.cpp b/media/jni/audioeffect/android_media_Visualizer.cpp
index b30f00fdf7de..7b00f9392395 100644
--- a/media/jni/audioeffect/android_media_Visualizer.cpp
+++ b/media/jni/audioeffect/android_media_Visualizer.cpp
@@ -15,6 +15,7 @@
*/
#include <stdio.h>
+#include <unordered_set>
//#define LOG_NDEBUG 0
#define LOG_TAG "visualizers-JNI"
@@ -68,6 +69,12 @@ struct visualizer_callback_cookie {
jclass visualizer_class; // Visualizer class
jobject visualizer_ref; // Visualizer object instance
+ // 'busy_count' and 'cond' together with 'sLock' are used to serialize
+ // concurrent access to the callback cookie from 'setup'/'release'
+ // and the callback.
+ int busy_count;
+ Condition cond;
+
// Lazily allocated arrays used to hold callback data provided to java
// applications. These arrays are allocated during the first callback and
// reallocated when the size of the callback data changes. Allocating on
@@ -75,14 +82,12 @@ struct visualizer_callback_cookie {
// reference to the provided data (they need to make a copy if they want to
// hold onto outside of the callback scope), but it avoids GC thrash caused
// by constantly allocating and releasing arrays to hold callback data.
+ // 'callback_data_lock' must never be held at the same time as 'sLock'.
Mutex callback_data_lock;
jbyteArray waveform_data;
jbyteArray fft_data;
- visualizer_callback_cookie() {
- waveform_data = NULL;
- fft_data = NULL;
- }
+ // Assumes use of default initialization by the client.
~visualizer_callback_cookie() {
cleanupBuffers();
@@ -107,15 +112,8 @@ struct visualizer_callback_cookie {
};
// ----------------------------------------------------------------------------
-class VisualizerJniStorage {
- public:
- visualizer_callback_cookie mCallbackData;
-
- VisualizerJniStorage() {
- }
-
- ~VisualizerJniStorage() {
- }
+struct VisualizerJniStorage {
+ visualizer_callback_cookie mCallbackData{};
};
@@ -141,6 +139,7 @@ static jint translateError(int code) {
}
static Mutex sLock;
+static std::unordered_set<visualizer_callback_cookie*> sVisualizerCallBackCookies;
// ----------------------------------------------------------------------------
static void ensureArraySize(JNIEnv *env, jbyteArray *array, uint32_t size) {
@@ -178,11 +177,19 @@ static void captureCallback(void* user,
return;
}
+ {
+ Mutex::Autolock l(sLock);
+ if (sVisualizerCallBackCookies.count(callbackInfo) == 0) {
+ return;
+ }
+ callbackInfo->busy_count++;
+ }
ALOGV("captureCallback: callbackInfo %p, visualizer_ref %p visualizer_class %p",
callbackInfo,
callbackInfo->visualizer_ref,
callbackInfo->visualizer_class);
+ {
AutoMutex lock(&callbackInfo->callback_data_lock);
if (waveformSize != 0 && waveform != NULL) {
@@ -224,11 +231,17 @@ static void captureCallback(void* user,
jArray);
}
}
+ } // callback_data_lock scope
if (env->ExceptionCheck()) {
env->ExceptionDescribe();
env->ExceptionClear();
}
+ {
+ Mutex::Autolock l(sLock);
+ callbackInfo->busy_count--;
+ callbackInfo->cond.broadcast();
+ }
}
// ----------------------------------------------------------------------------
@@ -337,16 +350,41 @@ static void android_media_visualizer_effect_callback(int32_t event,
void *info) {
if ((event == AudioEffect::EVENT_ERROR) &&
(*((status_t*)info) == DEAD_OBJECT)) {
- VisualizerJniStorage* lpJniStorage = (VisualizerJniStorage*)user;
- visualizer_callback_cookie* callbackInfo = &lpJniStorage->mCallbackData;
+ visualizer_callback_cookie* callbackInfo =
+ (visualizer_callback_cookie *)user;
JNIEnv *env = AndroidRuntime::getJNIEnv();
+ if (!user || !env) {
+ ALOGW("effectCallback error user %p, env %p", user, env);
+ return;
+ }
+ {
+ Mutex::Autolock l(sLock);
+ if (sVisualizerCallBackCookies.count(callbackInfo) == 0) {
+ return;
+ }
+ callbackInfo->busy_count++;
+ }
+ ALOGV("effectCallback: callbackInfo %p, visualizer_ref %p visualizer_class %p",
+ callbackInfo,
+ callbackInfo->visualizer_ref,
+ callbackInfo->visualizer_class);
+
env->CallStaticVoidMethod(
callbackInfo->visualizer_class,
fields.midPostNativeEvent,
callbackInfo->visualizer_ref,
NATIVE_EVENT_SERVER_DIED,
0, NULL);
+ if (env->ExceptionCheck()) {
+ env->ExceptionDescribe();
+ env->ExceptionClear();
+ }
+ {
+ Mutex::Autolock l(sLock);
+ callbackInfo->busy_count--;
+ callbackInfo->cond.broadcast();
+ }
}
}
@@ -396,7 +434,7 @@ android_media_visualizer_native_setup(JNIEnv *env, jobject thiz, jobject weak_th
}
lpVisualizer->set(0,
android_media_visualizer_effect_callback,
- lpJniStorage,
+ &lpJniStorage->mCallbackData,
(audio_session_t) sessionId);
lStatus = translateError(lpVisualizer->initCheck());
@@ -417,6 +455,10 @@ android_media_visualizer_native_setup(JNIEnv *env, jobject thiz, jobject weak_th
setVisualizer(env, thiz, lpVisualizer);
+ {
+ Mutex::Autolock l(sLock);
+ sVisualizerCallBackCookies.insert(&lpJniStorage->mCallbackData);
+ }
env->SetLongField(thiz, fields.fidJniData, (jlong)lpJniStorage);
return VISUALIZER_SUCCESS;
@@ -439,13 +481,15 @@ setup_failure:
}
// ----------------------------------------------------------------------------
+#define CALLBACK_COND_WAIT_TIMEOUT_MS 1000
static void android_media_visualizer_native_release(JNIEnv *env, jobject thiz) {
- { //limit scope so that lpVisualizer is deleted before JNI storage data.
+ {
sp<Visualizer> lpVisualizer = setVisualizer(env, thiz, 0);
if (lpVisualizer == 0) {
return;
}
lpVisualizer->release();
+ // Visualizer can still be held by AudioEffect::EffectClient
}
// delete the JNI data
VisualizerJniStorage* lpJniStorage =
@@ -456,9 +500,22 @@ static void android_media_visualizer_native_release(JNIEnv *env, jobject thiz)
env->SetLongField(thiz, fields.fidJniData, 0);
if (lpJniStorage) {
+ {
+ Mutex::Autolock l(sLock);
+ visualizer_callback_cookie *lpCookie = &lpJniStorage->mCallbackData;
ALOGV("deleting pJniStorage: %p\n", lpJniStorage);
+ sVisualizerCallBackCookies.erase(lpCookie);
+ while (lpCookie->busy_count > 0) {
+ if (lpCookie->cond.waitRelative(sLock,
+ milliseconds(CALLBACK_COND_WAIT_TIMEOUT_MS)) !=
+ NO_ERROR) {
+ break;
+ }
+ }
+ ALOG_ASSERT(lpCookie->busy_count == 0, "Unbalanced busy_count inc/dec");
env->DeleteGlobalRef(lpJniStorage->mCallbackData.visualizer_class);
env->DeleteGlobalRef(lpJniStorage->mCallbackData.visualizer_ref);
+ } // sLock scope
delete lpJniStorage;
}
}
@@ -714,4 +771,3 @@ int register_android_media_visualizer(JNIEnv *env)
{
return AndroidRuntime::registerNativeMethods(env, kClassPathName, gMethods, NELEM(gMethods));
}
-
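
The Visualizer change pairs that drain with a global registry: setup() inserts the cookie into sVisualizerCallBackCookies, release() erases it before waiting, and each native callback first checks membership under sLock and increments busy_count before touching Java state. A hedged sketch of the callback-side check, again with standard C++ types and illustrative names (Cookie, gLiveCookies, enterCallback/exitCallback are not from the patch):

    #include <condition_variable>
    #include <mutex>
    #include <unordered_set>

    struct Cookie {
        int busy_count = 0;
        std::condition_variable cond;
    };

    static std::mutex gLock;                          // stands in for sLock
    static std::unordered_set<Cookie*> gLiveCookies;  // stands in for sVisualizerCallBackCookies

    // Top of a native callback: ignore events for cookies that release()
    // has already unregistered, otherwise mark the cookie busy.
    static bool enterCallback(Cookie* cookie) {
        std::lock_guard<std::mutex> l(gLock);
        if (gLiveCookies.count(cookie) == 0) {
            return false;  // instance already released; drop the event
        }
        cookie->busy_count++;
        return true;
    }

    // Bottom of the callback: a concurrent release() may be waiting on
    // 'cond' for busy_count to reach zero before freeing the cookie.
    static void exitCallback(Cookie* cookie) {
        std::lock_guard<std::mutex> l(gLock);
        cookie->busy_count--;
        cookie->cond.notify_all();
    }
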
diff --git a/media/jni/soundpool/SoundPool.cpp b/media/jni/soundpool/SoundPool.cpp
index 253b4e3a8a09..00c4a9782f33 100644
--- a/media/jni/soundpool/SoundPool.cpp
+++ b/media/jni/soundpool/SoundPool.cpp
@@ -185,7 +185,7 @@ void SoundPool::stop(int32_t streamID)
auto apiLock = kUseApiLock ? std::make_unique<std::lock_guard<std::mutex>>(mApiLock) : nullptr;
soundpool::Stream* stream = mStreamManager.findStream(streamID);
if (stream != nullptr && stream->requestStop(streamID)) {
- mStreamManager.moveToRestartQueue(stream);
+ mStreamManager.moveToRestartQueue(stream, streamID);
}
}
diff --git a/media/jni/soundpool/Stream.cpp b/media/jni/soundpool/Stream.cpp
index bbbef3853b23..50bb79ccaa0b 100644
--- a/media/jni/soundpool/Stream.cpp
+++ b/media/jni/soundpool/Stream.cpp
@@ -228,10 +228,9 @@ Stream* Stream::getPairStream() const
return mStreamManager->getPairStream(this);
}
-Stream* Stream::playPairStream() {
+Stream* Stream::playPairStream(std::vector<std::any>& garbage) {
Stream* pairStream = getPairStream();
LOG_ALWAYS_FATAL_IF(pairStream == nullptr, "No pair stream!");
- sp<AudioTrack> releaseTracks[2];
{
ALOGV("%s: track streamID: %d", __func__, (int)getStreamID());
// TODO: Do we really want to force a simultaneous synchronization between
@@ -260,7 +259,7 @@ Stream* Stream::playPairStream() {
const int pairState = pairStream->mState;
pairStream->play_l(pairStream->mSound, pairStream->mStreamID,
pairStream->mLeftVolume, pairStream->mRightVolume, pairStream->mPriority,
- pairStream->mLoop, pairStream->mRate, releaseTracks);
+ pairStream->mLoop, pairStream->mRate, garbage);
if (pairStream->mState == IDLE) {
return nullptr; // AudioTrack error
}
@@ -269,122 +268,111 @@ Stream* Stream::playPairStream() {
pairStream->mAudioTrack->pause();
}
}
- // release tracks outside of Stream lock
return pairStream;
}
void Stream::play_l(const std::shared_ptr<Sound>& sound, int32_t nextStreamID,
float leftVolume, float rightVolume, int32_t priority, int32_t loop, float rate,
- sp<AudioTrack> releaseTracks[2])
+ std::vector<std::any>& garbage)
{
- // These tracks are released without the lock.
- sp<AudioTrack> &oldTrack = releaseTracks[0];
- sp<AudioTrack> &newTrack = releaseTracks[1];
- status_t status = NO_ERROR;
-
- {
- ALOGV("%s(%p)(soundID=%d, streamID=%d, leftVolume=%f, rightVolume=%f,"
- " priority=%d, loop=%d, rate=%f)",
- __func__, this, sound->getSoundID(), nextStreamID, leftVolume, rightVolume,
- priority, loop, rate);
-
- // initialize track
- const audio_stream_type_t streamType =
- AudioSystem::attributesToStreamType(*mStreamManager->getAttributes());
- const int32_t channelCount = sound->getChannelCount();
- const auto sampleRate = (uint32_t)lround(double(sound->getSampleRate()) * rate);
- size_t frameCount = 0;
-
- if (loop) {
- const audio_format_t format = sound->getFormat();
- const size_t frameSize = audio_is_linear_pcm(format)
- ? channelCount * audio_bytes_per_sample(format) : 1;
- frameCount = sound->getSizeInBytes() / frameSize;
- }
+ ALOGV("%s(%p)(soundID=%d, streamID=%d, leftVolume=%f, rightVolume=%f,"
+ " priority=%d, loop=%d, rate=%f)",
+ __func__, this, sound->getSoundID(), nextStreamID, leftVolume, rightVolume,
+ priority, loop, rate);
+
+ // initialize track
+ const audio_stream_type_t streamType =
+ AudioSystem::attributesToStreamType(*mStreamManager->getAttributes());
+ const int32_t channelCount = sound->getChannelCount();
+ const auto sampleRate = (uint32_t)lround(double(sound->getSampleRate()) * rate);
+ size_t frameCount = 0;
+
+ if (loop) {
+ const audio_format_t format = sound->getFormat();
+ const size_t frameSize = audio_is_linear_pcm(format)
+ ? channelCount * audio_bytes_per_sample(format) : 1;
+ frameCount = sound->getSizeInBytes() / frameSize;
+ }
- // check if the existing track has the same sound id.
- if (mAudioTrack != nullptr && mSoundID == sound->getSoundID()) {
+ if (mAudioTrack != nullptr) {
+ if (mSoundID == sound->getSoundID()
+ && mAudioTrack->setSampleRate(sampleRate) == NO_ERROR) {
+ // Reuse the old track if the soundID matches.
// the sample rate may fail to change if the audio track is a fast track.
- if (mAudioTrack->setSampleRate(sampleRate) == NO_ERROR) {
- newTrack = mAudioTrack;
- ALOGV("%s: reusing track %p for sound %d",
- __func__, mAudioTrack.get(), sound->getSoundID());
- }
- }
- if (newTrack == nullptr) {
- // mToggle toggles each time a track is started on a given stream.
- // The toggle is concatenated with the Stream address and passed to AudioTrack
- // as callback user data. This enables the detection of callbacks received from the old
- // audio track while the new one is being started and avoids processing them with
- // wrong audio audio buffer size (mAudioBufferSize)
- auto toggle = mToggle ^ 1;
- // NOLINTNEXTLINE(performance-no-int-to-ptr)
- void* userData = reinterpret_cast<void*>((uintptr_t)this | toggle);
- audio_channel_mask_t soundChannelMask = sound->getChannelMask();
- // When sound contains a valid channel mask, use it as is.
- // Otherwise, use stream count to calculate channel mask.
- audio_channel_mask_t channelMask = soundChannelMask != AUDIO_CHANNEL_NONE
- ? soundChannelMask : audio_channel_out_mask_from_count(channelCount);
-
- // do not create a new audio track if current track is compatible with sound parameters
-
- android::content::AttributionSourceState attributionSource;
- attributionSource.packageName = mStreamManager->getOpPackageName();
- attributionSource.token = sp<BBinder>::make();
- // TODO b/182469354 make consistent with AudioRecord, add util for native source
- newTrack = new AudioTrack(streamType, sampleRate, sound->getFormat(),
- channelMask, sound->getIMemory(), AUDIO_OUTPUT_FLAG_FAST,
- staticCallback, userData,
- 0 /*default notification frames*/, AUDIO_SESSION_ALLOCATE,
- AudioTrack::TRANSFER_DEFAULT,
- nullptr /*offloadInfo*/, attributionSource,
- mStreamManager->getAttributes(),
- false /*doNotReconnect*/, 1.0f /*maxRequiredSpeed*/);
- // Set caller name so it can be logged in destructor.
- // MediaMetricsConstants.h: AMEDIAMETRICS_PROP_CALLERNAME_VALUE_SOUNDPOOL
- newTrack->setCallerName("soundpool");
- oldTrack = mAudioTrack;
- status = newTrack->initCheck();
- if (status != NO_ERROR) {
- ALOGE("%s: error creating AudioTrack", __func__);
- // newTrack goes out of scope, so reference count drops to zero
- goto exit;
- }
- // From now on, AudioTrack callbacks received with previous toggle value will be ignored.
- mToggle = toggle;
- mAudioTrack = newTrack;
- ALOGV("%s: using new track %p for sound %d",
- __func__, newTrack.get(), sound->getSoundID());
- }
- if (mMuted) {
- newTrack->setVolume(0.0f, 0.0f);
+ ALOGV("%s: reusing track %p for sound %d",
+ __func__, mAudioTrack.get(), sound->getSoundID());
} else {
- newTrack->setVolume(leftVolume, rightVolume);
+ // If reuse is not possible, move mAudioTrack to the garbage list and clear it.
+ garbage.emplace_back(std::move(mAudioTrack));
+ mAudioTrack.clear(); // move should have cleared the sp<>, but we clear just in case.
}
- newTrack->setLoop(0, frameCount, loop);
- mAudioTrack->start();
- mSound = sound;
- mSoundID = sound->getSoundID();
- mPriority = priority;
- mLoop = loop;
- mLeftVolume = leftVolume;
- mRightVolume = rightVolume;
- mRate = rate;
- mState = PLAYING;
- mStopTimeNs = 0;
- mStreamID = nextStreamID; // prefer this to be the last, as it is an atomic sync point
}
-
-exit:
- ALOGV("%s: delete oldTrack %p", __func__, oldTrack.get());
- if (status != NO_ERROR) {
- // TODO: should we consider keeping the soundID if the old track is OK?
- // Do not attempt to restart this track (should we remove the stream id?)
- mState = IDLE;
- mSoundID = 0;
- mSound.reset();
- mAudioTrack.clear(); // actual release from releaseTracks[]
+ if (mAudioTrack == nullptr) {
+ // mToggle toggles each time a track is started on a given stream.
+ // The toggle is concatenated with the Stream address and passed to AudioTrack
+ // as callback user data. This enables the detection of callbacks received from the old
+ // audio track while the new one is being started and avoids processing them with
+ // the wrong audio buffer size (mAudioBufferSize)
+ auto toggle = mToggle ^ 1;
+ // NOLINTNEXTLINE(performance-no-int-to-ptr)
+ void* userData = reinterpret_cast<void*>((uintptr_t)this | toggle);
+ audio_channel_mask_t soundChannelMask = sound->getChannelMask();
+ // When sound contains a valid channel mask, use it as is.
+ // Otherwise, use stream count to calculate channel mask.
+ audio_channel_mask_t channelMask = soundChannelMask != AUDIO_CHANNEL_NONE
+ ? soundChannelMask : audio_channel_out_mask_from_count(channelCount);
+
+ // The existing track (if any) was not reusable, so create a new AudioTrack.
+
+ android::content::AttributionSourceState attributionSource;
+ attributionSource.packageName = mStreamManager->getOpPackageName();
+ attributionSource.token = sp<BBinder>::make();
+ // TODO b/182469354 make consistent with AudioRecord, add util for native source
+ mAudioTrack = new AudioTrack(streamType, sampleRate, sound->getFormat(),
+ channelMask, sound->getIMemory(), AUDIO_OUTPUT_FLAG_FAST,
+ staticCallback, userData,
+ 0 /*default notification frames*/, AUDIO_SESSION_ALLOCATE,
+ AudioTrack::TRANSFER_DEFAULT,
+ nullptr /*offloadInfo*/, attributionSource,
+ mStreamManager->getAttributes(),
+ false /*doNotReconnect*/, 1.0f /*maxRequiredSpeed*/);
+ // Set caller name so it can be logged in destructor.
+ // MediaMetricsConstants.h: AMEDIAMETRICS_PROP_CALLERNAME_VALUE_SOUNDPOOL
+ mAudioTrack->setCallerName("soundpool");
+
+ if (status_t status = mAudioTrack->initCheck();
+ status != NO_ERROR) {
+ ALOGE("%s: error %d creating AudioTrack", __func__, status);
+ // TODO: should we consider keeping the soundID and reusing the old track?
+ mState = IDLE;
+ mSoundID = 0;
+ mSound.reset();
+ garbage.emplace_back(std::move(mAudioTrack)); // remove mAudioTrack.
+ mAudioTrack.clear(); // move should have cleared the sp<>, but we clear just in case.
+ return;
+ }
+ // From now on, AudioTrack callbacks received with previous toggle value will be ignored.
+ mToggle = toggle;
+ ALOGV("%s: using new track %p for sound %d",
+ __func__, mAudioTrack.get(), sound->getSoundID());
+ }
+ if (mMuted) {
+ mAudioTrack->setVolume(0.f, 0.f);
+ } else {
+ mAudioTrack->setVolume(leftVolume, rightVolume);
}
+ mAudioTrack->setLoop(0, frameCount, loop);
+ mAudioTrack->start();
+ mSound = sound;
+ mSoundID = sound->getSoundID();
+ mPriority = priority;
+ mLoop = loop;
+ mLeftVolume = leftVolume;
+ mRightVolume = rightVolume;
+ mRate = rate;
+ mState = PLAYING;
+ mStopTimeNs = 0;
+ mStreamID = nextStreamID; // prefer this to be the last, as it is an atomic sync point
}
/* static */
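
Instead of collecting at most two tracks in a fixed releaseTracks[2] array, play_l() now moves any AudioTrack it cannot reuse into a caller-supplied std::vector<std::any>; the caller destroys that vector only after dropping its locks, so the potentially slow AudioTrack teardown never runs under the Stream or StreamManager locks. A small sketch of the general idiom with generic types (Resource, gStateLock and replaceResource are illustrative, not SoundPool API):

    #include <any>
    #include <memory>
    #include <mutex>
    #include <utility>
    #include <vector>

    struct Resource {
        ~Resource() { /* potentially slow teardown, e.g. binder/IPC work */ }
    };

    std::mutex gStateLock;
    std::shared_ptr<Resource> gCurrent;

    // Swap in a new resource while holding the state lock, but defer the
    // destruction of the old one by parking it in 'garbage'.
    void replaceResource(std::shared_ptr<Resource> next,
                         std::vector<std::any>& garbage) {
        std::lock_guard<std::mutex> l(gStateLock);
        garbage.emplace_back(std::move(gCurrent));  // old object stays alive for now
        gCurrent = std::move(next);
    }

    void caller() {
        std::vector<std::any> garbage;
        replaceResource(std::make_shared<Resource>(), garbage);
        // gStateLock is not held here: clearing 'garbage' runs the slow
        // destructors outside of any lock, as SoundPool does with AudioTrack.
        garbage.clear();
    }
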
diff --git a/media/jni/soundpool/Stream.h b/media/jni/soundpool/Stream.h
index d4e5c9fe7f8a..aa0eef5bc66e 100644
--- a/media/jni/soundpool/Stream.h
+++ b/media/jni/soundpool/Stream.h
@@ -18,6 +18,7 @@
#include "Sound.h"
+#include <any>
#include <android-base/thread_annotations.h>
#include <audio_utils/clock.h>
#include <media/AudioTrack.h>
@@ -90,8 +91,9 @@ public:
void mute(bool muting);
void dump() const NO_THREAD_SAFETY_ANALYSIS; // disable for ALOGV (see func for details).
- // returns the pair stream if successful, nullptr otherwise
- Stream* playPairStream();
+ // returns the pair stream if successful, nullptr otherwise.
+ // garbage is used to release tracks and data outside of any lock.
+ Stream* playPairStream(std::vector<std::any>& garbage);
// These parameters are explicitly checked in the SoundPool class
// so never deviate from the Java API specified values.
@@ -123,9 +125,10 @@ public:
Stream* getPairStream() const;
private:
+ // garbage is used to release tracks and data outside of any lock.
void play_l(const std::shared_ptr<Sound>& sound, int streamID,
float leftVolume, float rightVolume, int priority, int loop, float rate,
- sp<AudioTrack> releaseTracks[2]) REQUIRES(mLock);
+ std::vector<std::any>& garbage) REQUIRES(mLock);
void stop_l() REQUIRES(mLock);
void setVolume_l(float leftVolume, float rightVolume) REQUIRES(mLock);
diff --git a/media/jni/soundpool/StreamManager.cpp b/media/jni/soundpool/StreamManager.cpp
index 309d71cfd062..7f987e31d1d8 100644
--- a/media/jni/soundpool/StreamManager.cpp
+++ b/media/jni/soundpool/StreamManager.cpp
@@ -157,6 +157,7 @@ int32_t StreamManager::queueForPlay(const std::shared_ptr<Sound> &sound,
__func__, sound.get(), soundID, leftVolume, rightVolume, priority, loop, rate);
bool launchThread = false;
int32_t streamID = 0;
+ std::vector<std::any> garbage;
{ // for lock
std::unique_lock lock(mStreamManagerLock);
@@ -243,7 +244,7 @@ int32_t StreamManager::queueForPlay(const std::shared_ptr<Sound> &sound,
removeFromQueues_l(newStream);
mProcessingStreams.emplace(newStream);
lock.unlock();
- if (Stream* nextStream = newStream->playPairStream()) {
+ if (Stream* nextStream = newStream->playPairStream(garbage)) {
lock.lock();
ALOGV("%s: starting streamID:%d", __func__, nextStream->getStreamID());
addToActiveQueue_l(nextStream);
@@ -266,6 +267,7 @@ int32_t StreamManager::queueForPlay(const std::shared_ptr<Sound> &sound,
ALOGV_IF(id != 0, "%s: launched thread %d", __func__, id);
}
ALOGV("%s: returning %d", __func__, streamID);
+ // garbage is cleared here outside mStreamManagerLock.
return streamID;
}
@@ -359,6 +361,7 @@ void StreamManager::run(int32_t id)
{
ALOGV("%s(%d) entering", __func__, id);
int64_t waitTimeNs = 0; // on thread start, mRestartStreams can be non-empty.
+ std::vector<std::any> garbage; // used for garbage collection
std::unique_lock lock(mStreamManagerLock);
while (!mQuit) {
if (waitTimeNs > 0) {
@@ -388,7 +391,7 @@ void StreamManager::run(int32_t id)
if (!mLockStreamManagerStop) lock.unlock();
stream->stop();
ALOGV("%s(%d) stopping streamID:%d", __func__, id, stream->getStreamID());
- if (Stream* nextStream = stream->playPairStream()) {
+ if (Stream* nextStream = stream->playPairStream(garbage)) {
ALOGV("%s(%d) starting streamID:%d", __func__, id, nextStream->getStreamID());
if (!mLockStreamManagerStop) lock.lock();
if (nextStream->getStopTimeNs() > 0) {
@@ -405,6 +408,12 @@ void StreamManager::run(int32_t id)
}
mProcessingStreams.erase(stream);
sanityCheckQueue_l();
+ if (!garbage.empty()) {
+ lock.unlock();
+ // garbage audio tracks (etc) are cleared here outside mStreamManagerLock.
+ garbage.clear();
+ lock.lock();
+ }
}
}
ALOGV("%s(%d) exiting", __func__, id);
diff --git a/media/native/midi/amidi.cpp b/media/native/midi/amidi.cpp
index 923377c8180e..f90796e415c0 100644
--- a/media/native/midi/amidi.cpp
+++ b/media/native/midi/amidi.cpp
@@ -325,8 +325,8 @@ public:
}
uint8_t readBuffer[AMIDI_PACKET_SIZE];
- ssize_t readCount = read(mPort->ufd, readBuffer, sizeof(readBuffer));
- if (readCount == EINTR || readCount < 1) {
+ ssize_t readCount = TEMP_FAILURE_RETRY(read(mPort->ufd, readBuffer, sizeof(readBuffer)));
+ if (readCount < 1) {
return AMEDIA_ERROR_UNKNOWN;
}
@@ -407,7 +407,8 @@ ssize_t AMIDI_API AMidiInputPort_sendWithTimestamp(const AMidiInputPort *inputPo
ssize_t numTransferBytes =
AMIDI_makeSendBuffer(writeBuffer, data + numSent, blockSize, timestamp);
- ssize_t numWritten = write(((AMIDI_Port*)inputPort)->ufd, writeBuffer, numTransferBytes);
+ ssize_t numWritten = TEMP_FAILURE_RETRY(write(((AMIDI_Port*)inputPort)->ufd, writeBuffer,
+ numTransferBytes));
if (numWritten < 0) {
break; // error so bail out.
}
@@ -430,7 +431,8 @@ media_status_t AMIDI_API AMidiInputPort_sendFlush(const AMidiInputPort *inputPor
uint8_t opCode = AMIDI_OPCODE_FLUSH;
ssize_t numTransferBytes = 1;
- ssize_t numWritten = write(((AMIDI_Port*)inputPort)->ufd, &opCode, numTransferBytes);
+ ssize_t numWritten = TEMP_FAILURE_RETRY(write(((AMIDI_Port*)inputPort)->ufd, &opCode,
+ numTransferBytes));
if (numWritten < numTransferBytes) {
ALOGE("AMidiInputPort_flush Couldn't write MIDI flush. requested:%zd, written:%zd",
diff --git a/media/tests/EffectsTest/AndroidManifest.xml b/media/tests/EffectsTest/AndroidManifest.xml
index 27c903bf8c67..5916f132a424 100644
--- a/media/tests/EffectsTest/AndroidManifest.xml
+++ b/media/tests/EffectsTest/AndroidManifest.xml
@@ -13,6 +13,10 @@
See the License for the specific language governing permissions and
limitations under the License.
-->
+<!--
+Make sure to enable access to the mic in settings and run:
+adb shell am compat enable ALLOW_TEST_API_ACCESS com.android.effectstest
+-->
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.android.effectstest">
diff --git a/media/tests/EffectsTest/res/layout/bassboosttest.xml b/media/tests/EffectsTest/res/layout/bassboosttest.xml
index ac912c84d107..5f9132cb8cc7 100644
--- a/media/tests/EffectsTest/res/layout/bassboosttest.xml
+++ b/media/tests/EffectsTest/res/layout/bassboosttest.xml
@@ -187,6 +187,11 @@
android:layout_height="wrap_content"
android:scaleType="fitXY"/>
+ <Button android:id="@+id/hammer_on_release_bug"
+ android:layout_width="fill_parent" android:layout_height="wrap_content"
+ android:text="@string/hammer_on_release_bug_name">
+ </Button>
+
</LinearLayout>
</ScrollView>
diff --git a/media/tests/EffectsTest/res/layout/visualizertest.xml b/media/tests/EffectsTest/res/layout/visualizertest.xml
index 18d7a3648fbf..85dabbc115f3 100644
--- a/media/tests/EffectsTest/res/layout/visualizertest.xml
+++ b/media/tests/EffectsTest/res/layout/visualizertest.xml
@@ -175,6 +175,11 @@
</LinearLayout>
+ <Button android:id="@+id/hammer_on_release_bug"
+ android:layout_width="fill_parent" android:layout_height="wrap_content"
+ android:text="@string/hammer_on_release_bug_name">
+ </Button>
+
<ImageView
android:src="@android:drawable/divider_horizontal_dark"
android:layout_width="fill_parent"
diff --git a/media/tests/EffectsTest/res/values/strings.xml b/media/tests/EffectsTest/res/values/strings.xml
index 7c12da1274e3..a44c7e93382a 100644
--- a/media/tests/EffectsTest/res/values/strings.xml
+++ b/media/tests/EffectsTest/res/values/strings.xml
@@ -37,4 +37,6 @@
<string name="send_level_name">Send Level</string>
<!-- Toggles use of a multi-threaded client for an effect [CHAR LIMIT=24] -->
<string name="effect_multithreaded">Multithreaded Use</string>
+ <!-- Runs a stress test for a bug related to simultaneous release of multiple effect instances [CHAR LIMIT=24] -->
+ <string name="hammer_on_release_bug_name">Hammer on release()</string>
</resources>
diff --git a/media/tests/EffectsTest/src/com/android/effectstest/BassBoostTest.java b/media/tests/EffectsTest/src/com/android/effectstest/BassBoostTest.java
index cce2acc5869a..a207bf1d5359 100644
--- a/media/tests/EffectsTest/src/com/android/effectstest/BassBoostTest.java
+++ b/media/tests/EffectsTest/src/com/android/effectstest/BassBoostTest.java
@@ -17,29 +17,24 @@
package com.android.effectstest;
import android.app.Activity;
-import android.content.Context;
-import android.content.Intent;
+import android.media.audiofx.AudioEffect;
+import android.media.audiofx.BassBoost;
import android.os.Bundle;
import android.util.Log;
import android.view.KeyEvent;
-import android.view.Menu;
-import android.view.View.OnClickListener;
import android.view.View;
-import android.view.ViewGroup;
+import android.view.View.OnClickListener;
import android.widget.Button;
-import android.widget.TextView;
+import android.widget.CompoundButton;
+import android.widget.CompoundButton.OnCheckedChangeListener;
import android.widget.EditText;
import android.widget.SeekBar;
+import android.widget.TextView;
import android.widget.ToggleButton;
-import android.widget.CompoundButton;
-import android.widget.CompoundButton.OnCheckedChangeListener;
-import java.nio.ByteOrder;
+
import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
import java.util.HashMap;
-import java.util.Map;
-
-import android.media.audiofx.BassBoost;
-import android.media.audiofx.AudioEffect;
public class BassBoostTest extends Activity implements OnCheckedChangeListener {
@@ -78,6 +73,9 @@ public class BassBoostTest extends Activity implements OnCheckedChangeListener {
mReleaseButton = (ToggleButton)findViewById(R.id.bbReleaseButton);
mOnOffButton = (ToggleButton)findViewById(R.id.bassboostOnOff);
+ final Button hammerReleaseTest = (Button) findViewById(R.id.hammer_on_release_bug);
+ hammerReleaseTest.setEnabled(false);
+
getEffect(sSession);
if (mBassBoost != null) {
@@ -93,6 +91,14 @@ public class BassBoostTest extends Activity implements OnCheckedChangeListener {
mStrength = new BassBoostParam(mBassBoost, 0, 1000, seekBar, textView);
seekBar.setOnSeekBarChangeListener(mStrength);
mStrength.setEnabled(mBassBoost.getStrengthSupported());
+
+ hammerReleaseTest.setEnabled(true);
+ hammerReleaseTest.setOnClickListener(new OnClickListener() {
+ @Override
+ public void onClick(View v) {
+ runHammerReleaseTest(hammerReleaseTest);
+ }
+ });
}
}
@@ -273,4 +279,52 @@ public class BassBoostTest extends Activity implements OnCheckedChangeListener {
}
}
+ // Stress-tests releasing of AudioEffect by doing repeated creation
+ // and subsequent releasing. Also forces emission of callbacks from
+ // the AudioFlinger by setting a control status listener. Since all
+ // effect instances are bound to the same session, the AF will
+ // notify them about the change in their status. This can reveal racy
+ // behavior w.r.t. releasing.
+ class HammerReleaseTest extends Thread {
+ private static final int NUM_EFFECTS = 10;
+ private static final int NUM_ITERATIONS = 100;
+ private final int mSession;
+ private final Runnable mOnComplete;
+
+ HammerReleaseTest(int session, Runnable onComplete) {
+ mSession = session;
+ mOnComplete = onComplete;
+ }
+
+ @Override
+ public void run() {
+ Log.w(TAG, "HammerReleaseTest started");
+ BassBoost[] effects = new BassBoost[NUM_EFFECTS];
+ for (int i = 0; i < NUM_ITERATIONS; i++) {
+ for (int j = 0; j < NUM_EFFECTS; j++) {
+ effects[j] = new BassBoost(0, mSession);
+ effects[j].setControlStatusListener(mEffectListener);
+ yield();
+ }
+ for (int j = NUM_EFFECTS - 1; j >= 0; j--) {
+ Log.w(TAG, "HammerReleaseTest releasing effect " + (Object) effects[j]);
+ effects[j].release();
+ effects[j] = null;
+ yield();
+ }
+ }
+ Log.w(TAG, "HammerReleaseTest ended");
+ runOnUiThread(mOnComplete);
+ }
+ }
+
+ private void runHammerReleaseTest(Button controlButton) {
+ controlButton.setEnabled(false);
+ HammerReleaseTest thread = new HammerReleaseTest(sSession,
+ () -> {
+ controlButton.setEnabled(true);
+ });
+ thread.start();
+ }
+
}
diff --git a/media/tests/EffectsTest/src/com/android/effectstest/VisualizerTest.java b/media/tests/EffectsTest/src/com/android/effectstest/VisualizerTest.java
index 2e141c5ef7c8..dcfe11a79ef9 100644
--- a/media/tests/EffectsTest/src/com/android/effectstest/VisualizerTest.java
+++ b/media/tests/EffectsTest/src/com/android/effectstest/VisualizerTest.java
@@ -17,6 +17,7 @@
package com.android.effectstest;
import android.app.Activity;
+import android.media.audiofx.Visualizer;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
@@ -24,6 +25,8 @@ import android.os.Message;
import android.util.Log;
import android.view.KeyEvent;
import android.view.View;
+import android.view.View.OnClickListener;
+import android.widget.Button;
import android.widget.CompoundButton;
import android.widget.CompoundButton.OnCheckedChangeListener;
import android.widget.EditText;
@@ -74,11 +77,22 @@ public class VisualizerTest extends Activity implements OnCheckedChangeListener
mCallbackOn = false;
mCallbackButton.setChecked(mCallbackOn);
+ final Button hammerReleaseTest = (Button) findViewById(R.id.hammer_on_release_bug);
+ hammerReleaseTest.setEnabled(false);
+
mMultithreadedButton.setOnCheckedChangeListener(this);
if (getEffect(sSession) != null) {
mReleaseButton.setOnCheckedChangeListener(this);
mOnOffButton.setOnCheckedChangeListener(this);
mCallbackButton.setOnCheckedChangeListener(this);
+
+ hammerReleaseTest.setEnabled(true);
+ hammerReleaseTest.setOnClickListener(new OnClickListener() {
+ @Override
+ public void onClick(View v) {
+ runHammerReleaseTest(hammerReleaseTest);
+ }
+ });
}
}
@@ -214,4 +228,50 @@ public class VisualizerTest extends Activity implements OnCheckedChangeListener
}
}
+ // Stress-tests releasing of AudioEffect by doing repeated creation
+ // and subsequent releasing. Unlike a similar class in BassBoostTest,
+ // this one doesn't set a control status listener because Visualizer
+ // doesn't inherit from AudioEffect and doesn't implement this method
+ // by itself.
+ class HammerReleaseTest extends Thread {
+ private static final int NUM_EFFECTS = 10;
+ private static final int NUM_ITERATIONS = 100;
+ private final int mSession;
+ private final Runnable mOnComplete;
+
+ HammerReleaseTest(int session, Runnable onComplete) {
+ mSession = session;
+ mOnComplete = onComplete;
+ }
+
+ @Override
+ public void run() {
+ Log.w(TAG, "HammerReleaseTest started");
+ Visualizer[] effects = new Visualizer[NUM_EFFECTS];
+ for (int i = 0; i < NUM_ITERATIONS; i++) {
+ for (int j = 0; j < NUM_EFFECTS; j++) {
+ effects[j] = new Visualizer(mSession);
+ yield();
+ }
+ for (int j = NUM_EFFECTS - 1; j >= 0; j--) {
+ Log.w(TAG, "HammerReleaseTest releasing effect " + (Object) effects[j]);
+ effects[j].release();
+ effects[j] = null;
+ yield();
+ }
+ }
+ Log.w(TAG, "HammerReleaseTest ended");
+ runOnUiThread(mOnComplete);
+ }
+ }
+
+ private void runHammerReleaseTest(Button controlButton) {
+ controlButton.setEnabled(false);
+ HammerReleaseTest thread = new HammerReleaseTest(sSession,
+ () -> {
+ controlButton.setEnabled(true);
+ });
+ thread.start();
+ }
+
}
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/unit/BluetoothProfileConnectionInfoTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/unit/BluetoothProfileConnectionInfoTest.java
new file mode 100644
index 000000000000..f23794b50543
--- /dev/null
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/unit/BluetoothProfileConnectionInfoTest.java
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.mediaframeworktest.unit;
+
+import static org.junit.Assert.assertEquals;
+
+import android.bluetooth.BluetoothProfile;
+import android.media.BluetoothProfileConnectionInfo;
+
+import androidx.test.runner.AndroidJUnit4;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+@RunWith(AndroidJUnit4.class)
+public class BluetoothProfileConnectionInfoTest {
+
+ @Test
+ public void testCoverageA2dp() {
+ final boolean supprNoisy = false;
+ final int volume = 42;
+ final BluetoothProfileConnectionInfo info = BluetoothProfileConnectionInfo
+ .createA2dpInfo(supprNoisy, volume);
+ assertEquals(info.getProfile(), BluetoothProfile.A2DP);
+ assertEquals(info.isSuppressNoisyIntent(), supprNoisy);
+ assertEquals(info.getVolume(), volume);
+ }
+
+ @Test
+ public void testCoverageA2dpSink() {
+ final int volume = 42;
+ final BluetoothProfileConnectionInfo info = BluetoothProfileConnectionInfo
+ .createA2dpSinkInfo(volume);
+ assertEquals(info.getProfile(), BluetoothProfile.A2DP_SINK);
+ assertEquals(info.getVolume(), volume);
+ }
+
+ @Test
+ public void testCoverageHearingAid() {
+ final boolean supprNoisy = true;
+ final BluetoothProfileConnectionInfo info = BluetoothProfileConnectionInfo
+ .createHearingAidInfo(supprNoisy);
+ assertEquals(info.getProfile(), BluetoothProfile.HEARING_AID);
+ assertEquals(info.isSuppressNoisyIntent(), supprNoisy);
+ }
+
+ @Test
+ public void testCoverageLeAudio() {
+ final boolean supprNoisy = false;
+ final boolean isLeOutput = true;
+ final BluetoothProfileConnectionInfo info = BluetoothProfileConnectionInfo
+ .createLeAudioInfo(supprNoisy, isLeOutput);
+ assertEquals(info.getProfile(), BluetoothProfile.LE_AUDIO);
+ assertEquals(info.isSuppressNoisyIntent(), supprNoisy);
+ assertEquals(info.isLeOutput(), isLeOutput);
+ }
+}
+
diff --git a/media/tests/MtpTests/src/android/mtp/MtpStorageManagerTest.java b/media/tests/MtpTests/src/android/mtp/MtpStorageManagerTest.java
index fdf65823b1f3..eb357f67caea 100644
--- a/media/tests/MtpTests/src/android/mtp/MtpStorageManagerTest.java
+++ b/media/tests/MtpTests/src/android/mtp/MtpStorageManagerTest.java
@@ -159,10 +159,11 @@ public class MtpStorageManagerTest {
Log.d(TAG, "sendObjectInfoChanged: " + id);
objectsInfoChanged.add(id);
}
- }, null);
+ }, /* subdirectories= */ null);
- mainMtpStorage = manager.addMtpStorage(mainStorage);
- secondaryMtpStorage = manager.addMtpStorage(secondaryStorage);
+ mainMtpStorage = manager.addMtpStorage(mainStorage, /* isHostWindows= */ () -> false);
+ secondaryMtpStorage = manager.addMtpStorage(secondaryStorage,
+ /* isHostWindows= */ () -> false);
}
@After
diff --git a/media/tests/TunerTest/OWNERS b/media/tests/TunerTest/OWNERS
index 73ea663aa37e..75548894c9f0 100644
--- a/media/tests/TunerTest/OWNERS
+++ b/media/tests/TunerTest/OWNERS
@@ -1,4 +1,4 @@
-amyjojo@google.com
-nchalko@google.com
quxiangfang@google.com
shubang@google.com
+hgchen@google.com
+kemiyagi@google.com
\ No newline at end of file