diff options
23 files changed, 738 insertions, 117 deletions
diff --git a/core/java/android/view/GLES20Canvas.java b/core/java/android/view/GLES20Canvas.java index d8fa850a17f8..fbfea95f6a72 100644 --- a/core/java/android/view/GLES20Canvas.java +++ b/core/java/android/view/GLES20Canvas.java @@ -137,9 +137,10 @@ class GLES20Canvas extends Canvas { @Override public boolean clipRect(float left, float top, float right, float bottom) { - // TODO: Implement - return false; + return nClipRect(mRenderer, left, top, right, bottom); } + + private native boolean nClipRect(int renderer, float left, float top, float right, float bottom); @Override public boolean clipRect(float left, float top, float right, float bottom, Region.Op op) { @@ -148,14 +149,14 @@ class GLES20Canvas extends Canvas { @Override public boolean clipRect(int left, int top, int right, int bottom) { - // TODO: Implement - return false; + return nClipRect(mRenderer, left, top, right, bottom); } + + private native boolean nClipRect(int renderer, int left, int top, int right, int bottom); @Override public boolean clipRect(Rect rect) { - // TODO: Implement - return false; + return clipRect(rect.left, rect.top, rect.right, rect.bottom); } @Override @@ -165,8 +166,7 @@ class GLES20Canvas extends Canvas { @Override public boolean clipRect(RectF rect) { - // TODO: Implement - return false; + return clipRect(rect.left, rect.top, rect.right, rect.bottom); } @Override @@ -198,14 +198,12 @@ class GLES20Canvas extends Canvas { @Override public boolean quickReject(Path path, EdgeType type) { - // TODO: Implement - return false; + throw new UnsupportedOperationException(); } @Override public boolean quickReject(RectF rect, EdgeType type) { - // TODO: Implement - return false; + return quickReject(rect.left, rect.top, rect.right, rect.bottom, type); } /////////////////////////////////////////////////////////////////////////// @@ -254,16 +252,16 @@ class GLES20Canvas extends Canvas { @Override public int save() { - // TODO: Implement - return 0; + return nSave(mRenderer, 0); } - + 
@Override public int save(int saveFlags) { - // TODO: Implement - return 0; + return nSave(mRenderer, saveFlags); } + private native int nSave(int renderer, int flags); + @Override public int saveLayer(RectF bounds, Paint paint, int saveFlags) { // TODO: Implement @@ -292,19 +290,24 @@ class GLES20Canvas extends Canvas { @Override public void restore() { - // TODO: Implement + nRestore(mRenderer); } + + private native void nRestore(int renderer); @Override public void restoreToCount(int saveCount) { - // TODO: Implement + nRestoreToCount(mRenderer, saveCount); } + private native void nRestoreToCount(int renderer, int saveCount); + @Override public int getSaveCount() { - // TODO: Implement - return 0; + return nGetSaveCount(mRenderer); } + + private native int nGetSaveCount(int renderer); /////////////////////////////////////////////////////////////////////////// // Filtering diff --git a/core/java/android/view/ViewGroup.java b/core/java/android/view/ViewGroup.java index 907306c418c6..34777cee79e6 100644 --- a/core/java/android/view/ViewGroup.java +++ b/core/java/android/view/ViewGroup.java @@ -236,8 +236,9 @@ public abstract class ViewGroup extends View implements ViewParent, ViewManager /** * When set, this ViewGroup should not intercept touch events. + * {@hide} */ - private static final int FLAG_DISALLOW_INTERCEPT = 0x80000; + protected static final int FLAG_DISALLOW_INTERCEPT = 0x80000; /** * Indicates which types of drawing caches are to be kept in memory. diff --git a/core/java/android/widget/AbsListView.java b/core/java/android/widget/AbsListView.java index c11c7e192af8..10927a7524d5 100644 --- a/core/java/android/widget/AbsListView.java +++ b/core/java/android/widget/AbsListView.java @@ -2076,6 +2076,14 @@ public abstract class AbsListView extends AdapterView<ListAdapter> implements Te } if (y != mLastY) { + // We may be here after stopping a fling and continuing to scroll. + // If so, we haven't disallowed intercepting touch events yet. 
+ // Make sure that we do so in case we're in a parent that can intercept. + if ((mGroupFlags & FLAG_DISALLOW_INTERCEPT) == 0 && + Math.abs(deltaY) > mTouchSlop) { + requestDisallowInterceptTouchEvent(true); + } + deltaY -= mMotionCorrection; int incrementalDeltaY = mLastY != Integer.MIN_VALUE ? y - mLastY : deltaY; diff --git a/core/jni/android_media_AudioTrack.cpp b/core/jni/android_media_AudioTrack.cpp index 65c04357adfd..ce43e736b145 100644 --- a/core/jni/android_media_AudioTrack.cpp +++ b/core/jni/android_media_AudioTrack.cpp @@ -166,7 +166,7 @@ static void audioCallback(int event, void* user, void *info) { static int android_media_AudioTrack_native_setup(JNIEnv *env, jobject thiz, jobject weak_this, jint streamType, jint sampleRateInHertz, jint channels, - jint audioFormat, jint buffSizeInBytes, jint memoryMode) + jint audioFormat, jint buffSizeInBytes, jint memoryMode, jintArray jSession) { LOGV("sampleRate=%d, audioFormat(from Java)=%d, channels=%x, buffSize=%d", sampleRateInHertz, audioFormat, channels, buffSizeInBytes); @@ -253,6 +253,20 @@ android_media_AudioTrack_native_setup(JNIEnv *env, jobject thiz, jobject weak_th lpJniStorage->mStreamType = atStreamType; + jint* nSession = NULL; + if (jSession) { + nSession = (jint *) env->GetPrimitiveArrayCritical(jSession, NULL); + if (nSession == NULL) { + LOGE("Error creating AudioTrack: Error retrieving session id pointer"); + delete lpJniStorage; + return AUDIOTRACK_ERROR; + } + } else { + LOGE("Error creating AudioTrack: invalid session ID pointer"); + delete lpJniStorage; + return AUDIOTRACK_ERROR; + } + // create the native AudioTrack object AudioTrack* lpTrack = new AudioTrack(); if (lpTrack == NULL) { @@ -273,7 +287,8 @@ android_media_AudioTrack_native_setup(JNIEnv *env, jobject thiz, jobject weak_th audioCallback, &(lpJniStorage->mCallbackData),//callback, callback data (user) 0,// notificationFrames == 0 since not using EVENT_MORE_DATA to feed the AudioTrack 0,// shared mem - true);// thread can call 
Java + true,// thread can call Java + nSession[0]);// audio session ID } else if (memoryMode == javaAudioTrackFields.MODE_STATIC) { // AudioTrack is using shared memory @@ -293,7 +308,8 @@ android_media_AudioTrack_native_setup(JNIEnv *env, jobject thiz, jobject weak_th audioCallback, &(lpJniStorage->mCallbackData),//callback, callback data (user)); 0,// notificationFrames == 0 since not using EVENT_MORE_DATA to feed the AudioTrack lpJniStorage->mMemBase,// shared mem - true);// thread can call Java + true,// thread can call Java + nSession[0]);// audio session ID } if (lpTrack->initCheck() != NO_ERROR) { @@ -301,6 +317,12 @@ android_media_AudioTrack_native_setup(JNIEnv *env, jobject thiz, jobject weak_th goto native_init_failure; } + // read the audio session ID back from AudioTrack in case we create a new session + nSession[0] = lpTrack->getSessionId(); + + env->ReleasePrimitiveArrayCritical(jSession, nSession, 0); + nSession = NULL; + // save our newly created C++ AudioTrack in the "nativeTrackInJavaObj" field // of the Java object (in mNativeTrackInJavaObj) env->SetIntField(thiz, javaAudioTrackFields.nativeTrackInJavaObj, (int)lpTrack); @@ -317,6 +339,9 @@ native_init_failure: env->SetIntField(thiz, javaAudioTrackFields.nativeTrackInJavaObj, 0); native_track_failure: + if (nSession != NULL) { + env->ReleasePrimitiveArrayCritical(jSession, nSession, 0); + } env->DeleteGlobalRef(lpJniStorage->mCallbackData.audioTrack_class); env->DeleteGlobalRef(lpJniStorage->mCallbackData.audioTrack_ref); delete lpJniStorage; @@ -785,7 +810,7 @@ static JNINativeMethod gMethods[] = { {"native_stop", "()V", (void *)android_media_AudioTrack_stop}, {"native_pause", "()V", (void *)android_media_AudioTrack_pause}, {"native_flush", "()V", (void *)android_media_AudioTrack_flush}, - {"native_setup", "(Ljava/lang/Object;IIIIII)I", + {"native_setup", "(Ljava/lang/Object;IIIIII[I)I", (void *)android_media_AudioTrack_native_setup}, {"native_finalize", "()V", (void 
*)android_media_AudioTrack_native_finalize}, {"native_release", "()V", (void *)android_media_AudioTrack_native_release}, diff --git a/core/jni/android_view_GLES20Canvas.cpp b/core/jni/android_view_GLES20Canvas.cpp index 57660836093e..8c2a04cb2706 100644 --- a/core/jni/android_view_GLES20Canvas.cpp +++ b/core/jni/android_view_GLES20Canvas.cpp @@ -54,7 +54,47 @@ static void android_view_GLES20Renderer_prepare(JNIEnv* env, jobject canvas, jin } // ---------------------------------------------------------------------------- -// Draw color +// State +// ---------------------------------------------------------------------------- + +static jint android_view_GLES20Renderer_save(JNIEnv* env, jobject canvas, jint renderer, + jint flags) { + + return UI->save(flags); +} + +static jint android_view_GLES20Renderer_getSaveCount(JNIEnv* env, jobject canvas, jint renderer) { + return UI->getSaveCount(); +} + +static void android_view_GLES20Renderer_restore(JNIEnv* env, jobject canvas, jint renderer) { + UI->restore(); +} + +static void android_view_GLES20Renderer_restoreToCount(JNIEnv* env, jobject canvas, jint renderer, + jint saveCount) { + + UI->restoreToCount(saveCount); +} + +// ---------------------------------------------------------------------------- +// Clipping +// ---------------------------------------------------------------------------- + +static bool android_view_GLES20Renderer_clipRectF(JNIEnv* env, jobject canvas, jint renderer, + jfloat left, jfloat top, jfloat right, jfloat bottom) { + + return UI->clipRect(left, top, right, bottom); +} + +static bool android_view_GLES20Renderer_clipRect(JNIEnv* env, jobject canvas, jint renderer, + jint left, jint top, jint right, jint bottom) { + + return UI->clipRect(float(left), float(top), float(right), float(bottom)); +} + +// ---------------------------------------------------------------------------- +// Drawing // ---------------------------------------------------------------------------- static void 
android_view_GLES20Renderer_drawColor(JNIEnv* env, jobject canvas, jint renderer, @@ -70,12 +110,20 @@ static void android_view_GLES20Renderer_drawColor(JNIEnv* env, jobject canvas, j const char* const kClassPathName = "android/view/GLES20Canvas"; static JNINativeMethod gMethods[] = { - { "nCreateRenderer", "()I", (void*) android_view_GLES20Renderer_createRenderer }, - { "nDestroyRenderer", "(I)V", (void*) android_view_GLES20Renderer_destroyRenderer }, - { "nSetViewport", "(III)V", (void*) android_view_GLES20Renderer_setViewport }, - { "nPrepare", "(I)V", (void*) android_view_GLES20Renderer_prepare }, + { "nCreateRenderer", "()I", (void*) android_view_GLES20Renderer_createRenderer }, + { "nDestroyRenderer", "(I)V", (void*) android_view_GLES20Renderer_destroyRenderer }, + { "nSetViewport", "(III)V", (void*) android_view_GLES20Renderer_setViewport }, + { "nPrepare", "(I)V", (void*) android_view_GLES20Renderer_prepare }, + + { "nSave", "(II)I", (void*) android_view_GLES20Renderer_save }, + { "nRestore", "(I)V", (void*) android_view_GLES20Renderer_restore }, + { "nRestoreToCount", "(II)V", (void*) android_view_GLES20Renderer_restoreToCount }, + { "nGetSaveCount", "(I)I", (void*) android_view_GLES20Renderer_getSaveCount }, + + { "nClipRect", "(IFFFF)Z", (void*) android_view_GLES20Renderer_clipRectF }, + { "nClipRect", "(IIIII)Z", (void*) android_view_GLES20Renderer_clipRect }, - { "nDrawColor", "(III)V", (void*) android_view_GLES20Renderer_drawColor }, + { "nDrawColor", "(III)V", (void*) android_view_GLES20Renderer_drawColor }, }; int register_android_view_GLES20Canvas(JNIEnv* env) { diff --git a/docs/html/guide/topics/data/backup.jd b/docs/html/guide/topics/data/backup.jd index aad0f923c41d..4e74a833897a 100644 --- a/docs/html/guide/topics/data/backup.jd +++ b/docs/html/guide/topics/data/backup.jd @@ -15,6 +15,8 @@ page.title=Data Backup <h2>In this document</h2> <ol> <li><a href="#Basics">The Basics</a></li> + <li><a href="#BackupManifest">Declaring the Backup Agent 
in Your Manifest</a></li> + <li><a href="#BackupKey">Registering for Android Backup Service</a></li> <li><a href="#BackupAgent">Extending BackupAgent</a> <ol> <li><a href="#RequiredMethods">Required Methods</a></li> @@ -45,32 +47,53 @@ page.title=Data Backup </div> <p>Android's {@link android.app.backup backup} service allows you to copy your persistent -application data to a remote "cloud" storage, in order to provide a restore point for the +application data to remote "cloud" storage, in order to provide a restore point for the application data and settings. If a user performs a factory reset or converts to a new Android-powered device, the system automatically restores your backup data when the application -is re-installed. This way, your users are not required to reproduce their previous data or +is re-installed. This way, your users don't need to reproduce their previous data or application settings. This process is completely transparent to the user and does not affect the functionality or user experience in your application.</p> -<p>Android-powered devices that support the backup service provide a cloud storage area that -saves your backup data and a backup transport that delivers your data to -the storage area and back to the device. During a backup -operation, Android's Backup Manager requests backup data from your application, then delivers it to -the cloud storage using the backup transport. During a restore operation, the Backup Manager -retrieves the backup data from the backup transport and returns it to your application -so it can restore the data to the device. The backup service is <em>not</em> designed for data -synchronization (you do not have access the backup data, except during a restore operation on the -device).</p> - -<p>The cloud storage used for backup won't necessarily be the same on all Android-powered devices. -The cloud storage and backup transport may differ between devices and service providers. 
-Where the backup data is stored is transparent to your application, but you are assured that your -application data cannot be read by other applications.</p> +<p>During a backup operation (which your application can request), Android's Backup Manager ({@link +android.app.backup.BackupManager}) queries your application for backup data, then hands it to +a backup transport, which then delivers the data to the cloud storage. During a +restore operation, the Backup Manager retrieves the backup data from the backup transport and +returns it to your application so your application can restore the data to the device. It's +possible for your application to request a restore, but that shouldn't be necessary—Android +automatically performs a restore operation when your application is installed and there exists +backup data associated with the user. The primary scenario in which backup data is restored is when +a user resets their device or upgrades to a new device and their previously installed +applications are re-installed.</p> + +<p class="note"><strong>Note:</strong> The backup service is <em>not</em> designed for +synchronizing application data with other clients or saving data that you'd like to access during +the normal application lifecycle. You cannot read or write backup data on demand and cannot access +it in any way other than through the APIs provided by the Backup Manager.</p> + +<p>The backup transport is the client-side component of Android's backup framework, which is +customizable by +the device manufacturer and service provider. The backup transport may differ from device to device +and which backup transport is available on any given device is transparent to your application. 
The +Backup Manager APIs isolate your application from the actual backup transport available on a given +device—your application communicates with the Backup Manager through a fixed set of APIs, +regardless of the underlying transport.</p> + +<p>Data backup is <em>not</em> guaranteed to be available on all Android-powered +devices. However, your application is not adversely affected in the event +that a device does not provide a backup transport. If you believe that users will benefit from data +backup in your application, then you can implement it as described in this document, test it, then +publish your application without any concern about which devices actually perform backup. When your +application runs on a device that does not provide a backup transport, your application operates +normally, but will not receive callbacks from the Backup Manager to backup data.</p> + +<p>Although you cannot know what the current transport is, you are always assured that your +backup data cannot be read by other applications on the device. Only the Backup Manager and backup +transport have access to the data you provide during a backup operation.</p> <p class="caution"><strong>Caution:</strong> Because the cloud storage and transport service can differ from device to device, Android makes no guarantees about the security of your data while -using backup. You should be cautious about using backup to store sensitive data, such as usernames -and passwords.</p> +using backup. You should always be cautious about using backup to store sensitive data, such as +usernames and passwords.</p> <h2 id="Basics">The Basics</h2> @@ -78,8 +101,8 @@ and passwords.</p> <p>To backup your application data, you need to implement a backup agent. Your backup agent is called by the Backup Manager to provide the data you want to back up. It is also called to restore your backup data when the application is re-installed. 
The Backup Manager handles all -your data transactions with the cloud storage and your backup agent handles all your data -transactions on the device.</p> +your data transactions with the cloud storage (using the backup transport) and your backup agent +handles all your data transactions on the device.</p> <p>To implement a backup agent, you must:</p> @@ -87,6 +110,11 @@ transactions on the device.</p> <li>Declare your backup agent in your manifest file with the <a href="{@docRoot}guide/topics/manifest/application-element.html#agent">{@code android:backupAgent}</a> attribute.</li> + <li>Register your application with a backup service. Google offers <a +href="http://code.google.com/android/backup/index.html">Android Backup Service</a> as a backup +service for most Android-powered devices, which requires that you register your application in +order for it to work. Any other backup services available might also require you to register +in order to store your data on their servers.</li> <li>Define a backup agent by either:</p> <ol type="a"> <li><a href="#backupAgent">Extending BackupAgent</a> @@ -118,7 +146,6 @@ href="{@docRoot}guide/topics/data/data-storage.html#filesInternal">internal stor - <h2 id="BackupManifest">Declaring the Backup Agent in Your Manifest</h2> <p>This is the easiest step, so once you've decided on the class name for your backup agent, declare @@ -160,6 +187,50 @@ remaining compatible with older devices.</p> +<h2 id="BackupKey">Registering for Android Backup Service</h2> + +<p>Google provides a backup transport with <a +href="http://code.google.com/android/backup/index.html">Android Backup Service</a> for most +Android-powered devices running Android 2.2 or greater.</p> + +<p>In order for you application to perform backup using Android Backup Service, you must +register your application with the service to receive a Backup Service Key, then +declare the Backup Service Key in your Android manifest.</p> + +<p>To get your Backup Service Key, <a 
+href="http://code.google.com/android/backup/signup.html">register for Android Backup Service</a>. +When you register, you will be provided a Backup Service Key and the appropriate {@code +<meta-data>} XML code for your Android manifest file, which you must include as a child of the +{@code <application>} element. For example:</p> + +<pre> +<application android:label="MyApplication" + android:backupAgent="MyBackupAgent"> + ... + <meta-data android:name="com.google.android.backup.api_key" + android:value="AEdPqrEAAAAIDaYEVgU6DJnyJdBmU7KLH3kszDXLv_4DIsEIyQ" /> +</application> +</pre> + +<p>The <code>android:name</code> must be <code>"com.google.android.backup.api_key"</code> and +the <code>android:value</code> must be the Backup Service Key received from the Android Backup +Service registration.</p> + +<p>If you have multiple applications, you must register each one, using the respective package +name.</p> + +<p class="note"><strong>Note:</strong> The backup transport provided by Android Backup Service is +not guaranteed to be available +on all Android-powered devices that support backup. Some devices might support backup +using a different transport, some devices might not support backup at all, and there is no way for +your application to know what transport is used on the device. However, if you implement backup for +your application, you should always include a Backup Service Key for Android Backup Service so +your application can perform backup when the device uses the Android Backup Service transport. 
If +the device does not use Android Backup Service, then the {@code <meta-data>} element with the +Backup Service Key is ignored.</p> + + + <h2 id="BackupAgent">Extending BackupAgent</h2> diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h index d956882c475e..38e3d44bbc03 100644 --- a/include/media/AudioRecord.h +++ b/include/media/AudioRecord.h @@ -100,6 +100,19 @@ public: typedef void (*callback_t)(int event, void* user, void *info); + /* Returns the minimum frame count required for the successful creation of + * an AudioRecord object. + * Returned status (from utils/Errors.h) can be: + * - NO_ERROR: successful operation + * - NO_INIT: audio server or audio hardware not initialized + * - BAD_VALUE: unsupported configuration + */ + + static status_t getMinFrameCount(int* frameCount, + uint32_t sampleRate, + int format, + int channelCount); + /* Constructs an uninitialized AudioRecord. No connection with * AudioFlinger takes place. */ diff --git a/include/media/IMediaPlayerService.h b/include/media/IMediaPlayerService.h index 31c09917fc2e..e892875113fd 100644 --- a/include/media/IMediaPlayerService.h +++ b/include/media/IMediaPlayerService.h @@ -40,8 +40,11 @@ public: virtual sp<IMediaRecorder> createMediaRecorder(pid_t pid) = 0; virtual sp<IMediaMetadataRetriever> createMetadataRetriever(pid_t pid) = 0; - virtual sp<IMediaPlayer> create(pid_t pid, const sp<IMediaPlayerClient>& client, const char* url, const KeyedVector<String8, String8> *headers = NULL) = 0; - virtual sp<IMediaPlayer> create(pid_t pid, const sp<IMediaPlayerClient>& client, int fd, int64_t offset, int64_t length) = 0; + virtual sp<IMediaPlayer> create(pid_t pid, const sp<IMediaPlayerClient>& client, + const char* url, const KeyedVector<String8, String8> *headers = NULL, + int audioSessionId = 0) = 0; + virtual sp<IMediaPlayer> create(pid_t pid, const sp<IMediaPlayerClient>& client, + int fd, int64_t offset, int64_t length, int audioSessionId) = 0; virtual sp<IMemory> decode(const 
char* url, uint32_t *pSampleRate, int* pNumChannels, int* pFormat) = 0; virtual sp<IMemory> decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, int* pFormat) = 0; virtual sp<IOMX> getOMX() = 0; diff --git a/include/media/mediaplayer.h b/include/media/mediaplayer.h index dc783ce9b6ff..62a4e50a2e68 100644 --- a/include/media/mediaplayer.h +++ b/include/media/mediaplayer.h @@ -172,6 +172,8 @@ public: status_t getMetadata(bool update_only, bool apply_filter, Parcel *metadata); status_t suspend(); status_t resume(); + status_t setAudioSessionId(int sessionId); + int getAudioSessionId(); private: void clear_l(); status_t seekTo_l(int msec); @@ -198,6 +200,7 @@ private: float mRightVolume; int mVideoWidth; int mVideoHeight; + int mAudioSessionId; }; }; // namespace android diff --git a/libs/hwui/Matrix.cpp b/libs/hwui/Matrix.cpp index 68b245b7e822..638c1b8fe8ac 100644 --- a/libs/hwui/Matrix.cpp +++ b/libs/hwui/Matrix.cpp @@ -140,10 +140,12 @@ void Matrix4::loadOrtho(float left, float right, float bottom, float top, float } void Matrix4::dump() const { - LOGD("%f %f %f %f", mMat[0], mMat[4], mMat[ 8], mMat[12]); - LOGD("%f %f %f %f", mMat[1], mMat[5], mMat[ 9], mMat[13]); - LOGD("%f %f %f %f", mMat[2], mMat[6], mMat[10], mMat[14]); - LOGD("%f %f %f %f", mMat[3], mMat[7], mMat[11], mMat[15]); + LOGD("Matrix4["); + LOGD(" %f %f %f %f", mMat[0], mMat[4], mMat[ 8], mMat[12]); + LOGD(" %f %f %f %f", mMat[1], mMat[5], mMat[ 9], mMat[13]); + LOGD(" %f %f %f %f", mMat[2], mMat[6], mMat[10], mMat[14]); + LOGD(" %f %f %f %f", mMat[3], mMat[7], mMat[11], mMat[15]); + LOGD("]"); } }; diff --git a/libs/hwui/OpenGLRenderer.cpp b/libs/hwui/OpenGLRenderer.cpp index 35825d2467d7..1416ce15d7f4 100644 --- a/libs/hwui/OpenGLRenderer.cpp +++ b/libs/hwui/OpenGLRenderer.cpp @@ -35,6 +35,9 @@ namespace android { OpenGLRenderer::OpenGLRenderer() { LOGD("Create OpenGLRenderer"); + + mSnapshot = new Snapshot; + mSaveCount = 0; } OpenGLRenderer::~OpenGLRenderer() 
{ @@ -47,13 +50,80 @@ void OpenGLRenderer::setViewport(int width, int height) { mat4 ortho; ortho.loadOrtho(0, width, height, 0, 0, 1); ortho.copyTo(mOrthoMatrix); + + mWidth = width; + mHeight = height; } void OpenGLRenderer::prepare() { glDisable(GL_SCISSOR_TEST); + glClearColor(0.0f, 0.0f, 0.0f, 0.0f); glClear(GL_COLOR_BUFFER_BIT); + glEnable(GL_SCISSOR_TEST); + mSnapshot->clipRect.set(0.0f, 0.0f, mWidth, mHeight); +} + +int OpenGLRenderer::getSaveCount() const { + return mSaveCount; +} + +int OpenGLRenderer::save(int flags) { + return saveSnapshot(); +} + +void OpenGLRenderer::restore() { + if (mSaveCount == 0) return; + + if (restoreSnapshot()) { + setScissorFromClip(); + } +} + +void OpenGLRenderer::restoreToCount(int saveCount) { + if (saveCount <= 0 || saveCount > mSaveCount) return; + + bool restoreClip = false; + + while (mSaveCount != saveCount - 1) { + restoreClip |= restoreSnapshot(); + } + + if (restoreClip) { + setScissorFromClip(); + } +} + +int OpenGLRenderer::saveSnapshot() { + mSnapshot = new Snapshot(mSnapshot); + mSaveCount++; + return mSaveCount; +} + +bool OpenGLRenderer::restoreSnapshot() { + // TODO: handle local transformations + bool restoreClip = mSnapshot->flags & Snapshot::kFlagClipSet; + + mSaveCount--; + mSnapshot = mSnapshot->previous; + + return restoreClip; +} + +void OpenGLRenderer::setScissorFromClip() { + Rect clip = mSnapshot->clipRect; + glScissor(clip.left, clip.top, clip.getWidth(), clip.getHeight()); +} + +bool OpenGLRenderer::clipRect(float left, float top, float right, float bottom) { + // TODO: take local translate transform into account + bool clipped = mSnapshot->clipRect.intersect(left, top, right, bottom); + if (clipped) { + mSnapshot->flags |= Snapshot::kFlagClipSet; + setScissorFromClip(); + } + return clipped; } void OpenGLRenderer::drawColor(int color, SkXfermode::Mode mode) { diff --git a/libs/hwui/OpenGLRenderer.h b/libs/hwui/OpenGLRenderer.h index 12363360b65f..8a541fcf3a81 100644 --- 
a/libs/hwui/OpenGLRenderer.h +++ b/libs/hwui/OpenGLRenderer.h @@ -19,8 +19,32 @@ #include <SkXfermode.h> +#include <utils/RefBase.h> + +#include "Rect.h" + namespace android { +class Snapshot: public LightRefBase<Snapshot> { +public: + Snapshot() { } + + Snapshot(const sp<Snapshot> s): clipRect(s->clipRect), flags(0), previous(s) { } + + enum Flags { + kFlagClipSet = 0x1, + }; + + // Clipping rectangle at the time of this snapshot + Rect clipRect; + + // Dirty flags + int flags; + + // Previous snapshot in the frames stack + sp<Snapshot> previous; +}; // struct Snapshot + class OpenGLRenderer { public: OpenGLRenderer(); @@ -29,12 +53,32 @@ public: void setViewport(int width, int height); void prepare(); + int getSaveCount() const; + int save(int flags); + void restore(); + void restoreToCount(int saveCount); + + bool clipRect(float left, float top, float right, float bottom); + void drawColor(int color, SkXfermode::Mode mode); private: + int saveSnapshot(); + bool restoreSnapshot(); + + void setScissorFromClip(); + + // Dimensions of the drawing surface + int mWidth, mHeight; + // Matrix used for ortho projection in shaders float mOrthoMatrix[16]; -}; + + // Number of saved states + int mSaveCount; + // Current state + sp<Snapshot> mSnapshot; +}; // class OpenGLRenderer }; // namespace android diff --git a/libs/hwui/Rect.h b/libs/hwui/Rect.h new file mode 100644 index 000000000000..724bd1a2b742 --- /dev/null +++ b/libs/hwui/Rect.h @@ -0,0 +1,125 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_RECT_H +#define ANDROID_RECT_H + +namespace android { + +/////////////////////////////////////////////////////////////////////////////// +// Structs +/////////////////////////////////////////////////////////////////////////////// + +struct Rect { + float left; + float top; + float right; + float bottom; + + Rect(): left(0), top(0), right(0), bottom(0) { } + + Rect(const Rect& r) { + set(r); + } + + Rect(Rect& r) { + set(r); + } + + Rect& operator=(const Rect& r) { + set(r); + return *this; + } + + Rect& operator=(Rect& r) { + set(r); + return *this; + } + + friend int operator==(const Rect& a, const Rect& b) { + return !memcmp(&a, &b, sizeof(a)); + } + + friend int operator!=(const Rect& a, const Rect& b) { + return memcmp(&a, &b, sizeof(a)); + } + + bool isEmpty() const { + return left >= right || top >= bottom; + } + + void setEmpty() { + memset(this, 0, sizeof(*this)); + } + + void set(float left, float top, float right, float bottom) { + this->left = left; + this->right = right; + this->top = top; + this->bottom = bottom; + } + + void set(const Rect& r) { + set(r.left, r.top, r.right, r.bottom); + } + + float getWidth() const { + return right - left; + } + + float getHeight() const { + return bottom - top; + } + + bool intersects(float left, float top, float right, float bottom) const { + return left < right && top < bottom && + this->left < this->right && this->top < this->bottom && + this->left < right && left < this->right && + this->top < bottom && top < this->bottom; + } + + bool intersects(const Rect& r) const { + return intersects(r.left, r.top, r.right, r.bottom); + } + + bool intersect(float left, float top, float right, float bottom) { + if (left < right && top < bottom && !this->isEmpty() && + this->left < right && left < this->right && + this->top < bottom && top < this->bottom) { + + if (this->left < left) this->left = 
left; + if (this->top < top) this->top = top; + if (this->right > right) this->right = right; + if (this->bottom > bottom) this->bottom = bottom; + + return true; + } + return false; + } + + bool intersect(const Rect& r) { + return intersect(r.left, r.top, r.right, r.bottom); + } + + void dump() const { + LOGD("Rect[l=%f t=%f r=%f b=%f]", left, top, right, bottom); + } + +}; // struct Rect + +}; // namespace android + +#endif // ANDROID_RECT_H diff --git a/media/java/android/media/AudioService.java b/media/java/android/media/AudioService.java index bd78f9363f88..50f0674c28ab 100644 --- a/media/java/android/media/AudioService.java +++ b/media/java/android/media/AudioService.java @@ -1627,6 +1627,12 @@ public class AudioService extends IAudioService.Stub { case MSG_MEDIA_SERVER_STARTED: Log.e(TAG, "Media server started."); + // indicate to audio HAL that we start the reconfiguration phase after a media + // server crash + // Note that MSG_MEDIA_SERVER_STARTED message is only received when the media server + // process restarts after a crash, not the first time it is started. 
+ AudioSystem.setParameters("restarting=true"); + // Restore device connection states Set set = mConnectedDevices.entrySet(); Iterator i = set.iterator(); @@ -1660,6 +1666,9 @@ public class AudioService extends IAudioService.Stub { // Restore ringer mode setRingerModeInt(getRingerMode(), false); + + // indicate the end of reconfiguration phase to audio HAL + AudioSystem.setParameters("restarting=false"); break; case MSG_PLAY_SOUND_EFFECT: diff --git a/media/java/android/media/AudioTrack.java b/media/java/android/media/AudioTrack.java index 1e8d72f47a34..079c41fdfab7 100644 --- a/media/java/android/media/AudioTrack.java +++ b/media/java/android/media/AudioTrack.java @@ -210,6 +210,10 @@ public class AudioTrack * @see AudioFormat#ENCODING_PCM_16BIT */ private int mAudioFormat = AudioFormat.ENCODING_PCM_16BIT; + /** + * Audio session ID + */ + private int mSessionId = 0; //-------------------------------- @@ -258,6 +262,48 @@ public class AudioTrack public AudioTrack(int streamType, int sampleRateInHz, int channelConfig, int audioFormat, int bufferSizeInBytes, int mode) throws IllegalArgumentException { + this(streamType, sampleRateInHz, channelConfig, audioFormat, + bufferSizeInBytes, mode, 0); + } + + /** + * Class constructor with audio session. Use this constructor when the AudioTrack must be + * attached to a particular audio session. The primary use of the audio session ID is to + * associate audio effects to a particular instance of AudioTrack: if an audio session ID + * is provided when creating an AudioEffect, this effect will be applied only to audio tracks + * and media players in the same session and not to the output mix. + * When an AudioTrack is created without specifying a session, it will create its own session + * which can be retrieved by calling the {@link #getAudioSessionId()} method. + * If a session ID is provided, this AudioTrack will share effects attached to this session + * with all other media players or audio tracks in the same session. 
+ * @param streamType the type of the audio stream. See + * {@link AudioManager#STREAM_VOICE_CALL}, {@link AudioManager#STREAM_SYSTEM}, + * {@link AudioManager#STREAM_RING}, {@link AudioManager#STREAM_MUSIC} and + * {@link AudioManager#STREAM_ALARM} + * @param sampleRateInHz the sample rate expressed in Hertz. Examples of rates are (but + * not limited to) 44100, 22050 and 11025. + * @param channelConfig describes the configuration of the audio channels. + * See {@link AudioFormat#CHANNEL_OUT_MONO} and + * {@link AudioFormat#CHANNEL_OUT_STEREO} + * @param audioFormat the format in which the audio data is represented. + * See {@link AudioFormat#ENCODING_PCM_16BIT} and + * {@link AudioFormat#ENCODING_PCM_8BIT} + * @param bufferSizeInBytes the total size (in bytes) of the buffer where audio data is read + * from for playback. If using the AudioTrack in streaming mode, you can write data into + * this buffer in smaller chunks than this size. If using the AudioTrack in static mode, + * this is the maximum size of the sound that will be played for this instance. + * See {@link #getMinBufferSize(int, int, int)} to determine the minimum required buffer size + * for the successful creation of an AudioTrack instance in streaming mode. Using values + * smaller than getMinBufferSize() will result in an initialization failure. + * @param mode streaming or static buffer. See {@link #MODE_STATIC} and {@link #MODE_STREAM} + * @param sessionId Id of audio session the AudioTrack must be attached to + * @throws java.lang.IllegalArgumentException + // FIXME: unhide. 
+ * @hide + */ + public AudioTrack(int streamType, int sampleRateInHz, int channelConfig, int audioFormat, + int bufferSizeInBytes, int mode, int sessionId) + throws IllegalArgumentException { mState = STATE_UNINITIALIZED; // remember which looper is associated with the AudioTrack instanciation @@ -269,15 +315,23 @@ public class AudioTrack audioBuffSizeCheck(bufferSizeInBytes); + if (sessionId < 0) { + throw (new IllegalArgumentException("Invalid audio session ID: "+sessionId)); + } + + int[] session = new int[1]; + session[0] = sessionId; // native initialization int initResult = native_setup(new WeakReference<AudioTrack>(this), mStreamType, mSampleRate, mChannels, mAudioFormat, - mNativeBufferSizeInBytes, mDataLoadMode); + mNativeBufferSizeInBytes, mDataLoadMode, session); if (initResult != SUCCESS) { loge("Error code "+initResult+" when initializing AudioTrack."); return; // with mState == STATE_UNINITIALIZED } + mSessionId = session[0]; + if (mDataLoadMode == MODE_STATIC) { mState = STATE_NO_STATIC_DATA; } else { @@ -590,6 +644,17 @@ public class AudioTrack } } + /** + * Returns the audio session ID. + * + * @return the ID of the audio session this AudioTrack belongs to. + // FIXME: unhide. + // FIXME: link to AudioEffect class when public. 
+ * @hide + */ + public int getAudioSessionId() { + return mSessionId; + } //-------------------------------------------------------------------------- // Initialization / configuration @@ -1012,7 +1077,7 @@ public class AudioTrack private native final int native_setup(Object audiotrack_this, int streamType, int sampleRate, int nbChannels, int audioFormat, - int buffSizeInBytes, int mode); + int buffSizeInBytes, int mode, int[] sessionId); private native final void native_finalize(); @@ -1056,6 +1121,7 @@ public class AudioTrack static private native final int native_get_min_buff_size( int sampleRateInHz, int channelConfig, int audioFormat); + private native final int native_get_session_id(); //--------------------------------------------------------- // Utility methods diff --git a/media/java/android/media/MediaPlayer.java b/media/java/android/media/MediaPlayer.java index c9d46508aecf..8caa07a5821f 100644 --- a/media/java/android/media/MediaPlayer.java +++ b/media/java/android/media/MediaPlayer.java @@ -423,6 +423,18 @@ import java.lang.ref.WeakReference; * <td>Successful invoke of this method in a valid state transfers the * object to the <em>Stopped</em> state. Calling this method in an * invalid state transfers the object to the <em>Error</em> state.</p></td></tr> + * <tr><td>setAudioSessionId </p></td> + * <td>{Idle} </p></td> + * <td>{Initialized, Prepared, Started, Paused, Stopped, PlaybackCompleted, + * Error} </p></td> + * <td>This method must be called in idle state as the audio session ID must be known before + * calling setDataSource. Calling it does not change the object state. </p></td></tr> + * <tr><td>getAudioSessionId </p></td> + * <td>any </p></td> + * <td>{} </p></td> + * <td>This method can be called in any state and calling it does not change + * the object state. 
</p></td></tr> + * + * </table> + * + * <a name="Permissions"></a> @@ -1159,6 +1171,38 @@ public class MediaPlayer public native Bitmap getFrameAt(int msec) throws IllegalStateException; /** + * Sets the audio session ID. + * + * @param sessionId: the audio session ID. + * The audio session ID is a system wide unique identifier for the audio stream played by + * this MediaPlayer instance. + * The primary use of the audio session ID is to associate audio effects to a particular + * instance of MediaPlayer: if an audio session ID is provided when creating an audio effect, + * this effect will be applied only to the audio content of media players within the same + * audio session and not to the output mix. + * When created, a MediaPlayer instance automatically generates its own audio session ID. + * However, it is possible to force this player to be part of an already existing audio session + * by calling this method. + * This method must be called before one of the overloaded <code> setDataSource </code> methods. + * @throws IllegalStateException if it is called in an invalid state + * + // FIXME: unhide. + // FIXME: link to AudioEffect class when public. + * @hide + */ + public native void setAudioSessionId(int sessionId) throws IllegalArgumentException, IllegalStateException; + + /** + * Returns the audio session ID. + * + * @return the audio session ID. {@see #setAudioSessionId(int)}. + * Note that the audio session ID is 0 only if a problem occurred when the MediaPlayer was constructed. + // FIXME: unhide. + * @hide + */ + public native int getAudioSessionId(); + + /** + * @param request Parcel destinated to the media player. The + * Interface token must be set to the IMediaPlayer + * one to be routed correctly through the system. 
diff --git a/media/jni/android_media_MediaPlayer.cpp b/media/jni/android_media_MediaPlayer.cpp index 60ff26493487..c5250d7bb44f 100644 --- a/media/jni/android_media_MediaPlayer.cpp +++ b/media/jni/android_media_MediaPlayer.cpp @@ -292,7 +292,7 @@ static void setVideoSurface(const sp<MediaPlayer>& mp, JNIEnv *env, jobject thiz if (surface != NULL) { const sp<Surface> native_surface = get_surface(env, surface); LOGV("prepare: surface=%p (id=%d)", - native_surface.get(), native_surface->ID()); + native_surface.get(), native_surface->getIdentity()); mp->setVideoSurface(native_surface); } } @@ -332,7 +332,7 @@ android_media_MediaPlayer_prepareAsync(JNIEnv *env, jobject thiz) if (surface != NULL) { const sp<Surface> native_surface = get_surface(env, surface); LOGV("prepareAsync: surface=%p (id=%d)", - native_surface.get(), native_surface->ID()); + native_surface.get(), native_surface->getIdentity()); mp->setVideoSurface(native_surface); } process_media_player_call( env, thiz, mp->prepareAsync(), "java/io/IOException", "Prepare Async failed." ); @@ -705,6 +705,27 @@ android_media_MediaPlayer_native_suspend_resume( return isSuspend ? 
mp->suspend() : mp->resume(); } +static void android_media_MediaPlayer_set_audio_session_id(JNIEnv *env, jobject thiz, jint sessionId) { + LOGV("set_session_id(): %d", sessionId); + sp<MediaPlayer> mp = getMediaPlayer(env, thiz); + if (mp == NULL ) { + jniThrowException(env, "java/lang/IllegalStateException", NULL); + return; + } + process_media_player_call( env, thiz, mp->setAudioSessionId(sessionId), NULL, NULL ); +} + +static jint android_media_MediaPlayer_get_audio_session_id(JNIEnv *env, jobject thiz) { + LOGV("get_session_id()"); + sp<MediaPlayer> mp = getMediaPlayer(env, thiz); + if (mp == NULL ) { + jniThrowException(env, "java/lang/IllegalStateException", NULL); + return 0; + } + + return mp->getAudioSessionId(); +} + // ---------------------------------------------------------------------------- static JNINativeMethod gMethods[] = { @@ -738,6 +759,8 @@ static JNINativeMethod gMethods[] = { {"native_finalize", "()V", (void *)android_media_MediaPlayer_native_finalize}, {"snoop", "([SI)I", (void *)android_media_MediaPlayer_snoop}, {"native_suspend_resume", "(Z)I", (void *)android_media_MediaPlayer_native_suspend_resume}, + {"getAudioSessionId", "()I", (void *)android_media_MediaPlayer_get_audio_session_id}, + {"setAudioSessionId", "(I)V", (void *)android_media_MediaPlayer_set_audio_session_id}, }; static const char* const kClassPathName = "android/media/MediaPlayer"; diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index a2436ab805f7..a6c515c51051 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -41,6 +41,38 @@ #define UNLIKELY( exp ) (__builtin_expect( (exp) != 0, false )) namespace android { +// --------------------------------------------------------------------------- + +// static +status_t AudioRecord::getMinFrameCount( + int* frameCount, + uint32_t sampleRate, + int format, + int channelCount) +{ + size_t size = 0; + if (AudioSystem::getInputBufferSize(sampleRate, format, channelCount, 
&size) + != NO_ERROR) { + LOGE("AudioSystem could not query the input buffer size."); + return NO_INIT; + } + + if (size == 0) { + LOGE("Unsupported configuration: sampleRate %d, format %d, channelCount %d", + sampleRate, format, channelCount); + return BAD_VALUE; + } + + // We double the size of input buffer for ping pong use of record buffer. + size <<= 1; + + if (AudioSystem::isLinearPCM(format)) { + size /= channelCount * (format == AudioSystem::PCM_16_BIT ? 2 : 1); + } + + *frameCount = size; + return NO_ERROR; +} // --------------------------------------------------------------------------- @@ -132,29 +164,11 @@ status_t AudioRecord::set( } // validate framecount - size_t inputBuffSizeInBytes = -1; - if (AudioSystem::getInputBufferSize(sampleRate, format, channelCount, &inputBuffSizeInBytes) - != NO_ERROR) { - LOGE("AudioSystem could not query the input buffer size."); - return NO_INIT; - } - - if (inputBuffSizeInBytes == 0) { - LOGE("Recording parameters are not supported: sampleRate %d, channelCount %d, format %d", - sampleRate, channelCount, format); - return BAD_VALUE; - } - - int frameSizeInBytes = channelCount * (format == AudioSystem::PCM_16_BIT ? 2 : 1); - if (AudioSystem::isLinearPCM(format)) { - frameSizeInBytes = channelCount * (format == AudioSystem::PCM_16_BIT ? sizeof(int16_t) : sizeof(int8_t)); - } else { - frameSizeInBytes = sizeof(int8_t); + int minFrameCount = 0; + status_t status = getMinFrameCount(&minFrameCount, sampleRate, format, channelCount); + if (status != NO_ERROR) { + return status; } - - - // We use 2* size of input buffer for ping pong use of record buffer. 
- int minFrameCount = 2 * inputBuffSizeInBytes / frameSizeInBytes; LOGV("AudioRecord::set() minFrameCount = %d", minFrameCount); if (frameCount == 0) { @@ -170,9 +184,8 @@ status_t AudioRecord::set( mSessionId = sessionId; // create the IAudioRecord - status_t status = openRecord(sampleRate, format, channelCount, - frameCount, flags, input); - + status = openRecord(sampleRate, format, channelCount, + frameCount, flags, input); if (status != NO_ERROR) { return status; } diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 2118f8f350c2..0f2093a176dd 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -92,7 +92,8 @@ AudioTrack::AudioTrack( : mStatus(NO_INIT) { mStatus = set(streamType, sampleRate, format, channels, - frameCount, flags, cbf, user, notificationFrames, 0); + frameCount, flags, cbf, user, notificationFrames, + 0, false, sessionId); } AudioTrack::AudioTrack( @@ -109,7 +110,8 @@ AudioTrack::AudioTrack( : mStatus(NO_INIT) { mStatus = set(streamType, sampleRate, format, channels, - 0, flags, cbf, user, notificationFrames, sharedBuffer); + 0, flags, cbf, user, notificationFrames, + sharedBuffer, false, sessionId); } AudioTrack::~AudioTrack() diff --git a/media/libmedia/IMediaPlayerService.cpp b/media/libmedia/IMediaPlayerService.cpp index 71c5f862cdad..1ae222ea4638 100644 --- a/media/libmedia/IMediaPlayerService.cpp +++ b/media/libmedia/IMediaPlayerService.cpp @@ -58,7 +58,7 @@ public: virtual sp<IMediaPlayer> create( pid_t pid, const sp<IMediaPlayerClient>& client, - const char* url, const KeyedVector<String8, String8> *headers) { + const char* url, const KeyedVector<String8, String8> *headers, int audioSessionId) { Parcel data, reply; data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor()); data.writeInt32(pid); @@ -75,8 +75,10 @@ public: data.writeString8(headers->valueAt(i)); } } + data.writeInt32(audioSessionId); remote()->transact(CREATE_URL, data, &reply); + return 
interface_cast<IMediaPlayer>(reply.readStrongBinder()); } @@ -89,7 +91,8 @@ public: return interface_cast<IMediaRecorder>(reply.readStrongBinder()); } - virtual sp<IMediaPlayer> create(pid_t pid, const sp<IMediaPlayerClient>& client, int fd, int64_t offset, int64_t length) + virtual sp<IMediaPlayer> create(pid_t pid, const sp<IMediaPlayerClient>& client, int fd, + int64_t offset, int64_t length, int audioSessionId) { Parcel data, reply; data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor()); @@ -98,8 +101,11 @@ public: data.writeFileDescriptor(fd); data.writeInt64(offset); data.writeInt64(length); + data.writeInt32(audioSessionId); + remote()->transact(CREATE_FD, data, &reply); - return interface_cast<IMediaPlayer>(reply.readStrongBinder()); + + return interface_cast<IMediaPlayer>(reply.readStrongBinder()); } virtual sp<IMemory> decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, int* pFormat) @@ -166,9 +172,10 @@ status_t BnMediaPlayerService::onTransact( String8 value = data.readString8(); headers.add(key, value); } + int audioSessionId = data.readInt32(); sp<IMediaPlayer> player = create( - pid, client, url, numHeaders > 0 ? 
&headers : NULL, audioSessionId); reply->writeStrongBinder(player->asBinder()); return NO_ERROR; @@ -180,7 +187,9 @@ status_t BnMediaPlayerService::onTransact( int fd = dup(data.readFileDescriptor()); int64_t offset = data.readInt64(); int64_t length = data.readInt64(); - sp<IMediaPlayer> player = create(pid, client, fd, offset, length); + int audioSessionId = data.readInt32(); + + sp<IMediaPlayer> player = create(pid, client, fd, offset, length, audioSessionId); reply->writeStrongBinder(player->asBinder()); return NO_ERROR; } break; diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp index c6bbbcc9cf2d..d5a3c133e816 100644 --- a/media/libmedia/mediaplayer.cpp +++ b/media/libmedia/mediaplayer.cpp @@ -55,6 +55,7 @@ MediaPlayer::MediaPlayer() mLeftVolume = mRightVolume = 1.0; mVideoWidth = mVideoHeight = 0; mLockThreadId = 0; + mAudioSessionId = AudioSystem::newAudioSessionId(); } MediaPlayer::~MediaPlayer() @@ -137,7 +138,7 @@ status_t MediaPlayer::setDataSource( const sp<IMediaPlayerService>& service(getMediaPlayerService()); if (service != 0) { sp<IMediaPlayer> player( - service->create(getpid(), this, url, headers)); + service->create(getpid(), this, url, headers, mAudioSessionId)); err = setDataSource(player); } } @@ -150,7 +151,7 @@ status_t MediaPlayer::setDataSource(int fd, int64_t offset, int64_t length) status_t err = UNKNOWN_ERROR; const sp<IMediaPlayerService>& service(getMediaPlayerService()); if (service != 0) { - sp<IMediaPlayer> player(service->create(getpid(), this, fd, offset, length)); + sp<IMediaPlayer> player(service->create(getpid(), this, fd, offset, length, mAudioSessionId)); err = setDataSource(player); } return err; @@ -501,6 +502,27 @@ status_t MediaPlayer::setVolume(float leftVolume, float rightVolume) return OK; } +status_t MediaPlayer::setAudioSessionId(int sessionId) +{ + LOGV("MediaPlayer::setAudioSessionId(%d)", sessionId); + Mutex::Autolock _l(mLock); + if (!(mCurrentState & MEDIA_PLAYER_IDLE)) { + 
LOGE("setAudioSessionId called in state %d", mCurrentState); + return INVALID_OPERATION; + } + if (sessionId < 0) { + return BAD_VALUE; + } + mAudioSessionId = sessionId; + return NO_ERROR; +} + +int MediaPlayer::getAudioSessionId() +{ + Mutex::Autolock _l(mLock); + return mAudioSessionId; +} + void MediaPlayer::notify(int msg, int ext1, int ext2) { LOGV("message received msg=%d, ext1=%d, ext2=%d", msg, ext1, ext2); diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp index d7ca6356300d..d45c17b1e5ad 100644 --- a/media/libmediaplayerservice/MediaPlayerService.cpp +++ b/media/libmediaplayerservice/MediaPlayerService.cpp @@ -252,11 +252,12 @@ sp<IMediaMetadataRetriever> MediaPlayerService::createMetadataRetriever(pid_t pi sp<IMediaPlayer> MediaPlayerService::create( pid_t pid, const sp<IMediaPlayerClient>& client, const char* url, - const KeyedVector<String8, String8> *headers) + const KeyedVector<String8, String8> *headers, int audioSessionId) { int32_t connId = android_atomic_inc(&mNextConnId); - sp<Client> c = new Client(this, pid, connId, client); - LOGV("Create new client(%d) from pid %d, url=%s, connId=%d", connId, pid, url, connId); + sp<Client> c = new Client(this, pid, connId, client, audioSessionId); + LOGV("Create new client(%d) from pid %d, url=%s, connId=%d, audioSessionId=%d", + connId, pid, url, connId, audioSessionId); if (NO_ERROR != c->setDataSource(url, headers)) { c.clear(); @@ -269,12 +270,12 @@ sp<IMediaPlayer> MediaPlayerService::create( } sp<IMediaPlayer> MediaPlayerService::create(pid_t pid, const sp<IMediaPlayerClient>& client, - int fd, int64_t offset, int64_t length) + int fd, int64_t offset, int64_t length, int audioSessionId) { int32_t connId = android_atomic_inc(&mNextConnId); - sp<Client> c = new Client(this, pid, connId, client); - LOGV("Create new client(%d) from pid %d, fd=%d, offset=%lld, length=%lld", - connId, pid, fd, offset, length); + sp<Client> c = new 
Client(this, pid, connId, client, audioSessionId); + LOGV("Create new client(%d) from pid %d, fd=%d, offset=%lld, length=%lld, audioSessionId=%d", + connId, pid, fd, offset, length, audioSessionId); if (NO_ERROR != c->setDataSource(fd, offset, length)) { c.clear(); } else { @@ -609,7 +610,7 @@ void MediaPlayerService::removeClient(wp<Client> client) } MediaPlayerService::Client::Client(const sp<MediaPlayerService>& service, pid_t pid, - int32_t connId, const sp<IMediaPlayerClient>& client) + int32_t connId, const sp<IMediaPlayerClient>& client, int audioSessionId) { LOGV("Client(%d) constructor", connId); mPid = pid; @@ -618,6 +619,8 @@ MediaPlayerService::Client::Client(const sp<MediaPlayerService>& service, pid_t mClient = client; mLoop = false; mStatus = NO_INIT; + mAudioSessionId = audioSessionId; + #if CALLBACK_ANTAGONIZER LOGD("create Antagonizer"); mAntagonizer = new Antagonizer(notify, this); @@ -871,7 +874,7 @@ status_t MediaPlayerService::Client::setDataSource( if (p == NULL) return NO_INIT; if (!p->hardwareOutput()) { - mAudioOutput = new AudioOutput(); + mAudioOutput = new AudioOutput(mAudioSessionId); static_cast<MediaPlayerInterface*>(p.get())->setAudioSink(mAudioOutput); } @@ -921,7 +924,7 @@ status_t MediaPlayerService::Client::setDataSource(int fd, int64_t offset, int64 if (p == NULL) return NO_INIT; if (!p->hardwareOutput()) { - mAudioOutput = new AudioOutput(); + mAudioOutput = new AudioOutput(mAudioSessionId); static_cast<MediaPlayerInterface*>(p.get())->setAudioSink(mAudioOutput); } @@ -1412,9 +1415,11 @@ sp<IMemory> MediaPlayerService::snoop() #undef LOG_TAG #define LOG_TAG "AudioSink" -MediaPlayerService::AudioOutput::AudioOutput() +MediaPlayerService::AudioOutput::AudioOutput(int sessionId) : mCallback(NULL), - mCallbackCookie(NULL) { + mCallbackCookie(NULL), + mSessionId(sessionId) { + LOGV("AudioOutput(%d)", sessionId); mTrack = 0; mStreamType = AudioSystem::MUSIC; mLeftVolume = 1.0; @@ -1504,7 +1509,7 @@ status_t 
MediaPlayerService::AudioOutput::open( bufferCount = mMinBufferCount; } - LOGV("open(%u, %d, %d, %d)", sampleRate, channelCount, format, bufferCount); + LOGV("open(%u, %d, %d, %d, %d)", sampleRate, channelCount, format, bufferCount,mSessionId); if (mTrack) close(); int afSampleRate; int afFrameCount; @@ -1529,14 +1534,21 @@ status_t MediaPlayerService::AudioOutput::open( frameCount, 0 /* flags */, CallbackWrapper, - this); + this, + 0, + mSessionId); } else { t = new AudioTrack( mStreamType, sampleRate, format, (channelCount == 2) ? AudioSystem::CHANNEL_OUT_STEREO : AudioSystem::CHANNEL_OUT_MONO, - frameCount); + frameCount, + 0, + NULL, + NULL, + 0, + mSessionId); } if ((t == 0) || (t->initCheck() != NO_ERROR)) { diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h index 2408c621b9b5..60b91c6907d0 100644 --- a/media/libmediaplayerservice/MediaPlayerService.h +++ b/media/libmediaplayerservice/MediaPlayerService.h @@ -65,7 +65,7 @@ class MediaPlayerService : public BnMediaPlayerService class AudioOutput : public MediaPlayerBase::AudioSink { public: - AudioOutput(); + AudioOutput(int sessionId); virtual ~AudioOutput(); virtual bool ready() const { return mTrack != NULL; } @@ -108,6 +108,7 @@ class MediaPlayerService : public BnMediaPlayerService float mRightVolume; float mMsecsPerFrame; uint32_t mLatency; + int mSessionId; static bool mIsOnEmulator; static int mMinBufferCount; // 12 for emulator; otherwise 4 @@ -185,9 +186,9 @@ public: // House keeping for media player clients virtual sp<IMediaPlayer> create( pid_t pid, const sp<IMediaPlayerClient>& client, const char* url, - const KeyedVector<String8, String8> *headers); + const KeyedVector<String8, String8> *headers, int audioSessionId); - virtual sp<IMediaPlayer> create(pid_t pid, const sp<IMediaPlayerClient>& client, int fd, int64_t offset, int64_t length); + virtual sp<IMediaPlayer> create(pid_t pid, const sp<IMediaPlayerClient>& client, int fd, int64_t 
offset, int64_t length, int audioSessionId); virtual sp<IMemory> decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, int* pFormat); virtual sp<IMemory> decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, int* pFormat); virtual sp<IMemory> snoop(); @@ -237,12 +238,15 @@ private: pid_t pid() const { return mPid; } virtual status_t dump(int fd, const Vector<String16>& args) const; + int getAudioSessionId() { return mAudioSessionId; } + private: friend class MediaPlayerService; Client( const sp<MediaPlayerService>& service, pid_t pid, int32_t connId, - const sp<IMediaPlayerClient>& client); + const sp<IMediaPlayerClient>& client, + int audioSessionId); Client(); virtual ~Client(); @@ -271,6 +275,7 @@ private: status_t mStatus; bool mLoop; int32_t mConnId; + int mAudioSessionId; // Metadata filters. media::Metadata::Filter mMetadataAllow; // protected by mLock |