-rw-r--r--  api/current.xml                                                13
-rw-r--r--  core/java/android/hardware/Camera.java                         66
-rw-r--r--  core/java/android/nfc/NdefRecord.java                          14
-rw-r--r--  core/java/android/os/StrictMode.java                           20
-rw-r--r--  core/java/android/webkit/HTML5VideoViewProxy.java              22
-rw-r--r--  core/jni/android_nfc_NdefRecord.cpp                            84
-rw-r--r--  include/camera/CameraHardwareInterface.h                      127
-rw-r--r--  media/libstagefright/AwesomePlayer.cpp                         53
-rw-r--r--  media/libstagefright/include/AwesomePlayer.h                    2
-rw-r--r--  services/java/com/android/server/PackageManagerService.java    25
-rw-r--r--  voip/java/com/android/server/sip/SipSessionGroup.java           3
11 files changed, 326 insertions, 103 deletions
diff --git a/api/current.xml b/api/current.xml
index 4e3dfc6c7815..29de36dca6f4 100644
--- a/api/current.xml
+++ b/api/current.xml
@@ -112097,6 +112097,8 @@
>
<parameter name="data" type="byte[]">
</parameter>
+<exception name="FormatException" type="android.nfc.FormatException">
+</exception>
</constructor>
<method name="describeContents"
return="int"
@@ -140669,6 +140671,17 @@
visibility="public"
>
</method>
+<method name="enableDefaults"
+ return="void"
+ abstract="false"
+ native="false"
+ synchronized="false"
+ static="true"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+</method>
<method name="getThreadPolicy"
return="android.os.StrictMode.ThreadPolicy"
abstract="false"
diff --git a/core/java/android/hardware/Camera.java b/core/java/android/hardware/Camera.java
index a27ba84c8a5d..6ff5a408bc98 100644
--- a/core/java/android/hardware/Camera.java
+++ b/core/java/android/hardware/Camera.java
@@ -175,13 +175,18 @@ public class Camera {
* camera image needs to be rotated clockwise so it shows correctly on
* the display in its natural orientation. It should be 0, 90, 180, or 270.
*
- * For example, suppose a device has a naturally tall screen, but the camera
- * sensor is mounted in landscape. If the top side of the camera sensor is
- * aligned with the right edge of the display in natural orientation, the
- * value should be 90.
+ * For example, suppose a device has a naturally tall screen. The
+ * back-facing camera sensor is mounted in landscape. You are looking at
+ * the screen. If the top side of the camera sensor is aligned with the
+ * right edge of the screen in natural orientation, the value should be
+ * 90. If the top side of a front-facing camera sensor is aligned with
+ * the right of the screen, the value should be 270.
*
* @see #setDisplayOrientation(int)
* @see #setRotation(int)
+ * @see #setPreviewSize(int, int)
+ * @see #setPictureSize(int, int)
+ * @see #setJpegThumbnailSize(int, int)
*/
public int orientation;
};
@@ -771,13 +776,16 @@ public class Camera {
public native final void stopSmoothZoom();
/**
- * Set the display orientation. This affects the preview frames and the
- * picture displayed after snapshot. This method is useful for portrait
- * mode applications.
+ * Set the clockwise rotation of preview display in degrees. This affects
+ * the preview frames and the picture displayed after snapshot. This method
+ * is useful for portrait mode applications. Note that preview display of
+ * front-facing cameras is flipped horizontally, that is, the image is
+ * reflected along the central vertical axis of the camera sensor, so that
+ * users see themselves as if looking into a mirror.
*
- * This does not affect the order of byte array passed in
- * {@link PreviewCallback#onPreviewFrame}. This method is not allowed to
- * be called during preview.
+ * This does not affect the order of byte array passed in {@link
+ * PreviewCallback#onPreviewFrame}, JPEG pictures, or recorded videos. This
+ * method is not allowed to be called during preview.
*
* If you want to make the camera image show in the same orientation as
* the display, you can use the following code.<p>
@@ -797,13 +805,20 @@ public class Camera {
* case Surface.ROTATION_270: degrees = 270; break;
* }
*
- * int result = (info.orientation - degrees + 360) % 360;
+ * int result;
+ * if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
+ * result = (info.orientation + degrees) % 360;
+ * result = (360 - result) % 360; // compensate the mirror
+ * } else { // back-facing
+ * result = (info.orientation - degrees + 360) % 360;
+ * }
* camera.setDisplayOrientation(result);
* }
* </pre>
* @param degrees the angle that the picture will be rotated clockwise.
* Valid values are 0, 90, 180, and 270. The starting
* position is 0 (landscape).
+ * @see #setPreviewDisplay(SurfaceHolder)
*/
public native final void setDisplayOrientation(int degrees);
@@ -1749,20 +1764,23 @@ public class Camera {
* the orientation in the EXIF header will be missing or 1 (row #0 is
* top and column #0 is left side).
*
- * If appplications want to rotate the picture to match the
- * orientation of what users see, apps should use {@link
+ * If applications want to rotate the picture to match the orientation
+ * of what users see, apps should use {@link
* android.view.OrientationEventListener} and {@link CameraInfo}.
* The value from OrientationEventListener is relative to the natural
- * orientation of the device. CameraInfo.mOrientation is the angle
- * between camera orientation and natural device orientation. The sum
- * of the two is the angle for rotation.
+ * orientation of the device. CameraInfo.orientation is the angle
+ * between camera orientation and natural device orientation. The sum
+ * of the two is the rotation angle for back-facing camera. The
+ * difference of the two is the rotation angle for front-facing camera.
+ * Note that the JPEG pictures of front-facing cameras are not mirrored
+ * as in preview display.
*
* For example, suppose the natural orientation of the device is
* portrait. The device is rotated 270 degrees clockwise, so the device
- * orientation is 270. Suppose the camera sensor is mounted in landscape
- * and the top side of the camera sensor is aligned with the right edge
- * of the display in natural orientation. So the camera orientation is
- * 90. The rotation should be set to 0 (270 + 90).
+ * orientation is 270. Suppose a back-facing camera sensor is mounted in
+ * landscape and the top side of the camera sensor is aligned with the
+ * right edge of the display in natural orientation. So the camera
+ * orientation is 90. The rotation should be set to 0 (270 + 90).
*
* The reference code is as follows.
*
@@ -1772,7 +1790,13 @@ public class Camera {
* new android.hardware.Camera.CameraInfo();
* android.hardware.Camera.getCameraInfo(cameraId, info);
* orientation = (orientation + 45) / 90 * 90;
- * mParameters.setRotation((orientation + info.mOrientation) % 360);
+ * int rotation = 0;
+ * if (info.facing == CameraInfo.CAMERA_FACING_FRONT) {
+ * rotation = (info.orientation - orientation + 360) % 360;
+ * } else { // back-facing camera
+ * rotation = (info.orientation + orientation) % 360;
+ * }
+ * mParameters.setRotation(rotation);
* }
*
* @param rotation The rotation angle in degrees relative to the
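
Note: the two javadoc snippets added above can be combined in application code. The following is a minimal sketch, not part of this change; the helper name configureCamera and the Activity parameter are illustrative, and imports of android.hardware.Camera, android.app.Activity, android.view.Surface and android.view.OrientationEventListener are assumed. It computes both the preview display orientation and the JPEG rotation from CameraInfo, using the same formulas as the updated javadoc.

    // Sketch only: follows the formulas in the updated Camera.java javadoc.
    void configureCamera(Activity activity, int cameraId, Camera camera,
            Camera.Parameters params, int deviceOrientation) {
        Camera.CameraInfo info = new Camera.CameraInfo();
        Camera.getCameraInfo(cameraId, info);

        // Map the current display rotation to degrees.
        int degrees = 0;
        switch (activity.getWindowManager().getDefaultDisplay().getRotation()) {
            case Surface.ROTATION_0:   degrees = 0;   break;
            case Surface.ROTATION_90:  degrees = 90;  break;
            case Surface.ROTATION_180: degrees = 180; break;
            case Surface.ROTATION_270: degrees = 270; break;
        }

        // Preview display orientation (front-facing preview is mirrored).
        int displayOrientation;
        if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
            displayOrientation = (info.orientation + degrees) % 360;
            displayOrientation = (360 - displayOrientation) % 360; // compensate the mirror
        } else {
            displayOrientation = (info.orientation - degrees + 360) % 360;
        }
        camera.setDisplayOrientation(displayOrientation);

        // JPEG rotation from an OrientationEventListener reading.
        if (deviceOrientation != OrientationEventListener.ORIENTATION_UNKNOWN) {
            deviceOrientation = (deviceOrientation + 45) / 90 * 90;
            int rotation;
            if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                rotation = (info.orientation - deviceOrientation + 360) % 360;
            } else {
                rotation = (info.orientation + deviceOrientation) % 360;
            }
            params.setRotation(rotation);
            camera.setParameters(params);
        }
    }
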
diff --git a/core/java/android/nfc/NdefRecord.java b/core/java/android/nfc/NdefRecord.java
index 23fd2ca9b8b9..edc5ab921f1d 100644
--- a/core/java/android/nfc/NdefRecord.java
+++ b/core/java/android/nfc/NdefRecord.java
@@ -200,8 +200,17 @@ public class NdefRecord implements Parcelable {
*
* @throws FormatException if the data is not a valid NDEF record
*/
- public NdefRecord(byte[] data) {
- throw new UnsupportedOperationException();
+ public NdefRecord(byte[] data) throws FormatException {
+ /* Prevent the compiler from complaining about unassigned final fields */
+ mFlags = 0;
+ mTnf = 0;
+ mType = null;
+ mId = null;
+ mPayload = null;
+ /* Perform actual parsing */
+ if (parseNdefRecord(data) == -1) {
+ throw new FormatException("Error while parsing NDEF record");
+ }
}
/**
@@ -280,5 +289,6 @@ public class NdefRecord implements Parcelable {
}
};
+ private native int parseNdefRecord(byte[] data);
private native byte[] generate(short flags, short tnf, byte[] type, byte[] id, byte[] data);
}
\ No newline at end of file
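
With this change the byte-array constructor actually parses the record and throws FormatException on malformed input, so callers need to handle it. A minimal sketch, not part of this change; it assumes android.nfc.NdefRecord, android.nfc.FormatException and android.util.Log are imported and TAG is defined by the caller.

    // Sketch only: parse raw NDEF record bytes and fail gracefully.
    NdefRecord parseRecord(byte[] rawRecord) {
        try {
            return new NdefRecord(rawRecord);
        } catch (FormatException e) {
            Log.w(TAG, "not a valid NDEF record", e);
            return null;
        }
    }
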
diff --git a/core/java/android/os/StrictMode.java b/core/java/android/os/StrictMode.java
index de5b7b9c5f89..e9120891b73d 100644
--- a/core/java/android/os/StrictMode.java
+++ b/core/java/android/os/StrictMode.java
@@ -66,7 +66,7 @@ import java.util.HashMap;
* .penaltyLog()
* .build());
* StrictMode.setVmPolicy(new {@link VmPolicy.Builder StrictMode.VmPolicy.Builder}()
- * .detectLeakedSqlLiteCursors()
+ * .detectLeakedSqlLiteObjects()
* .penaltyLog()
* .penaltyDeath()
* .build());
@@ -962,6 +962,24 @@ public final class StrictMode {
}
/**
+ * Enable the recommended StrictMode defaults, with violations just being logged.
+ *
+ * <p>This catches disk and network access on the main thread, as
+ * well as leaked SQLite cursors. This is simply a wrapper around
+ * {@link #setVmPolicy} and {@link #setThreadPolicy}.
+ */
+ public static void enableDefaults() {
+ StrictMode.setThreadPolicy(new StrictMode.ThreadPolicy.Builder()
+ .detectAll()
+ .penaltyLog()
+ .build());
+ StrictMode.setVmPolicy(new StrictMode.VmPolicy.Builder()
+ .detectLeakedSqlLiteObjects()
+ .penaltyLog()
+ .build());
+ }
+
+ /**
* @hide
*/
public static boolean vmSqliteObjectLeaksEnabled() {
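
For context, enableDefaults() is meant to be called once, as early as possible in the process. A minimal sketch of typical wiring, not part of this change; the DEBUG flag stands in for whatever debug-only guard an app already uses, and android.app.Application / android.os.StrictMode imports are assumed.

    // Sketch only: enable the recommended StrictMode defaults in debug builds.
    public class MyApplication extends Application {
        private static final boolean DEBUG = true; // switch off for release builds

        @Override
        public void onCreate() {
            if (DEBUG) {
                StrictMode.enableDefaults();
            }
            super.onCreate();
        }
    }
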
diff --git a/core/java/android/webkit/HTML5VideoViewProxy.java b/core/java/android/webkit/HTML5VideoViewProxy.java
index eda2e72af376..6769563d4392 100644
--- a/core/java/android/webkit/HTML5VideoViewProxy.java
+++ b/core/java/android/webkit/HTML5VideoViewProxy.java
@@ -72,6 +72,7 @@ class HTML5VideoViewProxy extends Handler
private static final int PREPARED = 200;
private static final int ENDED = 201;
private static final int POSTER_FETCHED = 202;
+ private static final int PAUSED = 203;
private static final String COOKIE = "Cookie";
@@ -118,6 +119,7 @@ class HTML5VideoViewProxy extends Handler
}
// The spec says the timer should fire every 250 ms or less.
private static final int TIMEUPDATE_PERIOD = 250; // ms
+ static boolean isVideoSelfEnded = false;
private static final WebChromeClient.CustomViewCallback mCallback =
new WebChromeClient.CustomViewCallback() {
@@ -132,7 +134,11 @@ class HTML5VideoViewProxy extends Handler
if (mVideoView.isPlaying()) {
mVideoView.stopPlayback();
}
- mCurrentProxy.dispatchOnEnded();
+ if (isVideoSelfEnded)
+ mCurrentProxy.dispatchOnEnded();
+ else
+ mCurrentProxy.dispatchOnPaused();
+ isVideoSelfEnded = false;
mCurrentProxy = null;
mLayout.removeView(mVideoView);
mVideoView = null;
@@ -249,7 +255,8 @@ class HTML5VideoViewProxy extends Handler
// The video ended by itself, so we need to
// send a message to the UI thread to dismiss
// the video view and to return to the WebView.
- sendMessage(obtainMessage(ENDED));
+ // arg1 == 1 means the video ended by itself.
+ sendMessage(obtainMessage(ENDED, 1, 0));
}
// MediaPlayer.OnErrorListener
@@ -263,6 +270,11 @@ class HTML5VideoViewProxy extends Handler
mWebCoreHandler.sendMessage(msg);
}
+ public void dispatchOnPaused() {
+ Message msg = Message.obtain(mWebCoreHandler, PAUSED);
+ mWebCoreHandler.sendMessage(msg);
+ }
+
public void onTimeupdate() {
sendMessage(obtainMessage(TIMEUPDATE));
}
@@ -291,6 +303,8 @@ class HTML5VideoViewProxy extends Handler
break;
}
case ENDED:
+ if (msg.arg1 == 1)
+ VideoPlayer.isVideoSelfEnded = true;
case ERROR: {
WebChromeClient client = mWebView.getWebChromeClient();
if (client != null) {
@@ -476,6 +490,9 @@ class HTML5VideoViewProxy extends Handler
case ENDED:
nativeOnEnded(mNativePointer);
break;
+ case PAUSED:
+ nativeOnPaused(mNativePointer);
+ break;
case POSTER_FETCHED:
Bitmap poster = (Bitmap) msg.obj;
nativeOnPosterFetched(poster, mNativePointer);
@@ -584,6 +601,7 @@ class HTML5VideoViewProxy extends Handler
private native void nativeOnPrepared(int duration, int width, int height, int nativePointer);
private native void nativeOnEnded(int nativePointer);
+ private native void nativeOnPaused(int nativePointer);
private native void nativeOnPosterFetched(Bitmap poster, int nativePointer);
private native void nativeOnTimeupdate(int position, int nativePointer);
}
diff --git a/core/jni/android_nfc_NdefRecord.cpp b/core/jni/android_nfc_NdefRecord.cpp
index 8ce1837fbf08..9d20d6df714f 100644
--- a/core/jni/android_nfc_NdefRecord.cpp
+++ b/core/jni/android_nfc_NdefRecord.cpp
@@ -80,8 +80,92 @@ end:
return result;
}
+static jint android_nfc_NdefRecord_parseNdefRecord(JNIEnv *e, jobject o,
+ jbyteArray array)
+{
+ uint16_t status;
+ jbyte *raw_record;
+ jsize raw_record_size;
+ jint ret = -1;
+ phFriNfc_NdefRecord_t record;
+
+ jfieldID mType, mId, mPayload, mTnf;
+ jbyteArray type = NULL;
+ jbyteArray id = NULL;
+ jbyteArray payload = NULL;
+
+ jclass record_cls = e->GetObjectClass(o);
+
+ raw_record_size = e->GetArrayLength(array);
+ raw_record = e->GetByteArrayElements(array, NULL);
+ if (raw_record == NULL) {
+ goto clean_and_return;
+ }
+
+ LOGD("phFriNfc_NdefRecord_Parse()");
+ status = phFriNfc_NdefRecord_Parse(&record, (uint8_t *)raw_record);
+ if (status) {
+ LOGE("phFriNfc_NdefRecord_Parse() returned 0x%04x", status);
+ goto clean_and_return;
+ }
+ LOGD("phFriNfc_NdefRecord_Parse() returned 0x%04x", status);
+
+ /* Set TNF field */
+ mTnf = e->GetFieldID(record_cls, "mTnf", "S");
+ e->SetShortField(o, mTnf, record.Tnf);
+
+ /* Set type field */
+ mType = e->GetFieldID(record_cls, "mType", "[B");
+ type = e->NewByteArray(record.TypeLength);
+ if (type == NULL) {
+ goto clean_and_return;
+ }
+ e->SetByteArrayRegion(type, 0, record.TypeLength,
+ (jbyte *)record.Type);
+ e->SetObjectField(o, mType, type);
+
+ /* Set id field */
+ mId = e->GetFieldID(record_cls, "mId", "[B");
+ id = e->NewByteArray(record.IdLength);
+ if (id == NULL) {
+ goto clean_and_return;
+ }
+ e->SetByteArrayRegion(id, 0, record.IdLength,
+ (jbyte *)record.Id);
+ e->SetObjectField(o, mId, id);
+
+ /* Set payload field */
+ mPayload = e->GetFieldID(record_cls, "mPayload", "[B");
+ payload = e->NewByteArray(record.PayloadLength);
+ if (payload == NULL) {
+ goto clean_and_return;
+ }
+ e->SetByteArrayRegion(payload, 0, record.PayloadLength,
+ (jbyte *)record.PayloadData);
+ e->SetObjectField(o, mPayload, payload);
+
+ ret = 0;
+
+clean_and_return:
+ if (type != NULL) {
+ e->DeleteLocalRef(type);
+ }
+ if (id != NULL) {
+ e->DeleteLocalRef(id);
+ }
+ if (payload != NULL) {
+ e->DeleteLocalRef(payload);
+ }
+ if (raw_record != NULL) {
+ e->ReleaseByteArrayElements(array, raw_record, JNI_ABORT);
+ }
+
+ return ret;
+}
+
static JNINativeMethod gMethods[] = {
{"generate", "(SS[B[B[B)[B", (void *)android_nfc_NdefRecord_generate},
+ {"parseNdefRecord", "([B)I", (void *)android_nfc_NdefRecord_parseNdefRecord},
};
int register_android_nfc_NdefRecord(JNIEnv *e)
diff --git a/include/camera/CameraHardwareInterface.h b/include/camera/CameraHardwareInterface.h
index 561a46ddfd92..5465441c2008 100644
--- a/include/camera/CameraHardwareInterface.h
+++ b/include/camera/CameraHardwareInterface.h
@@ -49,17 +49,6 @@ typedef void (*data_callback)(int32_t msgType,
const sp<IMemory>& dataPtr,
void* user);
-#ifdef USE_GRAPHIC_VIDEO_BUFFERS
-/**
- * Replace data_callback_timestamp. Once we are done, this
- * should be renamed as data_callback_timestamp, and the existing
- * data_callback_timestamp should be deleted.
- */
-typedef void (*videobuffer_callback_timestamp)(nsecs_t timestamp,
- int32_t msgType,
- const sp<android_native_buffer_t>& buf,
- void* user);
-#endif
typedef void (*data_callback_timestamp)(nsecs_t timestamp,
int32_t msgType,
const sp<IMemory>& dataPtr,
@@ -100,46 +89,6 @@ class CameraHardwareInterface : public virtual RefBase {
public:
virtual ~CameraHardwareInterface() { }
-#ifdef USE_GRAPHIC_VIDEO_BUFFERS
- /**
- * Replace existing setCallbacks() method. Once we are done, the
- * videobuffer_callback_timestamp parameter will be renamed to
- * data_callback_timestamp, but its signature will be the same
- * as videobuffer_callback_timestamp, which will be renamed
- * to data_callback_timestamp and the exiting data_callback_timestamp
- * will be deleted.
- */
- virtual void setCallbacks(notify_callback notify_cb,
- data_callback data_cb,
- videobuffer_callback_timestamp data_cb_timestamp,
- void* user) = 0;
-
- /**
- * Replace releaseRecordingFrame(). releaseRecordingFrame() should be
- * changed so that it has the same signature of releaseVideoBuffer(),
- * once we are done, and releaseVideoBuffer() will be deleted.
- */
- virtual void releaseVideoBuffer(const sp<android_native_buffer_t>& buf) = 0;
-
- /**
- * This method should be called after startRecording().
- *
- * @param nBuffers the total number of video buffers allocated by the camera
- * hal
- * @param buffers an array allocated by the camera hal to hold the pointers
- * to the individual video buffers. The video buffers and the buffers array
- * should NOT be modified/released by camera hal until stopRecording() is
- * called and all outstanding video buffers previously sent out via
- * CAMERA_MSG_VIDEO_FRAME have been released via releaseVideoBuffer().
- * Camera hal client must not release the individual buffers and the buffers
- * array.
- * @return no error if OK.
- */
- virtual status_t getVideoBufferInfo(
- sp<android_native_buffer_t>** buffers,
- size_t *nBuffers) = 0;
-#endif
-
/** Set the ANativeWindow to which preview frames are sent */
virtual status_t setPreviewWindow(const sp<ANativeWindow>& buf) = 0;
@@ -197,6 +146,82 @@ public:
virtual bool previewEnabled() = 0;
/**
+ * Retrieve the total number of available buffers from camera hal for passing
+ * video frame data in a recording session. Must be called again if a new
+ * recording session is started.
+ *
+ * This method should be called after startRecording(), since
+ * some camera hal may choose to allocate the video buffers only after
+ * recording is started.
+ *
+ * Some camera hal may not implement this method, and 0 can be returned to
+ * indicate that this feature is not available.
+ *
+ * @return the number of video buffers that camera hal makes available.
+ * Zero (0) is returned to indicate that camera hal does not support
+ * this feature.
+ */
+ virtual int32_t getNumberOfVideoBuffers() const { return 0; }
+
+ /**
+ * Retrieve the video buffer corresponding to the given index in a
+ * recording session. Must be called again if a new recording session
+ * is started.
+ *
+ * It allows a client to retrieve all video buffers that camera hal makes
+ * available to passing video frame data by calling this method with all
+ * valid index values. The valid index value ranges from 0 to n, where
+ * n = getNumberOfVideoBuffers() - 1. With an index outside of the valid
+ * range, 0 must be returned. This method should be called after
+ * startRecording().
+ *
+ * The video buffers should NOT be modified/released by camera hal
+ * until stopRecording() is called and all outstanding video buffers
+ * previously sent out via CAMERA_MSG_VIDEO_FRAME have been released
+ * via releaseVideoBuffer().
+ *
+ * @param index an index to retrieve the corresponding video buffer.
+ *
+ * @return the video buffer corresponding to the given index.
+ */
+ virtual sp<IMemory> getVideoBuffer(int32_t index) const { return 0; }
+
+ /**
+ * Request the camera hal to store meta data or real YUV data in
+ * the video buffers sent out via CAMERA_MSG_VIDEO_FRAME for a
+ * recording session. If it is not called, the default camera
+ * hal behavior is to store real YUV data in the video buffers.
+ *
+ * This method should be called before startRecording() in order
+ * to be effective.
+ *
+ * If meta data is stored in the video buffers, it is up to the
+ * receiver of the video buffers to interpret the contents and
+ * to find the actual frame data with the help of the meta data
+ * in the buffer. How this is done is outside of the scope of
+ * this method.
+ *
+ * Some camera hal may not support storing meta data in the video
+ * buffers, but all camera hal should support storing real YUV data
+ * in the video buffers. If the camera hal does not support storing
+ * the meta data in the video buffers when it is requested to do
+ * so, INVALID_OPERATION must be returned. It is very useful for
+ * the camera hal to pass meta data rather than the actual frame
+ * data directly to the video encoder, since the amount of
+ * uncompressed frame data can be very large if the video size is large.
+ *
+ * @param enable if true to instruct the camera hal to store
+ * meta data in the video buffers; false to instruct
+ * the camera hal to store real YUV data in the video
+ * buffers.
+ *
+ * @return OK on success.
+ */
+ virtual status_t storeMetaDataInBuffers(bool enable) {
+ return enable? INVALID_OPERATION: OK;
+ }
+
+ /**
* Start record mode. When a record image is available a CAMERA_MSG_VIDEO_FRAME
* message is sent with the corresponding frame. Every record frame must be released
* by calling releaseRecordingFrame().
diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp
index a0b7f702d8e9..ad6cb5b8ff74 100644
--- a/media/libstagefright/AwesomePlayer.cpp
+++ b/media/libstagefright/AwesomePlayer.cpp
@@ -1079,6 +1079,32 @@ status_t AwesomePlayer::initVideoDecoder(uint32_t flags) {
return mVideoSource != NULL ? OK : UNKNOWN_ERROR;
}
+void AwesomePlayer::finishSeekIfNecessary(int64_t videoTimeUs) {
+ if (!mSeeking) {
+ return;
+ }
+
+ if (mAudioPlayer != NULL) {
+ // If we don't have a video time, seek audio to the originally
+ // requested seek time instead.
+ int64_t seekTimeUs = (videoTimeUs < 0) ? mSeekTimeUs : videoTimeUs;
+ LOGV("seeking audio to %lld us (%.2f secs).", seekTimeUs, seekTimeUs / 1E6);
+
+ mAudioPlayer->seekTo(seekTimeUs);
+ mAudioPlayer->resume();
+ mWatchForAudioSeekComplete = true;
+ mWatchForAudioEOS = true;
+ } else if (!mSeekNotificationSent) {
+ // If we're playing video only, report seek complete now,
+ // otherwise audio player will notify us later.
+ notifyListener_l(MEDIA_SEEK_COMPLETE);
+ }
+
+ mFlags |= FIRST_FRAME;
+ mSeeking = false;
+ mSeekNotificationSent = false;
+}
+
void AwesomePlayer::onVideoEvent() {
Mutex::Autolock autoLock(mLock);
if (!mVideoEventPending) {
@@ -1142,6 +1168,14 @@ void AwesomePlayer::onVideoEvent() {
continue;
}
+ // So video playback is complete, but we may still have
+ // a seek request pending that needs to be applied
+ // to the audio track.
+ if (mSeeking) {
+ LOGV("video stream ended while seeking!");
+ }
+ finishSeekIfNecessary(-1);
+
mFlags |= VIDEO_AT_EOS;
postStreamDoneEvent_l(err);
return;
@@ -1168,24 +1202,7 @@ void AwesomePlayer::onVideoEvent() {
mVideoTimeUs = timeUs;
}
- if (mSeeking) {
- if (mAudioPlayer != NULL) {
- LOGV("seeking audio to %lld us (%.2f secs).", timeUs, timeUs / 1E6);
-
- mAudioPlayer->seekTo(timeUs);
- mAudioPlayer->resume();
- mWatchForAudioSeekComplete = true;
- mWatchForAudioEOS = true;
- } else if (!mSeekNotificationSent) {
- // If we're playing video only, report seek complete now,
- // otherwise audio player will notify us later.
- notifyListener_l(MEDIA_SEEK_COMPLETE);
- }
-
- mFlags |= FIRST_FRAME;
- mSeeking = false;
- mSeekNotificationSent = false;
- }
+ finishSeekIfNecessary(timeUs);
TimeSource *ts = (mFlags & AUDIO_AT_EOS) ? &mSystemTimeSource : mTimeSource;
diff --git a/media/libstagefright/include/AwesomePlayer.h b/media/libstagefright/include/AwesomePlayer.h
index 5a1d7e71962a..0837be8dadbd 100644
--- a/media/libstagefright/include/AwesomePlayer.h
+++ b/media/libstagefright/include/AwesomePlayer.h
@@ -261,6 +261,8 @@ private:
bool getBitrate(int64_t *bitrate);
+ void finishSeekIfNecessary(int64_t videoTimeUs);
+
AwesomePlayer(const AwesomePlayer &);
AwesomePlayer &operator=(const AwesomePlayer &);
};
diff --git a/services/java/com/android/server/PackageManagerService.java b/services/java/com/android/server/PackageManagerService.java
index cdddaed3d056..666bb2671c06 100644
--- a/services/java/com/android/server/PackageManagerService.java
+++ b/services/java/com/android/server/PackageManagerService.java
@@ -3286,7 +3286,9 @@ class PackageManagerService extends IPackageManager.Stub {
* only for non-system apps and system app upgrades.
*/
if (pkg.applicationInfo.nativeLibraryDir != null) {
- final File sharedLibraryDir = new File(pkg.applicationInfo.nativeLibraryDir);
+ final File nativeLibraryDir = new File(pkg.applicationInfo.nativeLibraryDir);
+ final String dataPathString = dataPath.getPath();
+
if (isSystemApp(pkg) && !isUpdatedSystemApp(pkg)) {
/*
* Upgrading from a previous version of the OS sometimes
@@ -3295,15 +3297,24 @@ class PackageManagerService extends IPackageManager.Stub {
* Recent changes in the JNI library search path
* necessitates we remove those to match previous behavior.
*/
- if (NativeLibraryHelper.removeNativeBinariesFromDirLI(sharedLibraryDir)) {
+ if (NativeLibraryHelper.removeNativeBinariesFromDirLI(nativeLibraryDir)) {
Log.i(TAG, "removed obsolete native libraries for system package " + path);
}
- } else if (!isExternal(pkg)) {
- Log.i(TAG, path + " changed; unpacking");
- mInstaller.unlinkNativeLibraryDirectory(dataPath.getPath());
- NativeLibraryHelper.copyNativeBinariesLI(scanFile, sharedLibraryDir);
+ } else if (nativeLibraryDir.getParent().equals(dataPathString)) {
+ /*
+ * If this is an internal application or our
+ * nativeLibraryPath points to our data directory, unpack
+ * the libraries. The native library path pointing to the
+ * data directory for an application in an ASEC container
+ * can happen for older apps that existed before an OTA to
+ * Gingerbread.
+ */
+ Slog.i(TAG, "Unpacking native libraries for " + path);
+ mInstaller.unlinkNativeLibraryDirectory(dataPathString);
+ NativeLibraryHelper.copyNativeBinariesLI(scanFile, nativeLibraryDir);
} else {
- mInstaller.linkNativeLibraryDirectory(dataPath.getPath(),
+ Slog.i(TAG, "Linking native library dir for " + path);
+ mInstaller.linkNativeLibraryDirectory(dataPathString,
pkg.applicationInfo.nativeLibraryDir);
}
}
diff --git a/voip/java/com/android/server/sip/SipSessionGroup.java b/voip/java/com/android/server/sip/SipSessionGroup.java
index 578bd9b645c8..bb246a607b5f 100644
--- a/voip/java/com/android/server/sip/SipSessionGroup.java
+++ b/voip/java/com/android/server/sip/SipSessionGroup.java
@@ -134,7 +134,8 @@ class SipSessionGroup implements SipListener {
SipFactory sipFactory = SipFactory.getInstance();
Properties properties = new Properties();
properties.setProperty("javax.sip.STACK_NAME", getStackName());
- properties.setProperty("javax.sip.THREAD_POOL_SIZE", THREAD_POOL_SIZE);
+ properties.setProperty(
+ "gov.nist.javax.sip.THREAD_POOL_SIZE", THREAD_POOL_SIZE);
String outboundProxy = myself.getProxyAddress();
if (!TextUtils.isEmpty(outboundProxy)) {
Log.v(TAG, "outboundProxy is " + outboundProxy);