-rw-r--r--  api/current.xml | 13
-rw-r--r--  core/java/android/net/InterfaceConfiguration.java | 24
-rw-r--r--  core/java/android/provider/MediaStore.java | 1
-rw-r--r--  core/java/android/server/BluetoothService.java | 14
-rw-r--r--  core/java/android/view/GLES20Layer.java | 7
-rw-r--r--  core/java/android/view/View.java | 66
-rw-r--r--  core/java/android/webkit/WebView.java | 2
-rw-r--r--  core/java/com/android/internal/statusbar/StatusBarNotification.java | 13
-rw-r--r--  core/jni/android_media_AudioSystem.cpp | 6
-rw-r--r--  core/res/res/drawable-hdpi/ic_audio_alarm.png | bin 0 -> 1252 bytes
-rw-r--r--  core/res/res/drawable-hdpi/ic_audio_alarm_mute.png | bin 0 -> 1378 bytes
-rw-r--r--  core/res/res/drawable-hdpi/ic_audio_notification.png | bin 0 -> 1015 bytes
-rw-r--r--  core/res/res/drawable-hdpi/ic_audio_notification_mute.png | bin 0 -> 1184 bytes
-rw-r--r--  core/res/res/drawable-hdpi/ic_audio_vol.png | bin 0 -> 1144 bytes
-rw-r--r--  core/res/res/drawable-hdpi/ic_audio_vol_mute.png | bin 0 -> 1168 bytes
-rw-r--r--  core/res/res/drawable-mdpi/ic_audio_alarm.png | bin 0 -> 847 bytes
-rw-r--r--  core/res/res/drawable-mdpi/ic_audio_alarm_mute.png | bin 0 -> 896 bytes
-rw-r--r--  core/res/res/drawable-mdpi/ic_audio_notification.png | bin 0 -> 758 bytes
-rw-r--r--  core/res/res/drawable-mdpi/ic_audio_notification_mute.png | bin 0 -> 842 bytes
-rw-r--r--  core/res/res/drawable-mdpi/ic_audio_vol.png | bin 0 -> 841 bytes
-rw-r--r--  core/res/res/drawable-mdpi/ic_audio_vol_mute.png | bin 0 -> 809 bytes
-rw-r--r--  core/res/res/drawable-xlarge-nodpi/default_wallpaper.jpg | bin 114414 -> 815422 bytes
-rwxr-xr-x  data/sounds/AudioPackage6.mk | 1
-rw-r--r--  data/sounds/effects/ogg/Effect_Tick.ogg | bin 4825 -> 4348 bytes
-rwxr-xr-x  data/sounds/ringtones/ogg/Iridium.ogg | bin 0 -> 58226 bytes
-rw-r--r--  docs/html/guide/topics/fundamentals/index.jd | 496
-rw-r--r--  docs/html/guide/topics/fundamentals/processes-and-threads.jd | 425
-rw-r--r--  docs/html/images/sdk_manager_packages.png | bin 114564 -> 115486 bytes
-rw-r--r--  docs/html/sdk/installing.jd | 7
-rw-r--r--  include/media/AudioSystem.h | 5
-rw-r--r--  include/media/IAudioFlinger.h | 3
-rw-r--r--  include/media/IAudioPolicyService.h | 1
-rw-r--r--  include/media/stagefright/MediaDefs.h | 1
-rw-r--r--  libs/hwui/Caches.cpp | 8
-rw-r--r--  libs/hwui/LayerCache.cpp | 25
-rw-r--r--  libs/hwui/LayerCache.h | 11
-rw-r--r--  libs/hwui/LayerRenderer.cpp | 128
-rw-r--r--  libs/hwui/Properties.h | 6
-rw-r--r--  media/java/android/media/AudioManager.java | 44
-rw-r--r--  media/java/android/media/AudioService.java | 182
-rw-r--r--  media/java/android/media/AudioSystem.java | 2
-rw-r--r--  media/java/android/media/IAudioService.aidl | 6
-rw-r--r--  media/java/android/media/MediaFile.java | 6
-rwxr-xr-x  media/java/android/media/videoeditor/VideoEditorImpl.java | 456
-rw-r--r--  media/java/android/mtp/MtpDatabase.java | 1
-rw-r--r--  media/libmedia/AudioSystem.cpp | 17
-rw-r--r--  media/libmedia/IAudioFlinger.cpp | 16
-rw-r--r--  media/libmedia/IAudioPolicyService.cpp | 20
-rw-r--r--  media/libstagefright/Android.mk | 3
-rw-r--r--  media/libstagefright/AwesomePlayer.cpp | 13
-rw-r--r--  media/libstagefright/DataSource.cpp | 2
-rw-r--r--  media/libstagefright/FLACExtractor.cpp | 813
-rw-r--r--  media/libstagefright/MPEG4Extractor.cpp | 14
-rw-r--r--  media/libstagefright/MediaDefs.cpp | 1
-rw-r--r--  media/libstagefright/MediaExtractor.cpp | 3
-rw-r--r--  media/libstagefright/OMXCodec.cpp | 2
-rw-r--r--  media/libstagefright/OggExtractor.cpp | 38
-rw-r--r--  media/libstagefright/SampleIterator.cpp | 2
-rw-r--r--  media/libstagefright/SampleTable.cpp | 73
-rw-r--r--  media/libstagefright/StagefrightMediaScanner.cpp | 2
-rw-r--r--  media/libstagefright/include/FLACExtractor.h | 64
-rw-r--r--  media/libstagefright/include/OggExtractor.h | 3
-rw-r--r--  media/libstagefright/include/SampleTable.h | 8
-rw-r--r--  opengl/java/android/opengl/GLUtils.java | 3
-rw-r--r--  packages/SettingsProvider/src/com/android/providers/settings/DatabaseHelper.java | 29
-rw-r--r--  packages/SystemUI/res/drawable-hdpi/ic_sysbar_back_default.png | bin 6409 -> 6277 bytes
-rw-r--r--  packages/SystemUI/res/drawable-hdpi/ic_sysbar_back_ime_default.png | bin 2839 -> 2848 bytes
-rw-r--r--  packages/SystemUI/res/drawable-hdpi/ic_sysbar_home_default.png | bin 6519 -> 6531 bytes
-rw-r--r--  packages/SystemUI/res/drawable-hdpi/ic_sysbar_menu_default.png | bin 3240 -> 3154 bytes
-rw-r--r--  packages/SystemUI/res/drawable-hdpi/ic_sysbar_recent_default.png | bin 5093 -> 5163 bytes
-rw-r--r--  packages/SystemUI/res/drawable-hdpi/stat_sys_data_bluetooth.png | bin 700 -> 944 bytes
-rw-r--r--  packages/SystemUI/res/drawable-hdpi/stat_sys_data_bluetooth_connected.png | bin 826 -> 1448 bytes
-rw-r--r--  packages/SystemUI/res/drawable-hdpi/stat_sys_signal_in.png | bin 0 -> 1492 bytes
-rw-r--r--  packages/SystemUI/res/drawable-hdpi/stat_sys_signal_out.png | bin 0 -> 1488 bytes
-rw-r--r--  packages/SystemUI/res/drawable-hdpi/stat_sys_wifi_in.png | bin 0 -> 1579 bytes
-rw-r--r--  packages/SystemUI/res/drawable-hdpi/stat_sys_wifi_inout.png | bin 0 -> 1729 bytes
-rw-r--r--  packages/SystemUI/res/drawable-hdpi/stat_sys_wifi_out.png | bin 0 -> 1669 bytes
-rw-r--r--  packages/SystemUI/res/drawable-mdpi/ic_sysbar_back_default.png | bin 3900 -> 3763 bytes
-rw-r--r--  packages/SystemUI/res/drawable-mdpi/ic_sysbar_back_ime_default.png | bin 1914 -> 1914 bytes
-rw-r--r--  packages/SystemUI/res/drawable-mdpi/ic_sysbar_home_default.png | bin 3991 -> 3962 bytes
-rw-r--r--  packages/SystemUI/res/drawable-mdpi/ic_sysbar_menu_default.png | bin 2340 -> 2192 bytes
-rw-r--r--  packages/SystemUI/res/drawable-mdpi/ic_sysbar_recent_default.png | bin 3140 -> 3064 bytes
-rw-r--r--  packages/SystemUI/res/drawable-mdpi/stat_sys_data_bluetooth.png | bin 484 -> 677 bytes
-rw-r--r--  packages/SystemUI/res/drawable-mdpi/stat_sys_data_bluetooth_connected.png | bin 550 -> 988 bytes
-rw-r--r--  packages/SystemUI/res/drawable-mdpi/stat_sys_signal_in.png | bin 0 -> 1015 bytes
-rw-r--r--  packages/SystemUI/res/drawable-mdpi/stat_sys_signal_inout.png | bin 0 -> 1205 bytes
-rw-r--r--  packages/SystemUI/res/drawable-mdpi/stat_sys_signal_out.png | bin 0 -> 992 bytes
-rw-r--r--  packages/SystemUI/res/drawable-mdpi/stat_sys_wifi_in.png | bin 0 -> 1057 bytes
-rw-r--r--  packages/SystemUI/res/drawable-mdpi/stat_sys_wifi_inout.png | bin 0 -> 1141 bytes
-rw-r--r--  packages/SystemUI/res/drawable-mdpi/stat_sys_wifi_out.png | bin 0 -> 1068 bytes
-rw-r--r--  packages/SystemUI/res/layout-xlarge/status_bar_notification_area.xml | 15
-rw-r--r--  packages/SystemUI/res/layout-xlarge/status_bar_notification_panel_title.xml | 122
-rw-r--r--  packages/SystemUI/res/values-xlarge/colors.xml | 2
-rw-r--r--  packages/SystemUI/src/com/android/systemui/power/PowerUI.java | 2
-rw-r--r--  packages/SystemUI/src/com/android/systemui/statusbar/NotificationData.java | 7
-rw-r--r--  packages/SystemUI/src/com/android/systemui/statusbar/policy/BluetoothController.java | 79
-rw-r--r--  packages/SystemUI/src/com/android/systemui/statusbar/policy/NetworkController.java | 57
-rw-r--r--  packages/SystemUI/src/com/android/systemui/statusbar/tablet/TabletStatusBar.java | 20
-rw-r--r--  services/audioflinger/AudioFlinger.cpp | 25
-rw-r--r--  services/audioflinger/AudioFlinger.h | 4
-rw-r--r--  services/audioflinger/AudioPolicyManagerBase.cpp | 27
-rw-r--r--  services/audioflinger/AudioPolicyService.cpp | 9
-rw-r--r--  services/audioflinger/AudioPolicyService.h | 5
-rw-r--r--  services/java/com/android/server/NetworkManagementService.java | 24
-rw-r--r--  services/java/com/android/server/ScreenRotationAnimation.java | 112
-rw-r--r--  services/java/com/android/server/WindowManagerService.java | 16
-rw-r--r--  services/java/com/android/server/connectivity/Tethering.java | 6
-rw-r--r--  tests/HwAccelerationTest/AndroidManifest.xml | 11
-rw-r--r--  tests/HwAccelerationTest/res/layout/view_layers_6.xml | 60
-rw-r--r--  tests/HwAccelerationTest/src/com/android/test/hwui/ViewLayersActivity6.java | 131
-rw-r--r--  wifi/java/android/net/wifi/WifiStateMachine.java | 7
111 files changed, 3196 insertions, 631 deletions
diff --git a/api/current.xml b/api/current.xml
index 8366600d7a50..a01a3a88a6d1 100644
--- a/api/current.xml
+++ b/api/current.xml
@@ -163248,6 +163248,19 @@
visibility="public"
>
</method>
+<method name="getVersion"
+ return="java.lang.String"
+ abstract="false"
+ native="false"
+ synchronized="false"
+ static="true"
+ final="false"
+ deprecated="not deprecated"
+ visibility="public"
+>
+<parameter name="context" type="android.content.Context">
+</parameter>
+</method>
<field name="ACTION_IMAGE_CAPTURE"
type="java.lang.String"
transient="false"
diff --git a/core/java/android/net/InterfaceConfiguration.java b/core/java/android/net/InterfaceConfiguration.java
index d1bbaa49abbd..89b5915b2544 100644
--- a/core/java/android/net/InterfaceConfiguration.java
+++ b/core/java/android/net/InterfaceConfiguration.java
@@ -28,8 +28,7 @@ import java.net.UnknownHostException;
*/
public class InterfaceConfiguration implements Parcelable {
public String hwAddr;
- public InetAddress addr;
- public InetAddress mask;
+ public LinkAddress addr;
public String interfaceFlags;
public InterfaceConfiguration() {
@@ -41,8 +40,6 @@ public class InterfaceConfiguration implements Parcelable {
str.append("ipddress ");
str.append((addr != null) ? addr.toString() : "NULL");
- str.append(" netmask ");
- str.append((mask != null) ? mask.toString() : "NULL");
str.append(" flags ").append(interfaceFlags);
str.append(" hwaddr ").append(hwAddr);
@@ -59,7 +56,7 @@ public class InterfaceConfiguration implements Parcelable {
public boolean isActive() {
try {
if(interfaceFlags.contains("up")) {
- for (byte b : addr.getAddress()) {
+ for (byte b : addr.getAddress().getAddress()) {
if (b != 0) return true;
}
}
@@ -79,13 +76,7 @@ public class InterfaceConfiguration implements Parcelable {
dest.writeString(hwAddr);
if (addr != null) {
dest.writeByte((byte)1);
- dest.writeByteArray(addr.getAddress());
- } else {
- dest.writeByte((byte)0);
- }
- if (mask != null) {
- dest.writeByte((byte)1);
- dest.writeByteArray(mask.getAddress());
+ dest.writeParcelable(addr, flags);
} else {
dest.writeByte((byte)0);
}
@@ -99,14 +90,7 @@ public class InterfaceConfiguration implements Parcelable {
InterfaceConfiguration info = new InterfaceConfiguration();
info.hwAddr = in.readString();
if (in.readByte() == 1) {
- try {
- info.addr = InetAddress.getByAddress(in.createByteArray());
- } catch (UnknownHostException e) {}
- }
- if (in.readByte() == 1) {
- try {
- info.mask = InetAddress.getByAddress(in.createByteArray());
- } catch (UnknownHostException e) {}
+ info.addr = in.readParcelable(null);
}
info.interfaceFlags = in.readString();
return info;
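
The hunk above replaces the separate addr/mask InetAddress fields with a single LinkAddress and parcels it directly. A minimal sketch of how a caller might populate the reworked class, assuming the LinkAddress(InetAddress, InetAddress) constructor that the BluetoothService change below relies on; the interface values are placeholders:

    import android.net.InterfaceConfiguration;
    import android.net.LinkAddress;
    import java.net.InetAddress;

    final class IfaceConfigSketch {
        // Illustration only: build a configuration with the new single addr field.
        static InterfaceConfiguration build() throws Exception {
            InterfaceConfiguration ifcg = new InterfaceConfiguration();
            ifcg.hwAddr = "00:11:22:33:44:55";                          // placeholder MAC
            InetAddress addr = InetAddress.getByName("192.168.44.1");   // placeholder address
            InetAddress mask = InetAddress.getByName("255.255.255.0");
            ifcg.addr = new LinkAddress(addr, mask);                    // replaces the old addr/mask pair
            ifcg.interfaceFlags = "up";
            return ifcg;
        }
    }
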
diff --git a/core/java/android/provider/MediaStore.java b/core/java/android/provider/MediaStore.java
index b3746da91c73..b59421e28858 100644
--- a/core/java/android/provider/MediaStore.java
+++ b/core/java/android/provider/MediaStore.java
@@ -2029,7 +2029,6 @@ public final class MediaStore {
* as needed. No other assumptions should be made about the meaning of the version.
* @param context Context to use for performing the query.
* @return A version string, or null if the version could not be determined.
- * @hide
*/
public static String getVersion(Context context) {
Cursor c = context.getContentResolver().query(
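
With @hide removed, getVersion(Context) becomes public API. A hedged sketch of the usage the javadoc describes (invalidating app-side caches when the version string changes); the preference key and the idea of storing it in SharedPreferences are invented for illustration:

    import android.content.Context;
    import android.content.SharedPreferences;
    import android.provider.MediaStore;

    final class MediaVersionCheck {
        // Returns true when the media provider's version string has changed since
        // the last check, i.e. when locally cached media metadata should be rebuilt.
        static boolean versionChanged(Context context, SharedPreferences prefs) {
            String current = MediaStore.getVersion(context);        // may be null per the javadoc
            String cached = prefs.getString("media_store_version", null);
            if (current == null || current.equals(cached)) return false;
            prefs.edit().putString("media_store_version", current).commit();
            return true;
        }
    }
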
diff --git a/core/java/android/server/BluetoothService.java b/core/java/android/server/BluetoothService.java
index 78d47fbf9ad4..df5097e18350 100644
--- a/core/java/android/server/BluetoothService.java
+++ b/core/java/android/server/BluetoothService.java
@@ -47,6 +47,7 @@ import android.content.SharedPreferences;
import android.content.res.Resources.NotFoundException;
import android.net.ConnectivityManager;
import android.net.InterfaceConfiguration;
+import android.net.LinkAddress;
import android.os.Binder;
import android.os.Handler;
import android.os.IBinder;
@@ -1703,12 +1704,15 @@ public class BluetoothService extends IBluetooth.Stub {
try {
ifcg = service.getInterfaceConfig(iface);
if (ifcg != null) {
- ifcg.mask = InetAddress.getByName(BLUETOOTH_NETMASK);
-
- if (ifcg.addr == null || ifcg.addr.equals(InetAddress.getByName("0.0.0.0"))) {
- ifcg.addr = InetAddress.getByName(address);
- ifcg.interfaceFlags = ifcg.interfaceFlags.replace("down", "up");
+ InetAddress mask = InetAddress.getByName(BLUETOOTH_NETMASK);
+ InetAddress addr = null;
+ if (ifcg.addr == null || (addr = ifcg.addr.getAddress()) == null ||
+ addr.equals(InetAddress.getByName("0.0.0.0")) ||
+ addr.equals(InetAddress.getByName("::0"))) {
+ addr = InetAddress.getByName(address);
}
+ ifcg.interfaceFlags = ifcg.interfaceFlags.replace("down", "up");
+ ifcg.addr = new LinkAddress(addr, mask);
ifcg.interfaceFlags = ifcg.interfaceFlags.replace("running", "");
ifcg.interfaceFlags = ifcg.interfaceFlags.replace(" "," ");
service.setInterfaceConfig(iface, ifcg);
diff --git a/core/java/android/view/GLES20Layer.java b/core/java/android/view/GLES20Layer.java
index 02304304c78d..6000a4a53bff 100644
--- a/core/java/android/view/GLES20Layer.java
+++ b/core/java/android/view/GLES20Layer.java
@@ -66,10 +66,11 @@ class GLES20Layer extends HardwareLayer {
@Override
void resize(int width, int height) {
if (!isValid() || width <= 0 || height <= 0) return;
- if (width > mLayerWidth || height > mLayerHeight) {
- mWidth = width;
- mHeight = height;
+ mWidth = width;
+ mHeight = height;
+
+ if (width != mLayerWidth || height != mLayerHeight) {
int[] layerInfo = new int[2];
GLES20Canvas.nResizeLayer(mLayer, width, height, layerInfo);
diff --git a/core/java/android/view/View.java b/core/java/android/view/View.java
index 6abfe8154e25..cac345dbcaff 100644
--- a/core/java/android/view/View.java
+++ b/core/java/android/view/View.java
@@ -6450,6 +6450,53 @@ public class View implements Drawable.Callback, KeyEvent.Callback, Accessibility
}
/**
+ * @hide
+ */
+ public void setFastX(float x) {
+ mTranslationX = x - mLeft;
+ mMatrixDirty = true;
+ }
+
+ /**
+ * @hide
+ */
+ public void setFastY(float y) {
+ mTranslationY = y - mTop;
+ mMatrixDirty = true;
+ }
+
+ /**
+ * @hide
+ */
+ public void setFastScaleX(float x) {
+ mScaleX = x;
+ mMatrixDirty = true;
+ }
+
+ /**
+ * @hide
+ */
+ public void setFastScaleY(float y) {
+ mScaleY = y;
+ mMatrixDirty = true;
+ }
+
+ /**
+ * @hide
+ */
+ public void setFastAlpha(float alpha) {
+ mAlpha = alpha;
+ }
+
+ /**
+ * @hide
+ */
+ public void setFastRotationY(float y) {
+ mRotationY = y;
+ mMatrixDirty = true;
+ }
+
+ /**
* Hit rectangle in parent's coordinates
*
* @param outRect The hit rectangle of the view.
@@ -6990,6 +7037,25 @@ public class View implements Drawable.Callback, KeyEvent.Callback, Accessibility
}
/**
+ * @hide
+ */
+ public void fastInvalidate() {
+ if ((mPrivateFlags & (DRAWN | HAS_BOUNDS)) == (DRAWN | HAS_BOUNDS) ||
+ (mPrivateFlags & DRAWING_CACHE_VALID) == DRAWING_CACHE_VALID ||
+ (mPrivateFlags & INVALIDATED) != INVALIDATED) {
+ if (mParent instanceof View) {
+ ((View) mParent).mPrivateFlags |= INVALIDATED;
+ }
+ mPrivateFlags &= ~DRAWN;
+ mPrivateFlags |= INVALIDATED;
+ mPrivateFlags &= ~DRAWING_CACHE_VALID;
+ if (mParent != null && mAttachInfo != null && mAttachInfo.mHardwareAccelerated) {
+ mParent.invalidateChild(this, null);
+ }
+ }
+ }
+
+ /**
* Used to indicate that the parent of this view should clear its caches. This functionality
* is used to force the parent to rebuild its display list (when hardware-accelerated),
* which is necessary when various parent-managed properties of the view change, such as
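
The new setFast* methods and fastInvalidate() are marked @hide, so they are framework-internal rather than public API. A rough sketch of the kind of per-frame animation fast path they appear to enable; the helper class below is hypothetical:

    import android.view.View;

    final class FastPropertyDriver {
        // Hypothetical framework-internal helper: push per-frame transform values
        // through the hidden fast setters, then use the lightweight invalidate path.
        static void applyFrame(View v, float x, float y, float alpha) {
            v.setFastX(x);        // sets translation from the absolute x and dirties the matrix
            v.setFastY(y);
            v.setFastAlpha(alpha);
            v.fastInvalidate();   // cheaper invalidation for hardware-accelerated views
        }
    }
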
diff --git a/core/java/android/webkit/WebView.java b/core/java/android/webkit/WebView.java
index e493785e5354..599d167b4855 100644
--- a/core/java/android/webkit/WebView.java
+++ b/core/java/android/webkit/WebView.java
@@ -1464,6 +1464,7 @@ public class WebView extends AbsoluteLayout
mListBoxDialog.dismiss();
mListBoxDialog = null;
}
+ if (mNativeClass != 0) nativeStopGL();
if (mWebViewCore != null) {
// Set the handlers to null before destroying WebViewCore so no
// more messages will be posted.
@@ -8323,6 +8324,7 @@ public class WebView extends AbsoluteLayout
private native void nativeSetSelectionPointer(boolean set,
float scale, int x, int y);
private native boolean nativeStartSelection(int x, int y);
+ private native void nativeStopGL();
private native Rect nativeSubtractLayers(Rect content);
private native int nativeTextGeneration();
// Never call this version except by updateCachedTextfield(String) -
diff --git a/core/java/com/android/internal/statusbar/StatusBarNotification.java b/core/java/com/android/internal/statusbar/StatusBarNotification.java
index 2b96bf69fff3..cb791be03164 100644
--- a/core/java/com/android/internal/statusbar/StatusBarNotification.java
+++ b/core/java/com/android/internal/statusbar/StatusBarNotification.java
@@ -35,12 +35,18 @@ if (truncatedTicker != null && truncatedTicker.length() > maxTickerLen) {
*/
public class StatusBarNotification implements Parcelable {
+ public static int PRIORITY_JIFFY_EXPRESS = -100;
+ public static int PRIORITY_NORMAL = 0;
+ public static int PRIORITY_ONGOING = 100;
+ public static int PRIORITY_SYSTEM = 200;
+
public String pkg;
public int id;
public String tag;
public int uid;
public int initialPid;
public Notification notification;
+ public int priority = PRIORITY_NORMAL;
public StatusBarNotification() {
}
@@ -56,6 +62,9 @@ public class StatusBarNotification implements Parcelable {
this.uid = uid;
this.initialPid = initialPid;
this.notification = notification;
+
+ this.priority = ((notification.flags & Notification.FLAG_ONGOING_EVENT) != 0)
+ ? PRIORITY_ONGOING : PRIORITY_NORMAL;
}
public StatusBarNotification(Parcel in) {
@@ -72,6 +81,7 @@ public class StatusBarNotification implements Parcelable {
}
this.uid = in.readInt();
this.initialPid = in.readInt();
+ this.priority = in.readInt();
this.notification = new Notification(in);
}
@@ -86,6 +96,7 @@ public class StatusBarNotification implements Parcelable {
}
out.writeInt(this.uid);
out.writeInt(this.initialPid);
+ out.writeInt(this.priority);
this.notification.writeToParcel(out, flags);
}
@@ -114,7 +125,7 @@ public class StatusBarNotification implements Parcelable {
public String toString() {
return "StatusBarNotification(package=" + pkg + " id=" + id + " tag=" + tag
- + " notification=" + notification + ")";
+ + " notification=" + notification + " priority=" + priority + ")";
}
public boolean isOngoing() {
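
A small sketch of how a status bar implementation might consume the new priority field; only the field and the PRIORITY_* constants come from the change above, the ranker class itself is hypothetical:

    import com.android.internal.statusbar.StatusBarNotification;
    import java.util.Collections;
    import java.util.Comparator;
    import java.util.List;

    final class NotificationRanker {
        // Order entries so higher-priority notifications (SYSTEM > ONGOING > NORMAL
        // > JIFFY_EXPRESS) appear first in the panel.
        static void sortByPriority(List<StatusBarNotification> entries) {
            Collections.sort(entries, new Comparator<StatusBarNotification>() {
                @Override
                public int compare(StatusBarNotification a, StatusBarNotification b) {
                    return b.priority - a.priority;   // descending priority
                }
            });
        }
    }
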
diff --git a/core/jni/android_media_AudioSystem.cpp b/core/jni/android_media_AudioSystem.cpp
index 40b933456f85..5147cfabc66b 100644
--- a/core/jni/android_media_AudioSystem.cpp
+++ b/core/jni/android_media_AudioSystem.cpp
@@ -64,10 +64,10 @@ android_media_AudioSystem_isMicrophoneMuted(JNIEnv *env, jobject thiz)
}
static jboolean
-android_media_AudioSystem_isStreamActive(JNIEnv *env, jobject thiz, jint stream)
+android_media_AudioSystem_isStreamActive(JNIEnv *env, jobject thiz, jint stream, jint inPastMs)
{
bool state = false;
- AudioSystem::isStreamActive(stream, &state);
+ AudioSystem::isStreamActive(stream, &state, inPastMs);
return state;
}
@@ -199,7 +199,7 @@ static JNINativeMethod gMethods[] = {
{"getParameters", "(Ljava/lang/String;)Ljava/lang/String;", (void *)android_media_AudioSystem_getParameters},
{"muteMicrophone", "(Z)I", (void *)android_media_AudioSystem_muteMicrophone},
{"isMicrophoneMuted", "()Z", (void *)android_media_AudioSystem_isMicrophoneMuted},
- {"isStreamActive", "(I)Z", (void *)android_media_AudioSystem_isStreamActive},
+ {"isStreamActive", "(II)Z", (void *)android_media_AudioSystem_isStreamActive},
{"setDeviceConnectionState", "(IILjava/lang/String;)I", (void *)android_media_AudioSystem_setDeviceConnectionState},
{"getDeviceConnectionState", "(ILjava/lang/String;)I", (void *)android_media_AudioSystem_getDeviceConnectionState},
{"setPhoneState", "(I)I", (void *)android_media_AudioSystem_setPhoneState},
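
The JNI registration changes the signature from (I)Z to (II)Z, adding an inPastMs window. A hedged sketch of the matching Java-side call; android.media.AudioSystem is a hidden framework class, so this is illustration only and assumes the two-argument Java method implied by the registration above:

    final class StreamActivityCheck {
        // Hidden framework API: the second argument asks whether the stream was
        // active at any point within the past N milliseconds, not just right now.
        static boolean musicActiveRecently() {
            return android.media.AudioSystem.isStreamActive(
                    android.media.AudioManager.STREAM_MUSIC, 5000 /* ms */);
        }
    }
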
diff --git a/core/res/res/drawable-hdpi/ic_audio_alarm.png b/core/res/res/drawable-hdpi/ic_audio_alarm.png
new file mode 100644
index 000000000000..1b41de4f70cb
--- /dev/null
+++ b/core/res/res/drawable-hdpi/ic_audio_alarm.png
Binary files differ
diff --git a/core/res/res/drawable-hdpi/ic_audio_alarm_mute.png b/core/res/res/drawable-hdpi/ic_audio_alarm_mute.png
new file mode 100644
index 000000000000..e31fdb857e18
--- /dev/null
+++ b/core/res/res/drawable-hdpi/ic_audio_alarm_mute.png
Binary files differ
diff --git a/core/res/res/drawable-hdpi/ic_audio_notification.png b/core/res/res/drawable-hdpi/ic_audio_notification.png
new file mode 100644
index 000000000000..00e8f8aec728
--- /dev/null
+++ b/core/res/res/drawable-hdpi/ic_audio_notification.png
Binary files differ
diff --git a/core/res/res/drawable-hdpi/ic_audio_notification_mute.png b/core/res/res/drawable-hdpi/ic_audio_notification_mute.png
new file mode 100644
index 000000000000..a350e1679fba
--- /dev/null
+++ b/core/res/res/drawable-hdpi/ic_audio_notification_mute.png
Binary files differ
diff --git a/core/res/res/drawable-hdpi/ic_audio_vol.png b/core/res/res/drawable-hdpi/ic_audio_vol.png
new file mode 100644
index 000000000000..cf3f3f54c514
--- /dev/null
+++ b/core/res/res/drawable-hdpi/ic_audio_vol.png
Binary files differ
diff --git a/core/res/res/drawable-hdpi/ic_audio_vol_mute.png b/core/res/res/drawable-hdpi/ic_audio_vol_mute.png
new file mode 100644
index 000000000000..c4ac4ef56618
--- /dev/null
+++ b/core/res/res/drawable-hdpi/ic_audio_vol_mute.png
Binary files differ
diff --git a/core/res/res/drawable-mdpi/ic_audio_alarm.png b/core/res/res/drawable-mdpi/ic_audio_alarm.png
new file mode 100644
index 000000000000..fab95aadf09b
--- /dev/null
+++ b/core/res/res/drawable-mdpi/ic_audio_alarm.png
Binary files differ
diff --git a/core/res/res/drawable-mdpi/ic_audio_alarm_mute.png b/core/res/res/drawable-mdpi/ic_audio_alarm_mute.png
new file mode 100644
index 000000000000..ca3ed933983e
--- /dev/null
+++ b/core/res/res/drawable-mdpi/ic_audio_alarm_mute.png
Binary files differ
diff --git a/core/res/res/drawable-mdpi/ic_audio_notification.png b/core/res/res/drawable-mdpi/ic_audio_notification.png
new file mode 100644
index 000000000000..b41ccd09cb15
--- /dev/null
+++ b/core/res/res/drawable-mdpi/ic_audio_notification.png
Binary files differ
diff --git a/core/res/res/drawable-mdpi/ic_audio_notification_mute.png b/core/res/res/drawable-mdpi/ic_audio_notification_mute.png
new file mode 100644
index 000000000000..f0b6d8ab18db
--- /dev/null
+++ b/core/res/res/drawable-mdpi/ic_audio_notification_mute.png
Binary files differ
diff --git a/core/res/res/drawable-mdpi/ic_audio_vol.png b/core/res/res/drawable-mdpi/ic_audio_vol.png
new file mode 100644
index 000000000000..049e92ab4126
--- /dev/null
+++ b/core/res/res/drawable-mdpi/ic_audio_vol.png
Binary files differ
diff --git a/core/res/res/drawable-mdpi/ic_audio_vol_mute.png b/core/res/res/drawable-mdpi/ic_audio_vol_mute.png
new file mode 100644
index 000000000000..be71492984b3
--- /dev/null
+++ b/core/res/res/drawable-mdpi/ic_audio_vol_mute.png
Binary files differ
diff --git a/core/res/res/drawable-xlarge-nodpi/default_wallpaper.jpg b/core/res/res/drawable-xlarge-nodpi/default_wallpaper.jpg
index 5acad947cc83..8729fe1678a2 100644
--- a/core/res/res/drawable-xlarge-nodpi/default_wallpaper.jpg
+++ b/core/res/res/drawable-xlarge-nodpi/default_wallpaper.jpg
Binary files differ
diff --git a/data/sounds/AudioPackage6.mk b/data/sounds/AudioPackage6.mk
index 67174171e80d..fd6bf17b8668 100755
--- a/data/sounds/AudioPackage6.mk
+++ b/data/sounds/AudioPackage6.mk
@@ -53,6 +53,7 @@ PRODUCT_COPY_FILES += \
$(LOCAL_PATH)/ringtones/ogg/Draco.ogg:system/media/audio/ringtones/Draco.ogg \
$(LOCAL_PATH)/ringtones/ogg/Eridani.ogg:system/media/audio/ringtones/Eridani.ogg \
$(LOCAL_PATH)/ringtones/ogg/Hydra.ogg:system/media/audio/ringtones/hydra.ogg \
+ $(LOCAL_PATH)/ringtones/ogg/Iridium.ogg:system/media/audio/ringtones/Iridium.ogg \
$(LOCAL_PATH)/ringtones/ogg/Lyra.ogg:system/media/audio/ringtones/Lyra.ogg \
$(LOCAL_PATH)/ringtones/ogg/Machina.ogg:system/media/audio/ringtones/Machina.ogg \
$(LOCAL_PATH)/ringtones/ogg/Orion.ogg:system/media/audio/ringtones/Orion.ogg \
diff --git a/data/sounds/effects/ogg/Effect_Tick.ogg b/data/sounds/effects/ogg/Effect_Tick.ogg
index b3790199b8b1..a997fe164ee4 100644
--- a/data/sounds/effects/ogg/Effect_Tick.ogg
+++ b/data/sounds/effects/ogg/Effect_Tick.ogg
Binary files differ
diff --git a/data/sounds/ringtones/ogg/Iridium.ogg b/data/sounds/ringtones/ogg/Iridium.ogg
new file mode 100755
index 000000000000..3de61fb95f28
--- /dev/null
+++ b/data/sounds/ringtones/ogg/Iridium.ogg
Binary files differ
diff --git a/docs/html/guide/topics/fundamentals/index.jd b/docs/html/guide/topics/fundamentals/index.jd
new file mode 100644
index 000000000000..de2e31249aaa
--- /dev/null
+++ b/docs/html/guide/topics/fundamentals/index.jd
@@ -0,0 +1,496 @@
+page.title=Application Fundamentals
+@jd:body
+
+<div id="qv-wrapper">
+<div id="qv">
+
+<h2>Quickview</h2>
+<ul>
+ <li>Android applications are composed of one or more application components (activities,
+services, content providers, and broadcast receivers)</li>
+ <li>Each component performs a different role in the overall application behavior, and each
+one can be activated individually (even by other applications)</li>
+ <li>The manifest file must declare all components in the application and should also declare
+all application requirements, such as the minimum version of Android required and any hardware
+configurations required</li>
+ <li>Non-code application resources (images, strings, layout files, etc.) should include
+alternatives for different device configurations (such as different strings for different
+languages and different layouts for different screen sizes)</li>
+</ul>
+
+
+<h2>In this document</h2>
+<ol>
+<li><a href="#Components">Application Components</a>
+ <ol>
+ <li><a href="#ActivatingComponents">Activating components</a></li>
+ </ol>
+</li>
+<li><a href="#Manifest">The Manifest File</a>
+ <ol>
+ <li><a href="#DeclaringComponents">Declaring components</a></li>
+ <li><a href="#DeclaringRequirements">Declaring application requirements</a></li>
+ </ol>
+</li>
+<li><a href="#Resources">Application Resources</a></li>
+</ol>
+</div>
+</div>
+
+<p>Android applications are written in the Java programming language. The Android SDK tools compile
+the code&mdash;along with any data and resource files&mdash;into an <i>Android package</i>, an
+archive file with an {@code .apk} suffix. All the code in a single {@code .apk} file is considered
+to be one application and is the file that Android-powered devices use to install the
+application.</p>
+
+<p>Once installed on a device, each Android application lives in its own security sandbox: </p>
+
+<ul>
+ <li>The Android operating system is a multi-user Linux system in which each application is a
+different user.</li>
+
+<li>By default, the system assigns each application a unique Linux user ID (the ID is used only by
+the system and is unknown to the application). The system sets permissions for all the files in an
+application so that only the user ID assigned to that application can access them. </li>
+
+<li>Each process has its own virtual machine (VM), so an application's code runs in isolation from
+other applications.</li>
+
+<li>By default, every application runs in its own Linux process. Android starts the process when any
+of the application's components need to be executed, then shuts down the process when it's no longer
+needed or when the system must recover memory for other applications.</li>
+</ul>
+
+<p>In this way, the Android system implements the <em>principle of least privilege</em>. That is,
+each application, by default, has access only to the components that it requires to do its work and
+no more. This creates a very secure environment in which an application cannot access parts of
+the system for which it is not given permission.</p>
+
+<p>However, there are ways for an application to share data with other applications and for an
+application to access system services:</p>
+
+<ul>
+ <li>It's possible to arrange for two applications to share the same Linux user ID, in which case
+they are able to access each other's files. To conserve system resources, applications with the
+same user ID can also arrange to run in the same Linux process and share the same VM (the
+applications must also be signed with the same certificate).</li>
+ <li>An application can request permission to access device data such as the user's
+contacts, SMS messages, the mountable storage (SD card), camera, Bluetooth, and more. All
+application permissions must be granted by the user at install time.</li>
+</ul>
+
+<p>That covers the basics regarding how an Android application exists within the system. The rest of
+this document introduces you to:</p>
+<ul>
+ <li>The core framework components that define your application.</li>
+ <li>The manifest file in which you declare components and required device features for your
+application.</li>
+ <li>Resources that are separate from the application code and allow your application to
+gracefully optimize its behavior for a variety of device configurations.</li>
+</ul>
+
+<p class="note"><strong>Tip:</strong> If you're new to Android development, we suggest that you
+follow the Beginner's Path link at the bottom of this page. For each document in the Application
+Fundamentals, the Beginner's Path points you to the document we suggest you read next, in order
+to get up to speed on the core Android concepts.</p>
+
+
+
+<h2 id="Components">Application Components</h2>
+
+<p>Application components are the essential building blocks of an Android application. Each
+component is a different point through which the system can enter your application. Not all
+components are actual entry points for the user and some depend on each other, but each one exists
+as its own entity and plays a specific role&mdash;each one is a unique building block that
+helps define your application's overall behavior.</p>
+
+<p>There are four different types of application components. Each type serves a distinct purpose
+and has a distinct lifecycle that defines how the component is created and destroyed.</p>
+
+<p>Here are the four types of application components:</p>
+
+<dl>
+
+<dt><b>Activities</b></dt>
+
+<dd>An <i>activity</i> represents a single screen with a user interface. For example,
+an email application might have one activity that shows a list of new
+emails, another activity to compose an email, and another activity for reading emails. Although
+the activities work together to form a cohesive user experience in the email application, each one
+is independent of the others. As such, a different application can start any one of these
+activities (if the email application allows it). For example, a camera application can start the
+activity in the email application that composes new mail, in order for the user to share a picture.
+
+<p>An activity is implemented as a subclass of {@link android.app.Activity} and you can learn more
+about it in the <a href="{@docRoot}guide/topics/fundamentals/activities.html">Activities</a>
+developer guide.</p>
+</dd>
+
+
+<dt><b>Services</b></dt>
+
+<dd>A <i>service</i> is a component that runs in the background to perform long-running
+operations or to perform work for remote processes. A service
+does not provide a user interface. For example, a service might play music in the background while
+the user is in a different application, or it might fetch data over the network without
+blocking user interaction with an activity. Another component, such as an activity, can start the
+service and let it run or bind to it in order to interact with it.
+
+<p>A service is implemented as a subclass of {@link android.app.Service} and you can learn more
+about it in the <a href="{@docRoot}guide/topics/fundamentals/services.html">Services</a> developer
+guide.</p>
+</dd>
+
+
+<dt><b>Content providers</b></dt>
+
+<dd>A <i>content provider</i> manages a shared set of application data. You can store the data in
+the file system, an SQLite database, on the web, or any other persistent storage location your
+application can access. Through the content provider, other applications can query or even modify
+the data (if the content provider allows it). For example, the Android system provides a content
+provider that manages the user's contact information. As such, any application with the proper
+permissions can query part of the content provider (such as {@link
+android.provider.ContactsContract.Data}) to read and write information about a particular person.
+
+<p>Content providers are also useful for reading and writing data that is private to your
+application and not shared. For example, the <a
+href="{@docRoot}resources/samples/NotePad/index.html">Note Pad</a> sample application uses a
+content provider to save notes.</p>
+
+<p>A content provider is implemented as a subclass of {@link android.content.ContentProvider}
+and must implement a standard set of APIs that enable other applications to perform
+transactions. For more information, see the <a
+href="{@docRoot}guide/topics/providers/content-providers.html">Content Providers</a> developer
+guide.</p>
+</dd>
+
+
+<dt><b>Broadcast receivers</b></dt>
+
+<dd>A <i>broadcast receiver</i> is a component that responds to system-wide broadcast
+announcements. Many broadcasts originate from the system&mdash;for example, a broadcast announcing
+that the screen has turned off, the battery is low, or a picture was captured.
+Applications can also initiate broadcasts&mdash;for example, to let other applications know that
+some data has been downloaded to the device and is available for them to use. Although broadcast
+receivers don't display a user interface, they may <a
+href="{@docRoot}guide/topics/ui/notifiers/notifications.html">create a status bar notification</a>
+to alert the user when a broadcast event occurs. More commonly, though, a broadcast receiver is
+just a "gateway" to other components and is intended to do a very minimal amount of work. For
+instance, it might initiate a service to perform some work based on the event.
+
+<p>A broadcast receiver is implemented as a subclass of {@link android.content.BroadcastReceiver}
+and each broadcast is delivered as an {@link android.content.Intent} object. For more information,
+see the <a
+href="{@docRoot}guide/topics/intents/intents-filters.html">Intents and Intent Filters</a>
+developer guide.</p>
+</dd>
+
+</dl>
+
+
+
+<p>A unique aspect of the Android system design is that any application can start another
+application’s component. For example, if you want the user to capture a
+photo with the device camera, there's probably another application that does that and your
+application can use it, instead of developing an activity to capture a photo yourself. You don't
+need to incorporate or even link to the code from the camera application.
+Instead, you can simply start the activity in the camera application that captures a
+photo. When complete, the photo is even returned to your application so you can use it. To the user,
+it seems as if the camera is actually a part of your application.</p>
+
+<p>When the system starts a component, it starts the process for that application (if it's not
+already running) and instantiates the classes needed for the component. For example, if your
+application starts the activity in the camera application that captures a photo, that activity
+runs in the process that belongs to the camera application, not in your application's process.
+Therefore, unlike applications on most other systems, Android applications don't have a single entry
+point (there's no {@code main()} function, for example).</p>
+
+<p>Because the system runs each application in a separate process with file permissions that
+restrict access to other applications, your application cannot directly activate a component from
+another application. The Android system, however, can. So, to activate a component in
+another application, you must deliver a message to the system that specifies your <em>intent</em> to
+start a particular component. The system then activates the component for you.</p>
+
+
+<h3 id="ActivatingComponents">Activating Components</h3>
+
+<p>Three of the four component types&mdash;activities, services, and
+broadcast receivers&mdash;are activated by an asynchronous message called an <em>intent</em>.
+Intents bind individual components to each other at runtime (you can think of them
+as the messengers that request an action from other components), whether the component belongs
+to your application or another.</p>
+
+<p>An intent is defined by an {@link android.content.Intent} object, which defines a message to
+activate either a specific component or a specific <em>type</em> of component&mdash;an intent
+can be either explicit or implicit, respectively.</p>
+
+<p>For activities and services, an intent defines the action to perform (for example, to "view" or
+"send" something) and may specify the URI of the data to act on (among other things that the
+component being started might need to know). For example, an intent might convey a request for an
+activity to present an image to the user or to open a web page. In some cases, you can start a
+component in order to receive a result, in which case, the component that is started also returns
+the result in an {@link android.content.Intent} object (for example, you can issue an intent to let
+the user pick a personal contact and have it returned to you&mdash;the return intent includes a
+URI pointing to the chosen contact). For broadcast receivers, the intent simply defines the
+announcement being broadcast (for example, a broadcast to indicate the device battery is low
+includes only a known action string that indicates "battery is low").</p>
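
As a rough illustration of the explicit and implicit forms just described (ComposeActivity is a placeholder class, not part of this document):

    import android.app.Activity;
    import android.content.Intent;
    import android.net.Uri;
    import android.os.Bundle;

    public class LaunchExampleActivity extends Activity {
        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);

            // Explicit intent: name the target component directly.
            startActivity(new Intent(this, ComposeActivity.class));

            // Implicit intent: describe an action and data and let the system
            // choose a component whose intent filter matches.
            startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse("http://www.android.com")));
        }
    }
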
+
+<p>The remaining type of component, content provider, is not activated by intents. Rather, it is
+activated when targeted by a request from a {@link android.content.ContentResolver}. The content
+resolver handles all direct transactions with the content provider so that the component that's
+performing transactions with the provider doesn't need to and instead calls methods on the {@link
+android.content.ContentResolver} object. This leaves a layer of abstraction between the content
+provider and the component requesting information (for security).</p>
+
+<p>For more information about using intents, see the <a
+href="{@docRoot}guide/topics/intents/intents-filters.html">Intents and
+Intent Filters</a> document. More information about activating specific components is also provided
+in the <a href="{@docRoot}guide/topics/fundamentals/activities.html">Activities</a>, <a
+href="{@docRoot}guide/topics/fundamentals/services.html">Services</a>, and <a
+href="{@docRoot}guide/topics/providers/content-providers.html">Content Providers</a> developer
+guides.</p>
+
+
+<h2 id="Manifest">The Manifest File</h2>
+
+<p>Before the Android system can start an application component, the system must know that the
+component exists by reading the application's {@code AndroidManifest.xml} file (the "manifest"
+file). Your application must declare all its components in this file, which must be at the root of
+the application project directory.</p>
+
+<p>The manifest does a number of things in addition to declaring the application's components,
+such as:</p>
+<ul>
+ <li>Identify any user permissions the application requires, such as Internet access or
+read-access to the user's contacts.</li>
+ <li>Declare the minimum <a href="{@docRoot}guide/appendix/api-levels.html">API Level</a>
+required by the application, based on which APIs the application uses.</li>
+ <li>Declare hardware and software features used or required by the application, such as a camera,
+bluetooth services, or a multitouch screen.</li>
+ <li>API libraries the application needs to be linked against (other than the Android framework
+APIs), such as the <a
+href="http://code.google.com/android/add-ons/google-apis/maps-overview.html">Google Maps
+library</a>.</li>
+ <li>And more</li>
+</ul>
+
+
+<h3 id="DeclaringComponents">Declaring components</h3>
+
+<p>The primary task of the manifest is to inform the system about the application's components. For
+example, a manifest file can declare an activity as follows: </p>
+
+<pre>
+&lt;?xml version="1.0" encoding="utf-8"?&gt;
+&lt;manifest ... &gt;
+ &lt;application android:icon="@drawable/app_icon.png" ... &gt;
+ &lt;activity android:name="com.example.project.ExampleActivity"
+ android:label="@string/example_label" ... &gt;
+ &lt;/activity&gt;
+ ...
+ &lt;/application&gt;
+&lt;/manifest&gt;</pre>
+
+<p>In the <code><a
+href="{@docRoot}guide/topics/manifest/application-element.html">&lt;application&gt;</a></code>
+element, the {@code android:icon} attribute points to resources for an icon that identifies the
+application.</p>
+
+<p>In the <code><a
+href="{@docRoot}guide/topics/manifest/activity-element.html">&lt;activity&gt;</a></code> element,
+the {@code android:name} attribute specifies the fully qualified class name of the {@link
+android.app.Activity} subclass and the {@code android:label} attribute specifies a string
+to use as the user-visible label for the activity.</p>
+
+<p>You must declare all application components this way:</p>
+<ul>
+ <li><code><a
+href="{@docRoot}guide/topics/manifest/activity-element.html">&lt;activity&gt;</a></code> elements
+for activities</li>
+ <li><code><a
+href="{@docRoot}guide/topics/manifest/service-element.html">&lt;service&gt;</a></code> elements for
+services</li>
+ <li><code><a
+href="{@docRoot}guide/topics/manifest/receiver-element.html">&lt;receiver&gt;</a></code> elements
+for broadcast receivers</li>
+ <li><code><a
+href="{@docRoot}guide/topics/manifest/provider-element.html">&lt;provider&gt;</a></code> elements
+for content providers</li>
+</ul>
+
+<p>Activities, services, and content providers that you include in your source but do not declare
+in the manifest are not visible to the system and, consequently, can never run. However,
+broadcast
+receivers can be either declared in the manifest or created dynamically in code (as
+{@link android.content.BroadcastReceiver} objects) and registered with the system by calling
+{@link android.content.Context#registerReceiver registerReceiver()}.</p>
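
A minimal sketch of the dynamic registration path mentioned above, using the system low-battery broadcast as the example event:

    import android.app.Activity;
    import android.content.BroadcastReceiver;
    import android.content.Context;
    import android.content.Intent;
    import android.content.IntentFilter;

    public class BatteryWatcherActivity extends Activity {
        private final BroadcastReceiver mReceiver = new BroadcastReceiver() {
            @Override
            public void onReceive(Context context, Intent intent) {
                // React to the low-battery broadcast here.
            }
        };

        @Override
        protected void onResume() {
            super.onResume();
            // Dynamic registration: no <receiver> element in the manifest is needed.
            registerReceiver(mReceiver, new IntentFilter(Intent.ACTION_BATTERY_LOW));
        }

        @Override
        protected void onPause() {
            super.onPause();
            unregisterReceiver(mReceiver);
        }
    }
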
+
+<p>For more about how to structure the manifest file for your application, see the <a
+href="{@docRoot}guide/topics/manifest/manifest-intro.html">The AndroidManifest.xml File</a>
+documentation. </p>
+
+
+
+<h3 id="DeclaringComponentCapabilities">Declaring component capabilities</h3>
+
+<p>As discussed above, in <a href="#ActivatingComponents">Activating Components</a>, you can use an
+{@link android.content.Intent} to start activities, services, and broadcast receivers. You can do so
+by explicitly naming the target component (using the component class name) in the intent. However,
+the real power of intents lies in the concept of intent actions. With intent actions, you simply
+describe the type of action you want to perform (and optionally, the data upon which you’d like to
+perform the action) and allow the system to find a component on the device that can perform the
+action and start it. If there are multiple components that can perform the action described by the
+intent, then the user selects which one to use.</p>
+
+<p>The way the system identifies the components that can respond to an intent is by comparing the
+intent received to the <i>intent filters</i> provided in the manifest file of other applications on
+the device.</p>
+
+<p>When you declare a component in your application's manifest, you can optionally include
+intent filters that declare the capabilities of the component so it can respond to intents
+from other applications. You can declare an intent filter for your component by
+adding an <a href="{@docRoot}guide/topics/manifest/intent-filter-element.html">{@code
+&lt;intent-filter&gt;}</a> element as a child of the component's declaration element.</p>
+
+<p>For example, an email application with an activity for composing a new email might declare an
+intent filter in its manifest entry to respond to "send" intents (in order to send email). An
+activity in your application can then create an intent with the “send” action ({@link
+android.content.Intent#ACTION_SEND}), which the system matches to the email application’s “send”
+activity and launches it when you invoke the intent with {@link android.app.Activity#startActivity
+startActivity()}.</p>
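
A minimal sketch of the sending side described above; the chooser title is arbitrary:

    import android.app.Activity;
    import android.content.Intent;

    public class ShareExampleActivity extends Activity {
        // Build the "send" intent; the system resolves it against intent filters
        // declared by installed applications (such as an email client).
        private void shareText(String message) {
            Intent send = new Intent(Intent.ACTION_SEND);
            send.setType("text/plain");
            send.putExtra(Intent.EXTRA_TEXT, message);
            startActivity(Intent.createChooser(send, "Send message using"));
        }
    }
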
+
+<p>For more about creating intent filters, see the <a
+href="{@docRoot}guide/topics/intents/intents-filters.html">Intents and Intent Filters</a> document.
+</p>
+
+
+
+<h3 id="DeclaringRequirements">Declaring application requirements</h3>
+
+<p>There are a variety of devices powered by Android and not all of them provide the
+same features and capabilities. In order to prevent your application from being installed on devices
+that lack features needed by your application, it's important that you clearly define a profile for
+the types of devices your application supports by declaring device and software requirements in your
+manifest file. Most of these declarations are informational only and the system does not read
+them, but external services such as Android Market do read them in order to provide filtering
+for users when they search for applications from their device.</p>
+
+<p>For example, if your application requires a camera and uses APIs introduced in Android 2.1 (<a
+href="{@docRoot}guide/appendix/api-levels.html">API Level</a> 7), you should declare these as
+requirements in your manifest file. That way, devices that do <em>not</em> have a camera and have an
+Android version <em>lower</em> than 2.1 cannot install your application from Android Market.</p>
+
+<p>However, you can also declare that your application uses the camera, but does not
+<em>require</em> it. In that case, your application must perform a check at runtime to determine
+if the device has a camera and disable any features that use the camera if one is not available.</p>
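
A short sketch of that runtime check, assumed to run inside an Activity (or any other Context):

    import android.app.Activity;
    import android.content.pm.PackageManager;
    import android.os.Bundle;

    public class CameraAwareActivity extends Activity {
        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            boolean hasCamera =
                    getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA);
            if (!hasCamera) {
                // Hide or disable the parts of the UI that depend on the camera.
            }
        }
    }
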
+
+<p>Here are some of the important device characteristics that you should consider as you design and
+develop your application:</p>
+
+<dl>
+ <dt>Screen size and density</dt>
+ <dd>In order to categorize devices by their screen type, Android defines two characteristics for
+each device: screen size (the physical dimensions of the screen) and screen density (the physical
+density of the pixels on the screen, or dpi&mdash;dots per inch). To simplify all the different
+types of screen configurations, the Android system generalizes them into select groups that make
+them easier to target.
+<p>The screen sizes are: small, normal, large, and extra large.<br/>
+The screen densities are: low density, medium density, high density, and extra high density.</p>
+
+<p>By default, your application is compatible with all screen sizes and densities,
+because the Android system makes the appropriate adjustments to your UI layout and image
+resources. However, you should create specialized layouts for certain screen sizes and provide
+specialized images for certain densities, using alternative layout resources, and by declaring in
+your manifest exactly which screen sizes your application supports with the <a
+href="{@docRoot}guide/topics/manifest/supports-screens.html">{@code
+&lt;supports-screens&gt;}</a> element.</p>
+<p>For more information, see the <a
+href="{@docRoot}guide/practices/screens_support.html">Supporting Multiple Screens</a>
+document.</p></dd>
+
+ <dt>Input configurations</dt>
+ <dd>Many devices provide a different type of user input mechanism, such as a hardware keyboard, a
+trackball, or a five-way navigation pad. If your application requires a particular kind of input
+hardware, then you should declare it in your manifest with the <a
+href="{@docRoot}guide/topics/manifest/uses-configuration-element.html">{@code
+&lt;uses-configuration&gt;}</a> element. However, it is rare that an application should require
+a certain input configuration.</dd>
+
+ <dt>Device features</dt>
+ <dd>There are many hardware and software features that may or may not exist on a given
+Android-powered device, such as a camera, a light sensor, bluetooth, a certain
+version of OpenGL, or the fidelity of the touchscreen. You should never assume that a certain
+feature is available on all Android-powered devices (other than the availability of the standard
+Android library), so you should declare any features used by your application with the <a
+href="{@docRoot}guide/topics/manifest/uses-feature-element.html">{@code &lt;uses-feature&gt;}</a>
+element.</dd>
+
+ <dt>Platform Version</dt>
+ <dd>Different Android-powered devices often run different versions of the Android platform,
+such as Android 1.6 or Android 2.3. Each successive version often includes additional APIs not
+available in the previous version. In order to indicate which set of APIs are available, each
+platform version specifies an <a
+href="{@docRoot}guide/appendix/api-levels.html">API Level</a> (for example, Android 1.0 is API Level
+1 and Android 2.3 is API Level 9). If you use any APIs that were added to the platform after
+version 1.0, you should declare the minimum API Level in which those APIs were introduced using the
+<a href="{@docRoot}guide/topics/manifest/uses-sdk.html">{@code &lt;uses-sdk&gt;}</a> element.</dd>
+</dl>
+
+<p>It's important that you declare all such requirements for your application, because, when you
+distribute your application on Android Market, Market uses these declarations to filter which
+applications are available on each device. As such, your application should be available only to
+devices that meet all your application requirements.</p>
+
+<p>For more information about how Android Market filters applications based on these (and other)
+requirements, see the <a href="{@docRoot}guide/appendix/market-filters.html">Market Filters</a>
+document.</p>
+
+
+
+<h2 id="Resources">Application Resources</h2>
+
+<p>An Android application is composed of more than just code&mdash;it requires resources that are
+separate from the source code, such as images, audio files, and anything relating to the visual
+presentation of the application. For example, you should define animations, menus, styles, colors,
+and the layout of activity user interfaces with XML files. Using application resources makes it easy
+to update various characteristics of your application without modifying code and&mdash;by providing
+sets of alternative resources&mdash;enables you to optimize your application for a variety of
+device configurations (such as different languages and screen sizes).</p>
+
+<p>For every resource that you include in your Android project, the SDK build tools define a unique
+integer ID, which you can use to reference the resource from your application code or from
+other resources defined in XML. For example, if your application contains an image file named {@code
+logo.png} (saved in the {@code res/drawable/} directory), the SDK tools generate a resource ID
+named {@code R.drawable.logo}, which you can use to reference the image and insert it in your
+user interface.</p>
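
For example, assuming the logo.png file above and an ImageView with the id logo_view in the current layout (both names are placeholders), the generated ID can be used like this:

    import android.app.Activity;
    import android.os.Bundle;
    import android.widget.ImageView;

    public class LogoActivity extends Activity {
        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            setContentView(R.layout.main);                                  // placeholder layout
            ImageView logoView = (ImageView) findViewById(R.id.logo_view);  // placeholder id
            logoView.setImageResource(R.drawable.logo);                     // the generated resource ID
        }
    }
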
+
+<p>One of the most important aspects of providing resources separate from your source code
+is the ability for you to provide alternative resources for different device
+configurations. For example, by defining UI strings in XML, you can translate the strings into other
+languages and save those strings in separate files. Then, based on a language <em>qualifier</em>
+that you append to the resource directory's name (such as {@code res/values-fr/} for French string
+values) and the user's language setting, the Android system applies the appropriate language strings
+to your UI.</p>
+
+<p>Android supports many different <em>qualifiers</em> for your alternative resources. The
+qualifier is a short string that you include in the name of your resource directories in order to
+define the device configuration for which those resources should be used. As another
+example, you should often create different layouts for your activities, depending on the
+device's screen orientation and size. For example, when the device screen is in portrait
+orientation (tall), you might want a layout with buttons to be vertical, but when the screen is in
+landscape orientation (wide), the buttons should be aligned horizontally. To change the layout
+depending on the orientation, you can define two different layouts and apply the appropriate
+qualifier to each layout's directory name. Then, the system automatically applies the appropriate
+layout depending on the current device orientation.</p>
+
+<p>For more about the different kinds of resources you can include in your application and how
+to create alternative resources for various device configurations, see the <a
+href="{@docRoot}guide/topics/resources/index.html">Application Resources</a> developer guide.</p>
+
+
+<h2>Beginner's Path</h2>
+
+<p>For a close look at implementing activities&mdash;the components your users use to
+interact with your application&mdash;continue with the <b><a
+href="{@docRoot}guide/topics/fundamentals/activities.html">Activities</a></b> document.</p>
+
diff --git a/docs/html/guide/topics/fundamentals/processes-and-threads.jd b/docs/html/guide/topics/fundamentals/processes-and-threads.jd
new file mode 100644
index 000000000000..c35108ef1d90
--- /dev/null
+++ b/docs/html/guide/topics/fundamentals/processes-and-threads.jd
@@ -0,0 +1,425 @@
+page.title=Processes and Threads
+parent.title=Application Fundamentals
+parent.link=index.html
+@jd:body
+
+<div id="qv-wrapper">
+<div id="qv">
+<h2>Quickview</h2>
+<ul>
+ <li>Every application runs in its own process and all components of the application run in that
+process, by default</li>
+ <li>Any slow, blocking operations in an activity should be done in a new thread, to avoid slowing
+down the user interface</li>
+</ul>
+
+<h2>In this document</h2>
+<ol>
+<li><a href="#Processes">Processes</a>
+ <ol>
+ <li><a href="#Lifecycle">Process lifecycle</a></li>
+ </ol>
+</li>
+<li><a href="#Threads">Threads</a>
+ <ol>
+ <li><a href="#WorkerThreads">Worker threads</a></li>
+ <li><a href="#ThreadSafe">Thread-safe methods</a></li>
+ </ol>
+</li>
+<li><a href="#IPC">Interprocess Communication</a></li>
+</ol>
+
+</div>
+</div>
+
+<p>When an application component starts and the application does not have any other components
+running, the Android system starts a new Linux process for the application with a single thread of
+execution. By default, all components of the same application run in the same process and thread
+(called the "main" thread). If an application component starts and there already exists a process
+for that application (because another component from the application exists), then the component is
+started within that process and uses the same thread of execution. However, you can arrange for
+different components in your application to run in separate processes, and you can create additional
+threads for any process.</p>
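
As a rough sketch of the additional-thread case mentioned above (the download and UI helpers are placeholders), a blocking operation can be moved off the main thread and its result posted back:

    import android.app.Activity;

    public class DownloadActivity extends Activity {
        // Run blocking work off the main thread, then post the result back to the
        // UI thread, where views may be touched safely.
        private void startDownload() {
            new Thread(new Runnable() {
                @Override
                public void run() {
                    final String result = downloadSomething();    // placeholder blocking call
                    runOnUiThread(new Runnable() {
                        @Override
                        public void run() {
                            showResult(result);                   // placeholder UI update
                        }
                    });
                }
            }).start();
        }

        private String downloadSomething() { return "done"; }
        private void showResult(String result) { /* update a view */ }
    }
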
+
+<p>This document discusses how processes and threads work in an Android application.</p>
+
+
+<h2 id="Processes">Processes</h2>
+
+<p>By default, all components of the same application run in the same process and most applications
+should not change this. However, if you find that you need to control which process a certain
+component belongs to, you can do so in the manifest file.</p>
+
+<p>The manifest entry for each type of component element&mdash;<a
+href="{@docRoot}guide/topics/manifest/activity-element.html">{@code
+&lt;activity&gt;}</a>, <a href="{@docRoot}guide/topics/manifest/service-element.html">{@code
+&lt;service&gt;}</a>, <a href="{@docRoot}guide/topics/manifest/receiver-element.html">{@code
+&lt;receiver&gt;}</a>, and <a href="{@docRoot}guide/topics/manifest/provider-element.html">{@code
+&lt;provider&gt;}</a>&mdash;supports an {@code android:process} attribute that can specify a
+process in which that component should run. You can set this attribute so that each component runs
+in its own process or so that some components share a process while others do not. You can also set
+{@code android:process} so that components of different applications run in the same
+process&mdash;provided that the applications share the same Linux user ID and are signed with the
+same certificates.</p>
+
+<p>The <a href="{@docRoot}guide/topics/manifest/application-element.html">{@code
+&lt;application&gt;}</a> element also supports an {@code android:process} attribute, to set a
+default value that applies to all components.</p>
+
+<p>Android might decide to shut down a process at some point, when memory is low and required by
+other processes that are more immediately serving the user. Application
+components running in the process that's killed are consequently destroyed. A process is started
+again for those components when there's again work for them to do.</p>
+
+<p>When deciding which processes to kill, the Android system weighs their relative importance to
+the user. For example, it more readily shuts down a process hosting activities that are no longer
+visible on screen than a process hosting visible activities. The decision whether to terminate a
+process, therefore, depends on the state of the components running in that process. The rules used
+to decide which processes to terminate are discussed below.</p>
+
+
+<h3 id="Lifecycle">Process lifecycle</h3>
+
+<p>The Android system tries to maintain an application process for as long as possible, but
+eventually needs to remove old processes to reclaim memory for new or more important processes. To
+determine which processes to keep
+and which to kill, the system places each process into an "importance hierarchy" based on the
+components running in the process and the state of those components. Processes with the lowest
+importance are eliminated first, then those with the next lowest importance, and so on, as necessary
+to recover system resources.</p>
+
+<p>There are five levels in the importance hierarchy. The following list presents the different
+types of processes in order of importance (the first process is <em>most important</em> and is
+<em>killed last</em>):</p>
+
+<ol>
+ <li><b>Foreground process</b>
+ <p>A process that is required for what the user is currently doing. A
+ process is considered to be in the foreground if any of the following conditions are true:</p>
+
+ <ul>
+ <li>It hosts an {@link android.app.Activity} that the user is interacting with (the {@link
+android.app.Activity}'s {@link android.app.Activity#onResume onResume()} method has been
+called).</li>
+
+ <li>It hosts a {@link android.app.Service} that's bound to the activity that the user is
+interacting with.</li>
+
+ <li>It hosts a {@link android.app.Service} that's running "in the foreground"&mdash;the
+service has called {@link android.app.Service#startForeground startForeground()}.</li>
+
+ <li>It hosts a {@link android.app.Service} that's executing one of its lifecycle
+callbacks ({@link android.app.Service#onCreate onCreate()}, {@link android.app.Service#onStart
+onStart()}, or {@link android.app.Service#onDestroy onDestroy()}).</li>
+
+ <li>It hosts a {@link android.content.BroadcastReceiver} that's executing its {@link
+ android.content.BroadcastReceiver#onReceive onReceive()} method.</li>
+ </ul>
+
+ <p>Generally, only a few foreground processes exist at any given time. They are killed only as
+a last resort&mdash;if memory is so low that they cannot all continue to run. At that point, the
+device has reached a memory paging state, so killing some foreground processes is required to keep
+the user interface responsive.</p></li>
+
+ <li><b>Visible process</b>
+ <p>A process that doesn't have any foreground components, but still can
+ affect what the user sees on screen. A process is considered to be visible if either of the
+ following conditions are true:</p>
+
+ <ul>
+ <li>It hosts an {@link android.app.Activity} that is not in the foreground, but is still
+visible to the user (its {@link android.app.Activity#onPause onPause()} method has been called).
+This might occur, for example, if the foreground activity started a dialog, which allows the
+previous activity to be seen behind it.</li>
+
+ <li>It hosts a {@link android.app.Service} that's bound to a visible (or foreground)
+activity.</li>
+ </ul>
+
+ <p>A visible process is considered extremely important and will not be killed unless doing so
+is required to keep all foreground processes running. </p>
+ </li>
+
+ <li><b>Service process</b>
+ <p>A process that is running a service that has been started with the {@link
+android.content.Context#startService startService()} method and does not fall into either of the two
+higher categories. Although service processes are not directly tied to anything the user sees, they
+are generally doing things that the user cares about (such as playing music in the background or
+downloading data on the network), so the system keeps them running unless there's not enough memory
+to retain them along with all foreground and visible processes. </p>
+ </li>
+
+ <li><b>Background process</b>
+ <p>A process holding an activity that's not currently visible to the user (the activity's
+{@link android.app.Activity#onStop onStop()} method has been called). These processes have no direct
+impact on the user experience, and the system can kill them at any time to reclaim memory for a
+foreground,
+visible, or service process. Usually there are many background processes running, so they are kept
+in an LRU (least recently used) list to ensure that the process with the activity that was most
+recently seen by the user is the last to be killed. If an activity implements its lifecycle methods
+correctly, and saves its current state, killing its process will not have a visible effect on
+the user experience, because when the user navigates back to the activity, the activity restores
+all of its visible state. See the <a
+href="{@docRoot}guide/topics/fundamentals/activities.html#SavingActivityState">Activities</a>
+document for information about saving and restoring state.</p>
+ </li>
+
+ <li><b>Empty process</b>
+ <p>A process that doesn't hold any active application components. The only reason to keep this
+kind of process alive is for caching purposes, to improve startup time the next time a component
+needs to run in it. The system often kills these processes in order to balance overall system
+resources between process caches and the underlying kernel caches.</p>
+ </li>
+</ol>
+
+
+ <p>Android ranks a process at the highest level it can, based upon the importance of the
+components currently active in the process. For example, if a process hosts a service and a visible
+activity, the process is ranked as a visible process, not a service process.</p>
+
+ <p>In addition, a process's ranking might be increased because other processes are dependent on
+it&mdash;a process that is serving another process can never be ranked lower than the process it is
+serving. For example, if a content provider in process A is serving a client in process B, or if a
+service in process A is bound to a component in process B, process A is always considered at least
+as important as process B.</p>
+
+ <p>Because a process running a service is ranked higher than a process with background activities,
+an activity that initiates a long-running operation might do well to start a <a
+href="{@docRoot}guide/topics/fundamentals/services.html">service</a> for that operation, rather than
+simply create a worker thread&mdash;particularly if the operation will likely outlast the activity.
+For example, an activity that's uploading a picture to a web site should start a service to perform
+the upload so that the upload can continue in the background even if the user leaves the activity.
+Using a service guarantees that the operation will have at least "service process" priority,
+regardless of what happens to the activity. This is the same reason that broadcast receivers should
+employ services rather than simply put time-consuming operations in a thread.</p>
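+
+<p>For example, here is a minimal sketch of a click listener that hands the upload off to a started
+service instead of a raw thread (the {@code UploadService} class and the {@code mImageUri} field are
+hypothetical names used only for illustration):</p>
+
+<pre>
+public void onClick(View v) {
+    // UploadService is assumed to be a Service that performs the upload off the UI thread
+    Intent intent = new Intent(v.getContext(), UploadService.class);
+    intent.putExtra("imageUri", mImageUri.toString());
+    v.getContext().startService(intent);
+}
+</pre>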
+
+
+
+
+<h2 id="Threads">Threads</h2>
+
+<p>When an application is launched, the system creates a thread of execution for the application,
+called "main." This thread is very important because it is in charge of dispatching events to
+the appropriate user interface widgets, including drawing events. It is also the thread in which
+your application interacts with components from the Android UI toolkit (components from the {@link
+android.widget} and {@link android.view} packages). As such, the main thread is also sometimes
+called the UI thread.</p>
+
+<p>The system does <em>not</em> create a separate thread for each instance of a component. All
+components that run in the same process are instantiated in the UI thread, and system calls to
+each component are dispatched from that thread. Consequently, methods that respond to system
+callbacks (such as {@link android.view.View#onKeyDown onKeyDown()} to report user actions
+or a lifecycle callback method) always run in the UI thread of the process.</p>
+
+<p>For instance, when the user touches a button on the screen, your app's UI thread dispatches the
+touch event to the widget, which in turn sets its pressed state and posts an invalidate request to
+the event queue. The UI thread dequeues the request and notifies the widget that it should redraw
+itself.</p>
+
+<p>When your app performs intensive work in response to user interaction, this single thread model
+can yield poor performance unless you implement your application properly. Specifically, if
+everything is happening in the UI thread, performing long operations such as network access or
+database queries will block the whole UI. When the thread is blocked, no events can be dispatched,
+including drawing events. From the user's perspective, the
+application appears to hang. Even worse, if the UI thread is blocked for more than a few seconds
+(about 5 seconds currently), the user is presented with the infamous "<a
+href="{@docRoot}guide/practices/design/responsiveness.html">application not
+responding</a>" (ANR) dialog. The user might then decide to quit your application and perhaps even
+uninstall it.</p>
+
+<p>Additionally, the Android UI toolkit is <em>not</em> thread-safe. So, you must not manipulate
+your UI from a worker thread&mdash;you must do all manipulation of your user interface from the UI
+thread. Thus, there are simply two rules to Android's single thread model:</p>
+
+<ol>
+<li>Do not block the UI thread</li>
+<li>Do not access the Android UI toolkit from outside the UI thread</li>
+</ol>
+
+<h3 id="WorkerThreads">Worker threads</h3>
+
+<p>Because of the single thread model described above, it's vital to the responsiveness of your
+application's UI that you do not block the UI thread. If you have operations to perform
+that are not instantaneous, you should make sure to do them in separate threads ("background" or
+"worker" threads).</p>
+
+<p>For example, below is some code for a click listener that downloads an image from a separate
+thread and displays it in an {@link android.widget.ImageView}:</p>
+
+<pre>
+public void onClick(View v) {
+ new Thread(new Runnable() {
+ public void run() {
+ Bitmap b = loadImageFromNetwork("http://example.com/image.png");
+ mImageView.setImageBitmap(b);
+ }
+ }).start();
+}
+</pre>
+
+<p>At first, this seems to work fine, because it creates a new thread to handle the network
+operation. However, it violates the second rule of the single-threaded model: <em>do not access the
+Android UI toolkit from outside the UI thread</em>&mdash;this sample modifies the {@link
+android.widget.ImageView} from the worker thread instead of the UI thread. This can result in
+undefined and unexpected behavior, which can be difficult and time-consuming to track down.</p>
+
+<p>To fix this problem, Android offers several ways to access the UI thread from other
+threads. Here is a list of methods that can help:</p>
+
+<ul>
+<li>{@link android.app.Activity#runOnUiThread(java.lang.Runnable)
+Activity.runOnUiThread(Runnable)}</li>
+<li>{@link android.view.View#post(java.lang.Runnable) View.post(Runnable)}</li>
+<li>{@link android.view.View#postDelayed(java.lang.Runnable, long) View.postDelayed(Runnable,
+long)}</li>
+</ul>
+
+<p>For example, you can fix the above code by using the {@link
+android.view.View#post(java.lang.Runnable) View.post(Runnable)} method:</p>
+
+<pre>
+public void onClick(View v) {
+ new Thread(new Runnable() {
+ public void run() {
+ final Bitmap bitmap = loadImageFromNetwork("http://example.com/image.png");
+ mImageView.post(new Runnable() {
+ public void run() {
+ mImageView.setImageBitmap(bitmap);
+ }
+ });
+ }
+ }).start();
+}
+</pre>
+
+<p>Now this implementation is thread-safe: the network operation is done from a separate thread
+while the {@link android.widget.ImageView} is manipulated from the UI thread.</p>
+
+<p>However, as the complexity of the operation grows, this kind of code can get complicated and
+difficult to maintain. To handle more complex interactions with a worker thread, you might consider
+using a {@link android.os.Handler} in your worker thread, to process messages delivered from the UI
+thread. Perhaps the best solution, though, is to extend the {@link android.os.AsyncTask} class,
+which simplifies the execution of worker thread tasks that need to interact with the UI.</p>
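+
+<p>As a rough illustration of the {@link android.os.Handler} approach, the following sketch uses
+{@link android.os.HandlerThread} to provide the worker thread's message loop; messages sent to the
+handler from the UI thread are then processed on the worker thread:</p>
+
+<pre>
+HandlerThread workerThread = new HandlerThread("Worker");
+workerThread.start();
+
+// Messages sent to this handler are handled on the worker thread's looper
+Handler workerHandler = new Handler(workerThread.getLooper()) {
+    public void handleMessage(Message msg) {
+        // perform the long-running work here, then post any UI update
+        // back to the UI thread (for example, with View.post())
+    }
+};
+
+// From the UI thread:
+workerHandler.sendEmptyMessage(0);
+</pre>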
+
+
+<h4 id="AsyncTask">Using AsyncTask</h4>
+
+<p>{@link android.os.AsyncTask} allows you to perform asynchronous work on your user
+interface. It performs the blocking operations in a worker thread and then publishes the results on
+the UI thread, without requiring you to handle threads and/or handlers yourself.</p>
+
+<p>To use it, you must subclass {@link android.os.AsyncTask} and implement the {@link
+android.os.AsyncTask#doInBackground doInBackground()} callback method, which runs in a pool of
+background threads. To update your UI, you should implement {@link
+android.os.AsyncTask#onPostExecute onPostExecute()}, which delivers the result from {@link
+android.os.AsyncTask#doInBackground doInBackground()} and runs in the UI thread, so you can safely
+update your UI. You can then run the task by calling {@link android.os.AsyncTask#execute execute()}
+from the UI thread.</p>
+
+<p>For example, you can implement the previous example using {@link android.os.AsyncTask} this
+way:</p>
+
+<pre>
+public void onClick(View v) {
+ new DownloadImageTask().execute("http://example.com/image.png");
+}
+
+private class DownloadImageTask extends AsyncTask&lt;String, Void, Bitmap&gt; {
+ /** The system calls this to perform work in a worker thread and
+ * delivers it the parameters given to AsyncTask.execute() */
+ protected Bitmap doInBackground(String... urls) {
+ return loadImageFromNetwork(urls[0]);
+ }
+
+ /** The system calls this to perform work in the UI thread and delivers
+ * the result from doInBackground() */
+ protected void onPostExecute(Bitmap result) {
+ mImageView.setImageBitmap(result);
+ }
+}
+</pre>
+
+<p>Now the UI is safe and the code is simpler, because it separates the work into the
+part that should be done on a worker thread and the part that should be done on the UI thread.</p>
+
+<p>You should read the {@link android.os.AsyncTask} reference for a full understanding of
+how to use this class, but here is a quick overview of how it works:</p>
+
+<ul>
+<li>You can specify the type of the parameters, the progress values, and the final
+value of the task, using generics</li>
+<li>The method {@link android.os.AsyncTask#doInBackground doInBackground()} executes automatically
+on a worker thread</li>
+<li>{@link android.os.AsyncTask#onPreExecute onPreExecute()}, {@link
+android.os.AsyncTask#onPostExecute onPostExecute()}, and {@link
+android.os.AsyncTask#onProgressUpdate onProgressUpdate()} are all invoked on the UI thread</li>
+<li>The value returned by {@link android.os.AsyncTask#doInBackground doInBackground()} is sent to
+{@link android.os.AsyncTask#onPostExecute onPostExecute()}</li>
+<li>You can call {@link android.os.AsyncTask#publishProgress publishProgress()} at any time in {@link
+android.os.AsyncTask#doInBackground doInBackground()} to execute {@link
+android.os.AsyncTask#onProgressUpdate onProgressUpdate()} on the UI thread (see the sketch after
+this list)</li>
+<li>You can cancel the task at any time, from any thread</li>
+</ul>
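+
+<p>For example, a task that reports progress might call {@link android.os.AsyncTask#publishProgress
+publishProgress()} from {@link android.os.AsyncTask#doInBackground doInBackground()}, as in this
+minimal sketch (the {@code downloadFile()} helper and the {@code mProgressBar} field are
+hypothetical):</p>
+
+<pre>
+private class DownloadFilesTask extends AsyncTask&lt;String, Integer, Void&gt; {
+    protected Void doInBackground(String... urls) {
+        for (int i = 0; i &lt; urls.length; i++) {
+            downloadFile(urls[i]); // hypothetical helper that performs one download
+            publishProgress((i + 1) * 100 / urls.length);
+        }
+        return null;
+    }
+
+    /** Runs on the UI thread after each call to publishProgress() */
+    protected void onProgressUpdate(Integer... progress) {
+        mProgressBar.setProgress(progress[0]);
+    }
+}
+</pre>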
+
+<p class="caution"><strong>Caution:</strong> Another problem you might encounter when using a worker
+thread is unexpected restarts of your activity due to a <a
+href="{@docRoot}guide/topics/resources/runtime-changes.html">runtime configuration change</a>
+(such as when the user changes the screen orientation), which may destroy your worker thread. To
+see how you can persist your task during one of these restarts and how to properly cancel the task
+when the activity is destroyed, see the source code for the <a
+href="http://code.google.com/p/shelves/">Shelves</a> sample application.</p>
+
+
+<h3 id="ThreadSafe">Thread-safe methods</h3>
+
+<p> In some situations, the methods you implement might be called from more than one thread, and
+therefore must be written to be thread-safe. </p>
+
+<p>This is primarily true for methods that can be called remotely&mdash;such as methods in a <a
+href="{@docRoot}guide/topics/fundamentals/bound-services.html">bound service</a>. When a call on a
+method implemented in an {@link android.os.IBinder} originates in the same process in which the
+{@link android.os.IBinder IBinder} is running, the method is executed in the caller's thread.
+However, when the call originates in another process, the method is executed in a thread chosen from
+a pool of threads that the system maintains in the same process as the {@link android.os.IBinder
+IBinder} (it's not executed in the UI thread of the process). For example, whereas a service's
+{@link android.app.Service#onBind onBind()} method would be called from the UI thread of the
+service's process, methods implemented in the object that {@link android.app.Service#onBind
+onBind()} returns (for example, a subclass that implements RPC methods) would be called from threads
+in the pool. Because a service can have more than one client, more than one pool thread can engage
+the same {@link android.os.IBinder IBinder} method at the same time. {@link android.os.IBinder
+IBinder} methods must, therefore, be implemented to be thread-safe.</p>
+
+<p> Similarly, a content provider can receive data requests that originate in other processes.
+Although the {@link android.content.ContentResolver} and {@link android.content.ContentProvider}
+classes hide the details of how the interprocess communication is managed, {@link
+android.content.ContentProvider} methods that respond to those requests&mdash;the methods {@link
+android.content.ContentProvider#query query()}, {@link android.content.ContentProvider#insert
+insert()}, {@link android.content.ContentProvider#delete delete()}, {@link
+android.content.ContentProvider#update update()}, and {@link android.content.ContentProvider#getType
+getType()}&mdash;are called from a pool of threads in the content provider's process, not the UI
+thread for the process. Because these methods might be called from any number of threads at the
+same time, they too must be implemented to be thread-safe. </p>
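+
+<p>As a minimal sketch of what this means in practice, a method exposed through a {@link
+android.os.Binder} subclass can simply synchronize access to any shared state (the {@code
+CounterBinder} class here is a hypothetical example, not part of the framework):</p>
+
+<pre>
+public class CounterBinder extends Binder {
+    private int mCount;
+
+    // May be called concurrently from several threads in the binder pool,
+    // so access to the shared counter is synchronized
+    public synchronized int increment() {
+        return ++mCount;
+    }
+}
+</pre>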
+
+
+<h2 id="IPC">Interprocess Communication</h2>
+
+<p>Android offers a mechanism for interprocess communication (IPC) using remote procedure calls
+(RPCs), in which a method is called by an activity or other application component, but executed
+remotely (in another process), with any result returned back to the
+caller. This entails decomposing a method call and its data to a level the operating system can
+understand, transmitting it from the local process and address space to the remote process and
+address space, then reassembling and reenacting the call there. Return values are then
+transmitted in the opposite direction. Android provides all the code to perform these IPC
+transactions, so you can focus on defining and implementing the RPC programming interface. </p>
+
+<p>To perform IPC, your application must bind to a service, using {@link
+android.content.Context#bindService bindService()}. For more information, see the <a
+href="{@docRoot}guide/topics/fundamentals/services.html">Services</a> developer guide.</p>
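+
+<p>As a brief sketch of the binding step (the {@code RemoteService} class is a hypothetical bound
+service; what you do with the returned {@link android.os.IBinder} depends on the interface the
+service defines):</p>
+
+<pre>
+private ServiceConnection mConnection = new ServiceConnection() {
+    public void onServiceConnected(ComponentName name, IBinder service) {
+        // Wrap the IBinder in the RPC interface the service defines (for example,
+        // the stub generated from an .aidl file) and start making calls
+    }
+
+    public void onServiceDisconnected(ComponentName name) {
+        // The service process crashed or was killed
+    }
+};
+
+void bindToService(Context context) {
+    Intent intent = new Intent(context, RemoteService.class);
+    context.bindService(intent, mConnection, Context.BIND_AUTO_CREATE);
+}
+</pre>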
+
+
+<h2>Beginner's Path</h2>
+
+<p>For information about how to perform work in the background for an indefinite period of time
+(without a user interface), continue with the <b><a
+href="{@docRoot}guide/topics/fundamentals/services.html">Services</a></b> document.</p>
+
diff --git a/docs/html/images/sdk_manager_packages.png b/docs/html/images/sdk_manager_packages.png
index fd59ac6bc975..19a7cb91537d 100644
--- a/docs/html/images/sdk_manager_packages.png
+++ b/docs/html/images/sdk_manager_packages.png
Binary files differ
diff --git a/docs/html/sdk/installing.jd b/docs/html/sdk/installing.jd
index 53d551539670..b0fd761051a9 100644
--- a/docs/html/sdk/installing.jd
+++ b/docs/html/sdk/installing.jd
@@ -148,7 +148,7 @@ last step in setting up your Android SDK.</p>
<p>If you prefer to work in a different IDE, you do not need to
install Eclipse or ADT, instead, you can directly use the SDK tools to build and
debug your application. The <a href="{@docRoot}guide/developing/index.html">Overview</a>
-section of the developer guide outlines the major steps that you need to complete
+section of the developer guide outlines the major steps that you need to complete
when developing in Eclipse or other IDEs.</p>
@@ -177,7 +177,7 @@ better understand the components available from the Android SDK and AVD Manager.
<p>You can launch the Android SDK and AVD Manager in one of the following ways:</p>
<ul>
<li>From within Eclipse, select <strong>Window &gt; Android SDK and AVD Manager</strong>.</li>
- <li>On Windows, double-click the <code>SDK Manager.ext</code> file at the root of the Android
+ <li>On Windows, double-click the <code>SDK Manager.exe</code> file at the root of the Android
SDK directory.</li>
<li>On Mac or Linux, open a terminal and navigate to the <code>tools/</code> directory in the
Android SDK, then execute: <pre>android</pre> </li>
@@ -444,6 +444,7 @@ version.</td>
<td colspan="3"><code>samples/</code></td>
<td>Sample code and apps that are specific to platform version.</td>
</tr>
+<tr>
<td colspan="3"><code>tools/</code></td>
<td>Contains the set of development and profiling tools that are platform-independent, such
as the emulator, the AVD and SDK Manager, ddms, hierarchyviewer and more. The tools in
@@ -548,7 +549,7 @@ first step in getting started with Android development. </p>
href="{@docRoot}guide/developing/tools/index.html">development
tools</a> that are available to you</li>
<li>Read the <a
- href="{@docRoot}guide/developing/index.html">Overview</a>
+ href="{@docRoot}guide/developing/index.html">Overview</a>
for how to develop an Android application.
</li>
<li>Read <a href="{@docRoot}guide/developing/device.html">Developing on a Device</a> to set up an
diff --git a/include/media/AudioSystem.h b/include/media/AudioSystem.h
index 1e29d825cf33..03f894404a17 100644
--- a/include/media/AudioSystem.h
+++ b/include/media/AudioSystem.h
@@ -204,8 +204,9 @@ public:
// set audio mode in audio hardware (see AudioSystem::audio_mode)
static status_t setMode(int mode);
- // returns true in *state if tracks are active on the specified stream
- static status_t isStreamActive(int stream, bool *state);
+ // returns true in *state if tracks are active on the specified stream or has been active
+ // in the past inPastMs milliseconds
+ static status_t isStreamActive(int stream, bool *state, uint32_t inPastMs = 0);
// set/get audio hardware parameters. The function accepts a list of parameters
// key value pairs in the form: key1=value1;key2=value2;...
diff --git a/include/media/IAudioFlinger.h b/include/media/IAudioFlinger.h
index 70e505eb342e..589f7cd28a86 100644
--- a/include/media/IAudioFlinger.h
+++ b/include/media/IAudioFlinger.h
@@ -102,9 +102,6 @@ public:
virtual status_t setMicMute(bool state) = 0;
virtual bool getMicMute() const = 0;
- // is any track active on this stream?
- virtual bool isStreamActive(int stream) const = 0;
-
virtual status_t setParameters(int ioHandle, const String8& keyValuePairs) = 0;
virtual String8 getParameters(int ioHandle, const String8& keys) = 0;
diff --git a/include/media/IAudioPolicyService.h b/include/media/IAudioPolicyService.h
index 49eee59040e1..5afceaa4bc4f 100644
--- a/include/media/IAudioPolicyService.h
+++ b/include/media/IAudioPolicyService.h
@@ -81,6 +81,7 @@ public:
int session,
int id) = 0;
virtual status_t unregisterEffect(int id) = 0;
+ virtual bool isStreamActive(int stream, uint32_t inPastMs = 0) const = 0;
};
diff --git a/include/media/stagefright/MediaDefs.h b/include/media/stagefright/MediaDefs.h
index 2d50ca578489..31a549cce272 100644
--- a/include/media/stagefright/MediaDefs.h
+++ b/include/media/stagefright/MediaDefs.h
@@ -37,6 +37,7 @@ extern const char *MEDIA_MIMETYPE_AUDIO_VORBIS;
extern const char *MEDIA_MIMETYPE_AUDIO_G711_ALAW;
extern const char *MEDIA_MIMETYPE_AUDIO_G711_MLAW;
extern const char *MEDIA_MIMETYPE_AUDIO_RAW;
+extern const char *MEDIA_MIMETYPE_AUDIO_FLAC;
extern const char *MEDIA_MIMETYPE_CONTAINER_MPEG4;
extern const char *MEDIA_MIMETYPE_CONTAINER_WAV;
diff --git a/libs/hwui/Caches.cpp b/libs/hwui/Caches.cpp
index bffab958378f..ebf7aa0a3d04 100644
--- a/libs/hwui/Caches.cpp
+++ b/libs/hwui/Caches.cpp
@@ -20,6 +20,7 @@
#include "Caches.h"
#include "Properties.h"
+#include "LayerRenderer.h"
namespace android {
@@ -116,12 +117,7 @@ void Caches::clearGarbage() {
size_t count = mLayerGarbage.size();
for (size_t i = 0; i < count; i++) {
Layer* layer = mLayerGarbage.itemAt(i);
- if (layer) {
- if (layer->fbo) glDeleteFramebuffers(1, &layer->fbo);
- if (layer->texture) glDeleteTextures(1, &layer->texture);
-
- delete layer;
- }
+ LayerRenderer::destroyLayer(layer);
}
mLayerGarbage.clear();
}
diff --git a/libs/hwui/LayerCache.cpp b/libs/hwui/LayerCache.cpp
index 7667af5fe018..a9710ad6adbf 100644
--- a/libs/hwui/LayerCache.cpp
+++ b/libs/hwui/LayerCache.cpp
@@ -128,6 +128,31 @@ Layer* LayerCache::get(const uint32_t width, const uint32_t height) {
return layer;
}
+bool LayerCache::resize(Layer* layer, const uint32_t width, const uint32_t height) {
+ // TODO: We should be smarter and see if we have a texture of the appropriate
+ // size already in the cache, and reuse it instead of creating a new one
+
+ LayerEntry entry(width, height);
+ if (entry.mWidth <= layer->width && entry.mHeight <= layer->height) {
+ return true;
+ }
+
+ glActiveTexture(GL_TEXTURE0);
+ glBindTexture(GL_TEXTURE_2D, layer->texture);
+
+ glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, entry.mWidth, entry.mHeight, 0,
+ GL_RGBA, GL_UNSIGNED_BYTE, NULL);
+
+ if (glGetError() != GL_NO_ERROR) {
+ return false;
+ }
+
+ layer->width = entry.mWidth;
+ layer->height = entry.mHeight;
+
+ return true;
+}
+
bool LayerCache::put(Layer* layer) {
const uint32_t size = layer->width * layer->height * 4;
// Don't even try to cache a layer that's bigger than the cache
diff --git a/libs/hwui/LayerCache.h b/libs/hwui/LayerCache.h
index 1333a73feea7..d2d5f3960568 100644
--- a/libs/hwui/LayerCache.h
+++ b/libs/hwui/LayerCache.h
@@ -76,6 +76,17 @@ public:
* Clears the cache. This causes all layers to be deleted.
*/
void clear();
+ /**
+ * Resize the specified layer if needed.
+ *
+ * @param layer The layer to resize
+ * @param width The new width of the layer
+ * @param height The new height of the layer
+ *
+ * @return True if the layer was resized or nothing happened, false if
+ * a failure occurred during the resizing operation
+ */
+ bool resize(Layer* layer, const uint32_t width, const uint32_t height);
/**
* Sets the maximum size of the cache in bytes.
diff --git a/libs/hwui/LayerRenderer.cpp b/libs/hwui/LayerRenderer.cpp
index 60ff0bfafdcb..24f9739715ee 100644
--- a/libs/hwui/LayerRenderer.cpp
+++ b/libs/hwui/LayerRenderer.cpp
@@ -18,6 +18,7 @@
#include <ui/Rect.h>
+#include "LayerCache.h"
#include "LayerRenderer.h"
#include "Properties.h"
#include "Rect.h"
@@ -34,21 +35,24 @@ void LayerRenderer::prepareDirty(float left, float top, float right, float botto
glBindFramebuffer(GL_FRAMEBUFFER, mLayer->fbo);
+ const float width = mLayer->layer.getWidth();
+ const float height = mLayer->layer.getHeight();
+
#if RENDER_LAYERS_AS_REGIONS
Rect dirty(left, top, right, bottom);
if (dirty.isEmpty() || (dirty.left <= 0 && dirty.top <= 0 &&
- dirty.right >= mLayer->width && dirty.bottom >= mLayer->height)) {
+ dirty.right >= width && dirty.bottom >= height)) {
mLayer->region.clear();
- dirty.set(0.0f, 0.0f, mLayer->width, mLayer->height);
+ dirty.set(0.0f, 0.0f, width, height);
} else {
- dirty.intersect(0.0f, 0.0f, mLayer->width, mLayer->height);
+ dirty.intersect(0.0f, 0.0f, width, height);
android::Rect r(dirty.left, dirty.top, dirty.right, dirty.bottom);
mLayer->region.subtractSelf(r);
}
OpenGLRenderer::prepareDirty(dirty.left, dirty.top, dirty.right, dirty.bottom, opaque);
#else
- OpenGLRenderer::prepareDirty(0.0f, 0.0f, mLayer->width, mLayer->height, opaque);
+ OpenGLRenderer::prepareDirty(0.0f, 0.0f, width, height, opaque);
#endif
}
@@ -162,64 +166,56 @@ void LayerRenderer::generateMesh() {
Layer* LayerRenderer::createLayer(uint32_t width, uint32_t height, bool isOpaque) {
LAYER_RENDERER_LOGD("Creating new layer %dx%d", width, height);
- Layer* layer = new Layer(width, height);
-
- GLuint previousFbo;
- glGetIntegerv(GL_FRAMEBUFFER_BINDING, (GLint*) &previousFbo);
-
- glGenFramebuffers(1, &layer->fbo);
- glBindFramebuffer(GL_FRAMEBUFFER, layer->fbo);
-
- if (glGetError() != GL_NO_ERROR) {
- glBindFramebuffer(GL_FRAMEBUFFER, previousFbo);
- glDeleteBuffers(1, &layer->fbo);
- return 0;
+ GLuint fbo = Caches::getInstance().fboCache.get();
+ if (!fbo) {
+ LOGW("Could not obtain an FBO");
+ return NULL;
}
glActiveTexture(GL_TEXTURE0);
- glGenTextures(1, &layer->texture);
- glBindTexture(GL_TEXTURE_2D, layer->texture);
+ Layer* layer = Caches::getInstance().layerCache.get(width, height);
+ if (!layer) {
+ LOGW("Could not obtain a layer");
+ return NULL;
+ }
- glPixelStorei(GL_UNPACK_ALIGNMENT, 4);
+ layer->fbo = fbo;
+ layer->layer.set(0.0f, 0.0f, width, height);
+ layer->texCoords.set(0.0f, height / float(layer->height),
+ width / float(layer->width), 0.0f);
+ layer->alpha = 255;
+ layer->mode = SkXfermode::kSrcOver_Mode;
+ layer->blend = !isOpaque;
+ layer->colorFilter = NULL;
+ layer->region.clear();
- glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
- glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+ GLuint previousFbo;
+ glGetIntegerv(GL_FRAMEBUFFER_BINDING, (GLint*) &previousFbo);
- glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
- glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+ glBindFramebuffer(GL_FRAMEBUFFER, layer->fbo);
+ glBindTexture(GL_TEXTURE_2D, layer->texture);
- glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0,
- GL_RGBA, GL_UNSIGNED_BYTE, NULL);
+ // Initialize the texture if needed
+ if (layer->empty) {
+ layer->empty = false;
+ glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, layer->width, layer->height, 0,
+ GL_RGBA, GL_UNSIGNED_BYTE, NULL);
- if (glGetError() != GL_NO_ERROR) {
- glBindFramebuffer(GL_FRAMEBUFFER, previousFbo);
- glDeleteBuffers(1, &layer->fbo);
- glDeleteTextures(1, &layer->texture);
- delete layer;
- return 0;
+ if (glGetError() != GL_NO_ERROR) {
+ LOGD("Could not allocate texture");
+ glBindFramebuffer(GL_FRAMEBUFFER, previousFbo);
+ glDeleteTextures(1, &layer->texture);
+ Caches::getInstance().fboCache.put(fbo);
+ delete layer;
+ return NULL;
+ }
}
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D,
layer->texture, 0);
- if (glGetError() != GL_NO_ERROR) {
- glBindFramebuffer(GL_FRAMEBUFFER, previousFbo);
- glDeleteBuffers(1, &layer->fbo);
- glDeleteTextures(1, &layer->texture);
- delete layer;
- return 0;
- }
-
glBindFramebuffer(GL_FRAMEBUFFER, previousFbo);
- layer->layer.set(0.0f, 0.0f, width, height);
- layer->texCoords.set(0.0f, 1.0f, 1.0f, 0.0f);
- layer->alpha = 255;
- layer->mode = SkXfermode::kSrcOver_Mode;
- layer->blend = !isOpaque;
- layer->empty = false;
- layer->colorFilter = NULL;
-
return layer;
}
@@ -227,27 +223,17 @@ bool LayerRenderer::resizeLayer(Layer* layer, uint32_t width, uint32_t height) {
if (layer) {
LAYER_RENDERER_LOGD("Resizing layer fbo = %d to %dx%d", layer->fbo, width, height);
- glActiveTexture(GL_TEXTURE0);
- glBindTexture(GL_TEXTURE_2D, layer->texture);
-
- glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0,
- GL_RGBA, GL_UNSIGNED_BYTE, NULL);
-
- if (glGetError() != GL_NO_ERROR) {
- glDeleteBuffers(1, &layer->fbo);
- glDeleteTextures(1, &layer->texture);
-
- layer->width = 0;
- layer->height = 0;
- layer->fbo = 0;
- layer->texture = 0;
-
+ if (Caches::getInstance().layerCache.resize(layer, width, height)) {
+ layer->layer.set(0.0f, 0.0f, width, height);
+ layer->texCoords.set(0.0f, height / float(layer->height),
+ width / float(layer->width), 0.0f);
+ } else {
+ if (layer->texture) glDeleteTextures(1, &layer->texture);
+ delete layer;
return false;
}
-
- layer->width = width;
- layer->height = height;
}
+
return true;
}
@@ -255,10 +241,16 @@ void LayerRenderer::destroyLayer(Layer* layer) {
if (layer) {
LAYER_RENDERER_LOGD("Destroying layer, fbo = %d", layer->fbo);
- if (layer->fbo) glDeleteFramebuffers(1, &layer->fbo);
- if (layer->texture) glDeleteTextures(1, &layer->texture);
+ if (layer->fbo) {
+ Caches::getInstance().fboCache.put(layer->fbo);
+ }
- delete layer;
+ if (!Caches::getInstance().layerCache.put(layer)) {
+ if (layer->texture) glDeleteTextures(1, &layer->texture);
+ delete layer;
+ } else {
+ layer->region.clear();
+ }
}
}
diff --git a/libs/hwui/Properties.h b/libs/hwui/Properties.h
index 2f230b566c82..2d8b6f3950f6 100644
--- a/libs/hwui/Properties.h
+++ b/libs/hwui/Properties.h
@@ -64,14 +64,14 @@ enum DebugLevel {
// Converts a number of mega-bytes into bytes
#define MB(s) s * 1024 * 1024
-#define DEFAULT_TEXTURE_CACHE_SIZE 20.0f
-#define DEFAULT_LAYER_CACHE_SIZE 8.0f
+#define DEFAULT_TEXTURE_CACHE_SIZE 24.0f
+#define DEFAULT_LAYER_CACHE_SIZE 24.0f
#define DEFAULT_PATH_CACHE_SIZE 4.0f
#define DEFAULT_SHAPE_CACHE_SIZE 1.0f
#define DEFAULT_PATCH_CACHE_SIZE 512
#define DEFAULT_GRADIENT_CACHE_SIZE 0.5f
#define DEFAULT_DROP_SHADOW_CACHE_SIZE 2.0f
-#define DEFAULT_FBO_CACHE_SIZE 12
+#define DEFAULT_FBO_CACHE_SIZE 16
#define DEFAULT_TEXT_GAMMA 1.4f
#define DEFAULT_TEXT_BLACK_GAMMA_THRESHOLD 64
diff --git a/media/java/android/media/AudioManager.java b/media/java/android/media/AudioManager.java
index 9058a7ba8e55..051a0fc21760 100644
--- a/media/java/android/media/AudioManager.java
+++ b/media/java/android/media/AudioManager.java
@@ -394,10 +394,10 @@ public class AudioManager {
*/
adjustSuggestedStreamVolume(
keyCode == KeyEvent.KEYCODE_VOLUME_UP
- ? AudioManager.ADJUST_RAISE
- : AudioManager.ADJUST_LOWER,
+ ? ADJUST_RAISE
+ : ADJUST_LOWER,
stream,
- AudioManager.FLAG_SHOW_UI | AudioManager.FLAG_VIBRATE);
+ FLAG_SHOW_UI | FLAG_VIBRATE);
break;
case KeyEvent.KEYCODE_VOLUME_MUTE:
// TODO: Actually handle MUTE.
@@ -416,7 +416,11 @@ public class AudioManager {
* Play a sound. This is done on key up since we don't want the
* sound to play when a user holds down volume down to mute.
*/
- adjustSuggestedStreamVolume(ADJUST_SAME, stream, FLAG_PLAY_SOUND);
+ adjustSuggestedStreamVolume(
+ ADJUST_SAME,
+ stream,
+ FLAG_PLAY_SOUND);
+
mVolumeKeyUpTime = SystemClock.uptimeMillis();
break;
case KeyEvent.KEYCODE_VOLUME_MUTE:
@@ -555,6 +559,21 @@ public class AudioManager {
}
/**
+ * Get last audible volume before stream was muted.
+ *
+ * @hide
+ */
+ public int getLastAudibleStreamVolume(int streamType) {
+ IAudioService service = getService();
+ try {
+ return service.getLastAudibleStreamVolume(streamType);
+ } catch (RemoteException e) {
+ Log.e(TAG, "Dead object in getLastAudibleStreamVolume", e);
+ return 0;
+ }
+ }
+
+ /**
* Sets the ringer mode.
* <p>
* Silent mode will mute the volume and will not vibrate. Vibrate mode will
@@ -649,6 +668,21 @@ public class AudioManager {
}
/**
+ * get stream mute state.
+ *
+ * @hide
+ */
+ public boolean isStreamMute(int streamType) {
+ IAudioService service = getService();
+ try {
+ return service.isStreamMute(streamType);
+ } catch (RemoteException e) {
+ Log.e(TAG, "Dead object in isStreamMute", e);
+ return false;
+ }
+ }
+
+ /**
* Returns whether a particular type should vibrate according to user
* settings and the current ringer mode.
* <p>
@@ -1124,7 +1158,7 @@ public class AudioManager {
* @return true if any music tracks are active.
*/
public boolean isMusicActive() {
- return AudioSystem.isStreamActive(STREAM_MUSIC);
+ return AudioSystem.isStreamActive(STREAM_MUSIC, 0);
}
/*
diff --git a/media/java/android/media/AudioService.java b/media/java/android/media/AudioService.java
index cbc7529803a4..e18220ac0f4b 100644
--- a/media/java/android/media/AudioService.java
+++ b/media/java/android/media/AudioService.java
@@ -284,6 +284,15 @@ public class AudioService extends IAudioService.Stub {
private SoundPoolListenerThread mSoundPoolListenerThread;
// message looper for SoundPool listener
private Looper mSoundPoolLooper = null;
+ // default volume applied to sound played with playSoundEffect()
+ private static final int SOUND_EFFECT_DEFAULT_VOLUME_DB = -20;
+ // volume applied to sound played with playSoundEffect() read from ro.config.sound_fx_volume
+ private int SOUND_EFFECT_VOLUME_DB;
+ // getActiveStreamType() will return STREAM_NOTIFICATION during this period after a notification
+ // stopped
+ private static final int NOTIFICATION_VOLUME_DELAY_MS = 5000;
+ // previous volume adjustment direction received by checkForRingerModeChange()
+ private int mPrevVolDirection = AudioManager.ADJUST_SAME;
///////////////////////////////////////////////////////////////////////////
// Construction
@@ -301,6 +310,10 @@ public class AudioService extends IAudioService.Stub {
"ro.config.vc_call_vol_steps",
MAX_STREAM_VOLUME[AudioSystem.STREAM_VOICE_CALL]);
+ SOUND_EFFECT_VOLUME_DB = SystemProperties.getInt(
+ "ro.config.sound_fx_volume",
+ SOUND_EFFECT_DEFAULT_VOLUME_DB);
+
mVolumePanel = new VolumePanel(context, this);
mSettingsObserver = new SettingsObserver();
mForcedUseForComm = AudioSystem.FORCE_NONE;
@@ -401,14 +414,19 @@ public class AudioService extends IAudioService.Stub {
mRingerModeAffectedStreams = Settings.System.getInt(cr,
Settings.System.MODE_RINGER_STREAMS_AFFECTED,
((1 << AudioSystem.STREAM_RING)|(1 << AudioSystem.STREAM_NOTIFICATION)|
- (1 << AudioSystem.STREAM_SYSTEM)|(1 << AudioSystem.STREAM_SYSTEM_ENFORCED)));
+ (1 << AudioSystem.STREAM_SYSTEM)|(1 << AudioSystem.STREAM_SYSTEM_ENFORCED)|
+ (1 << AudioSystem.STREAM_MUSIC)));
mMuteAffectedStreams = System.getInt(cr,
System.MUTE_STREAMS_AFFECTED,
((1 << AudioSystem.STREAM_MUSIC)|(1 << AudioSystem.STREAM_RING)|(1 << AudioSystem.STREAM_SYSTEM)));
- mNotificationsUseRingVolume = System.getInt(cr,
- Settings.System.NOTIFICATIONS_USE_RING_VOLUME, 1);
+ if (mVoiceCapable) {
+ mNotificationsUseRingVolume = System.getInt(cr,
+ Settings.System.NOTIFICATIONS_USE_RING_VOLUME, 1);
+ } else {
+ mNotificationsUseRingVolume = 1;
+ }
if (mNotificationsUseRingVolume == 1) {
STREAM_VOLUME_ALIAS[AudioSystem.STREAM_NOTIFICATION] = AudioSystem.STREAM_RING;
@@ -465,8 +483,10 @@ public class AudioService extends IAudioService.Stub {
// If either the client forces allowing ringer modes for this adjustment,
// or the stream type is one that is affected by ringer modes
- if ((flags & AudioManager.FLAG_ALLOW_RINGER_MODES) != 0
- || streamType == AudioSystem.STREAM_RING) {
+ if (((flags & AudioManager.FLAG_ALLOW_RINGER_MODES) != 0) ||
+ (!mVoiceCapable && streamType != AudioSystem.STREAM_VOICE_CALL &&
+ streamType != AudioSystem.STREAM_BLUETOOTH_SCO) ||
+ (mVoiceCapable && streamType == AudioSystem.STREAM_RING)) {
// Check if the ringer mode changes with this volume adjustment. If
// it does, it will handle adjusting the volume, so we won't below
adjustVolume = checkForRingerModeChange(oldIndex, direction);
@@ -491,10 +511,8 @@ public class AudioService extends IAudioService.Stub {
}
index = streamState.mIndex;
}
- // UI
- mVolumePanel.postVolumeChanged(streamType, flags);
- // Broadcast Intent
- sendVolumeUpdate(streamType, oldIndex, index);
+
+ sendVolumeUpdate(streamType, oldIndex, index, flags);
}
/** @see AudioManager#setStreamVolume(int, int, int) */
@@ -509,21 +527,23 @@ public class AudioService extends IAudioService.Stub {
index = (streamState.muteCount() != 0) ? streamState.mLastAudibleIndex : streamState.mIndex;
- // UI, etc.
- mVolumePanel.postVolumeChanged(streamType, flags);
- // Broadcast Intent
- sendVolumeUpdate(streamType, oldIndex, index);
+ sendVolumeUpdate(streamType, oldIndex, index, flags);
}
- private void sendVolumeUpdate(int streamType, int oldIndex, int index) {
+ // UI update and Broadcast Intent
+ private void sendVolumeUpdate(int streamType, int oldIndex, int index, int flags) {
+ if (!mVoiceCapable && (streamType == AudioSystem.STREAM_RING)) {
+ streamType = AudioSystem.STREAM_NOTIFICATION;
+ }
+
+ mVolumePanel.postVolumeChanged(streamType, flags);
+
oldIndex = (oldIndex + 5) / 10;
index = (index + 5) / 10;
-
Intent intent = new Intent(AudioManager.VOLUME_CHANGED_ACTION);
intent.putExtra(AudioManager.EXTRA_VOLUME_STREAM_TYPE, streamType);
intent.putExtra(AudioManager.EXTRA_VOLUME_STREAM_VALUE, index);
intent.putExtra(AudioManager.EXTRA_PREV_VOLUME_STREAM_VALUE, oldIndex);
-
mContext.sendBroadcast(intent);
}
@@ -575,6 +595,11 @@ public class AudioService extends IAudioService.Stub {
}
}
+ /** get stream mute state. */
+ public boolean isStreamMute(int streamType) {
+ return (mStreamStates[streamType].muteCount() != 0);
+ }
+
/** @see AudioManager#getStreamVolume(int) */
public int getStreamVolume(int streamType) {
ensureValidStreamType(streamType);
@@ -587,6 +612,13 @@ public class AudioService extends IAudioService.Stub {
return (mStreamStates[streamType].getMaxIndex() + 5) / 10;
}
+
+ /** Get last audible volume before stream was muted. */
+ public int getLastAudibleStreamVolume(int streamType) {
+ ensureValidStreamType(streamType);
+ return (mStreamStates[streamType].mLastAudibleIndex + 5) / 10;
+ }
+
/** @see AudioManager#getRingerMode() */
public int getRingerMode() {
return mRingerMode;
@@ -1383,8 +1415,9 @@ public class AudioService extends IAudioService.Stub {
if (mRingerMode == AudioManager.RINGER_MODE_NORMAL) {
// audible mode, at the bottom of the scale
- if (direction == AudioManager.ADJUST_LOWER
- && (oldIndex + 5) / 10 == 1) {
+ if ((direction == AudioManager.ADJUST_LOWER &&
+ mPrevVolDirection != AudioManager.ADJUST_LOWER) &&
+ ((oldIndex + 5) / 10 == 0)) {
// "silent mode", but which one?
newRingerMode = System.getInt(mContentResolver, System.VIBRATE_IN_SILENT, 1) == 1
? AudioManager.RINGER_MODE_VIBRATE
@@ -1411,6 +1444,8 @@ public class AudioService extends IAudioService.Stub {
adjustVolumeIndex = false;
}
+ mPrevVolDirection = direction;
+
return adjustVolumeIndex;
}
@@ -1439,36 +1474,61 @@ public class AudioService extends IAudioService.Stub {
}
private int getActiveStreamType(int suggestedStreamType) {
- boolean isOffhook = false;
- try {
- ITelephony phone = ITelephony.Stub.asInterface(ServiceManager.checkService("phone"));
- if (phone != null) isOffhook = phone.isOffhook();
- } catch (RemoteException e) {
- Log.w(TAG, "Couldn't connect to phone service", e);
- }
-
- if (AudioSystem.getForceUse(AudioSystem.FOR_COMMUNICATION) == AudioSystem.FORCE_BT_SCO) {
- // Log.v(TAG, "getActiveStreamType: Forcing STREAM_BLUETOOTH_SCO...");
- return AudioSystem.STREAM_BLUETOOTH_SCO;
- } else if (isOffhook || getMode() == AudioManager.MODE_IN_COMMUNICATION) {
- // Log.v(TAG, "getActiveStreamType: Forcing STREAM_VOICE_CALL...");
- return AudioSystem.STREAM_VOICE_CALL;
- } else if (AudioSystem.isStreamActive(AudioSystem.STREAM_MUSIC)) {
- // Log.v(TAG, "getActiveStreamType: Forcing STREAM_MUSIC...");
- return AudioSystem.STREAM_MUSIC;
- } else if (suggestedStreamType == AudioManager.USE_DEFAULT_STREAM_TYPE) {
- if (mVoiceCapable) {
+
+ if (mVoiceCapable) {
+ boolean isOffhook = false;
+ try {
+ ITelephony phone = ITelephony.Stub.asInterface(ServiceManager.checkService("phone"));
+ if (phone != null) isOffhook = phone.isOffhook();
+ } catch (RemoteException e) {
+ Log.w(TAG, "Couldn't connect to phone service", e);
+ }
+
+ if (isOffhook || getMode() == AudioManager.MODE_IN_COMMUNICATION) {
+ if (AudioSystem.getForceUse(AudioSystem.FOR_COMMUNICATION)
+ == AudioSystem.FORCE_BT_SCO) {
+ // Log.v(TAG, "getActiveStreamType: Forcing STREAM_BLUETOOTH_SCO...");
+ return AudioSystem.STREAM_BLUETOOTH_SCO;
+ } else {
+ // Log.v(TAG, "getActiveStreamType: Forcing STREAM_VOICE_CALL...");
+ return AudioSystem.STREAM_VOICE_CALL;
+ }
+ } else if (AudioSystem.isStreamActive(AudioSystem.STREAM_MUSIC, 0)) {
+ // Log.v(TAG, "getActiveStreamType: Forcing STREAM_MUSIC...");
+ return AudioSystem.STREAM_MUSIC;
+ } else if (suggestedStreamType == AudioManager.USE_DEFAULT_STREAM_TYPE) {
// Log.v(TAG, "getActiveStreamType: Forcing STREAM_RING..."
// + " b/c USE_DEFAULT_STREAM_TYPE...");
return AudioSystem.STREAM_RING;
} else {
+ // Log.v(TAG, "getActiveStreamType: Returning suggested type " + suggestedStreamType);
+ return suggestedStreamType;
+ }
+ } else {
+ if (getMode() == AudioManager.MODE_IN_COMMUNICATION) {
+ if (AudioSystem.getForceUse(AudioSystem.FOR_COMMUNICATION)
+ == AudioSystem.FORCE_BT_SCO) {
+ // Log.v(TAG, "getActiveStreamType: Forcing STREAM_BLUETOOTH_SCO...");
+ return AudioSystem.STREAM_BLUETOOTH_SCO;
+ } else {
+ // Log.v(TAG, "getActiveStreamType: Forcing STREAM_VOICE_CALL...");
+ return AudioSystem.STREAM_VOICE_CALL;
+ }
+ } else if (AudioSystem.isStreamActive(AudioSystem.STREAM_NOTIFICATION,
+ NOTIFICATION_VOLUME_DELAY_MS) ||
+ AudioSystem.isStreamActive(AudioSystem.STREAM_RING,
+ NOTIFICATION_VOLUME_DELAY_MS)) {
+ // Log.v(TAG, "getActiveStreamType: Forcing STREAM_NOTIFICATION...");
+ return AudioSystem.STREAM_NOTIFICATION;
+ } else if (AudioSystem.isStreamActive(AudioSystem.STREAM_MUSIC, 0) ||
+ (suggestedStreamType == AudioManager.USE_DEFAULT_STREAM_TYPE)) {
// Log.v(TAG, "getActiveStreamType: Forcing STREAM_MUSIC "
// + " b/c USE_DEFAULT_STREAM_TYPE...");
return AudioSystem.STREAM_MUSIC;
+ } else {
+ // Log.v(TAG, "getActiveStreamType: Returning suggested type " + suggestedStreamType);
+ return suggestedStreamType;
}
- } else {
- // Log.v(TAG, "getActiveStreamType: Returning suggested type " + suggestedStreamType);
- return suggestedStreamType;
}
}
@@ -1801,13 +1861,9 @@ public class AudioService extends IAudioService.Stub {
return;
}
float volFloat;
- // use STREAM_MUSIC volume attenuated by 3 dB if volume is not specified by caller
+ // use default if volume is not specified by caller
if (volume < 0) {
- // Same linear to log conversion as in native AudioSystem::linearToLog() (AudioSystem.cpp)
- float dBPerStep = (float)((0.5 * 100) / MAX_STREAM_VOLUME[AudioSystem.STREAM_MUSIC]);
- int musicVolIndex = (mStreamStates[AudioSystem.STREAM_MUSIC].mIndex + 5) / 10;
- float musicVoldB = dBPerStep * (musicVolIndex - MAX_STREAM_VOLUME[AudioSystem.STREAM_MUSIC]);
- volFloat = (float)Math.pow(10, (musicVoldB - 3)/20);
+ volFloat = (float)Math.pow(10, SOUND_EFFECT_VOLUME_DB/20);
} else {
volFloat = (float) volume / 1000.0f;
}
@@ -1884,7 +1940,7 @@ public class AudioService extends IAudioService.Stub {
// Force creation of new IAudioflinger interface
if (!mMediaServerOk) {
Log.e(TAG, "Media server died.");
- AudioSystem.isStreamActive(AudioSystem.STREAM_MUSIC);
+ AudioSystem.isStreamActive(AudioSystem.STREAM_MUSIC, 0);
sendMsg(mAudioHandler, MSG_MEDIA_SERVER_DIED, SHARED_MSG, SENDMSG_NOOP, 0, 0,
null, 500);
}
@@ -1981,21 +2037,23 @@ public class AudioService extends IAudioService.Stub {
int notificationsUseRingVolume = Settings.System.getInt(mContentResolver,
Settings.System.NOTIFICATIONS_USE_RING_VOLUME,
1);
- if (notificationsUseRingVolume != mNotificationsUseRingVolume) {
- mNotificationsUseRingVolume = notificationsUseRingVolume;
- if (mNotificationsUseRingVolume == 1) {
- STREAM_VOLUME_ALIAS[AudioSystem.STREAM_NOTIFICATION] = AudioSystem.STREAM_RING;
- mStreamStates[AudioSystem.STREAM_NOTIFICATION].setVolumeIndexSettingName(
- System.VOLUME_SETTINGS[AudioSystem.STREAM_RING]);
- } else {
- STREAM_VOLUME_ALIAS[AudioSystem.STREAM_NOTIFICATION] = AudioSystem.STREAM_NOTIFICATION;
- mStreamStates[AudioSystem.STREAM_NOTIFICATION].setVolumeIndexSettingName(
- System.VOLUME_SETTINGS[AudioSystem.STREAM_NOTIFICATION]);
- // Persist notification volume volume as it was not persisted while aliased to ring volume
- // and persist with no delay as there might be registered observers of the persisted
- // notification volume.
- sendMsg(mAudioHandler, MSG_PERSIST_VOLUME, AudioSystem.STREAM_NOTIFICATION,
- SENDMSG_REPLACE, 1, 1, mStreamStates[AudioSystem.STREAM_NOTIFICATION], 0);
+ if (mVoiceCapable) {
+ if (notificationsUseRingVolume != mNotificationsUseRingVolume) {
+ mNotificationsUseRingVolume = notificationsUseRingVolume;
+ if (mNotificationsUseRingVolume == 1) {
+ STREAM_VOLUME_ALIAS[AudioSystem.STREAM_NOTIFICATION] = AudioSystem.STREAM_RING;
+ mStreamStates[AudioSystem.STREAM_NOTIFICATION].setVolumeIndexSettingName(
+ System.VOLUME_SETTINGS[AudioSystem.STREAM_RING]);
+ } else {
+ STREAM_VOLUME_ALIAS[AudioSystem.STREAM_NOTIFICATION] = AudioSystem.STREAM_NOTIFICATION;
+ mStreamStates[AudioSystem.STREAM_NOTIFICATION].setVolumeIndexSettingName(
+ System.VOLUME_SETTINGS[AudioSystem.STREAM_NOTIFICATION]);
+ // Persist notification volume volume as it was not persisted while aliased to ring volume
+ // and persist with no delay as there might be registered observers of the persisted
+ // notification volume.
+ sendMsg(mAudioHandler, MSG_PERSIST_VOLUME, AudioSystem.STREAM_NOTIFICATION,
+ SENDMSG_REPLACE, 1, 1, mStreamStates[AudioSystem.STREAM_NOTIFICATION], 0);
+ }
}
}
}
diff --git a/media/java/android/media/AudioSystem.java b/media/java/android/media/AudioSystem.java
index 1fd03dcb1a3c..e20bb2510d9f 100644
--- a/media/java/android/media/AudioSystem.java
+++ b/media/java/android/media/AudioSystem.java
@@ -158,7 +158,7 @@ public class AudioSystem
*
* return true if any track playing on this stream is active.
*/
- public static native boolean isStreamActive(int stream);
+ public static native boolean isStreamActive(int stream, int inPastMs);
/*
* Sets a group generic audio configuration parameters. The use of these parameters
diff --git a/media/java/android/media/IAudioService.aidl b/media/java/android/media/IAudioService.aidl
index 384b8da9d3f2..e3bd7b4f8627 100644
--- a/media/java/android/media/IAudioService.aidl
+++ b/media/java/android/media/IAudioService.aidl
@@ -35,11 +35,15 @@ interface IAudioService {
void setStreamSolo(int streamType, boolean state, IBinder cb);
void setStreamMute(int streamType, boolean state, IBinder cb);
-
+
+ boolean isStreamMute(int streamType);
+
int getStreamVolume(int streamType);
int getStreamMaxVolume(int streamType);
+ int getLastAudibleStreamVolume(int streamType);
+
void setRingerMode(int ringerMode);
int getRingerMode();
diff --git a/media/java/android/media/MediaFile.java b/media/java/android/media/MediaFile.java
index 6a3ff7c0a18a..ee2c1e8cb2d0 100644
--- a/media/java/android/media/MediaFile.java
+++ b/media/java/android/media/MediaFile.java
@@ -46,8 +46,9 @@ public class MediaFile {
public static final int FILE_TYPE_OGG = 7;
public static final int FILE_TYPE_AAC = 8;
public static final int FILE_TYPE_MKA = 9;
+ public static final int FILE_TYPE_FLAC = 10;
private static final int FIRST_AUDIO_FILE_TYPE = FILE_TYPE_MP3;
- private static final int LAST_AUDIO_FILE_TYPE = FILE_TYPE_MKA;
+ private static final int LAST_AUDIO_FILE_TYPE = FILE_TYPE_FLAC;
// MIDI file types
public static final int FILE_TYPE_MID = 11;
@@ -99,8 +100,7 @@ public class MediaFile {
public static final int FILE_TYPE_MS_WORD = 104;
public static final int FILE_TYPE_MS_EXCEL = 105;
public static final int FILE_TYPE_MS_POWERPOINT = 106;
- public static final int FILE_TYPE_FLAC = 107;
- public static final int FILE_TYPE_ZIP = 108;
+ public static final int FILE_TYPE_ZIP = 107;
static class MediaFileType {
diff --git a/media/java/android/media/videoeditor/VideoEditorImpl.java b/media/java/android/media/videoeditor/VideoEditorImpl.java
index 27ab799416bd..33a86544b8dd 100755
--- a/media/java/android/media/videoeditor/VideoEditorImpl.java
+++ b/media/java/android/media/videoeditor/VideoEditorImpl.java
@@ -33,6 +33,8 @@ import org.xmlpull.v1.XmlSerializer;
import android.graphics.Bitmap;
import android.graphics.Rect;
+import android.media.videoeditor.MediaImageItem;
+import android.media.videoeditor.MediaItem;
import android.util.Log;
import android.util.Xml;
import android.view.Surface;
@@ -772,8 +774,7 @@ public class VideoEditorImpl implements VideoEditor {
return mediaItem;
}
- private synchronized MediaItem removeMediaItem(String mediaItemId,
- boolean flag) {
+ private synchronized MediaItem removeMediaItem(String mediaItemId, boolean flag) {
final String firstItemString = mMediaItems.get(0).getId();
final MediaItem mediaItem = getMediaItem(mediaItemId);
@@ -879,14 +880,15 @@ public class VideoEditorImpl implements VideoEditor {
/**
* the project form XML
*/
- private void load() throws FileNotFoundException, XmlPullParserException,
- IOException {
+ private void load() throws FileNotFoundException, XmlPullParserException, IOException {
final File file = new File(mProjectPath, PROJECT_FILENAME);
/**
* Load the metadata
*/
final FileInputStream fis = new FileInputStream(file);
try {
+ final List<String> ignoredMediaItems = new ArrayList<String>();
+
final XmlPullParser parser = Xml.newPullParser();
parser.setInput(fis, "UTF-8");
int eventType = parser.getEventType();
@@ -898,74 +900,43 @@ public class VideoEditorImpl implements VideoEditor {
case XmlPullParser.START_TAG: {
name = parser.getName();
if (TAG_PROJECT.equals(name)) {
- mAspectRatio =
- Integer.parseInt(parser.getAttributeValue("",
+ mAspectRatio = Integer.parseInt(parser.getAttributeValue("",
ATTR_ASPECT_RATIO));
final boolean mRegenPCM =
- Boolean.parseBoolean(parser.getAttributeValue("",
+ Boolean.parseBoolean(parser.getAttributeValue("",
ATTR_REGENERATE_PCM));
mMANativeHelper.setAudioflag(mRegenPCM);
-
} else if (TAG_MEDIA_ITEM.equals(name)) {
- final String mediaItemId =
- parser.getAttributeValue("", ATTR_ID);
- final String type =
- parser.getAttributeValue("", ATTR_TYPE);
- final String filename =
- parser.getAttributeValue("", ATTR_FILENAME);
- final int renderingMode =
- Integer.parseInt(parser.getAttributeValue("",
- ATTR_RENDERING_MODE));
-
- if (MediaImageItem.class.getSimpleName().equals(type)) {
- final long durationMs = Long.parseLong(parser.getAttributeValue("",
- ATTR_DURATION));
- currentMediaItem = new MediaImageItem(this, mediaItemId, filename,
- durationMs, renderingMode);
- } else if (MediaVideoItem.class.getSimpleName().equals(type)) {
- final long beginMs = Long.parseLong(parser.getAttributeValue("",
- ATTR_BEGIN_TIME));
- final long endMs = Long.parseLong(parser.getAttributeValue("",
- ATTR_END_TIME));
- final int volume = Integer.parseInt(parser.getAttributeValue("",
- ATTR_VOLUME));
- final boolean muted = Boolean.parseBoolean(parser.getAttributeValue("",
- ATTR_MUTED));
- final String audioWaveformFilename = parser.getAttributeValue("",
- ATTR_AUDIO_WAVEFORM_FILENAME);
- currentMediaItem = new MediaVideoItem(this, mediaItemId, filename,
- renderingMode, beginMs, endMs, volume, muted,
- audioWaveformFilename);
-
- final long beginTimeMs = Long.parseLong(parser.getAttributeValue("",
- ATTR_BEGIN_TIME));
- final long endTimeMs = Long.parseLong(parser.getAttributeValue("",
- ATTR_END_TIME));
- ((MediaVideoItem)currentMediaItem).setExtractBoundaries(beginTimeMs,
- endTimeMs);
-
- final int volumePercent = Integer.parseInt(parser.getAttributeValue("",
- ATTR_VOLUME));
- ((MediaVideoItem)currentMediaItem).setVolume(volumePercent);
- } else {
- Log.e(TAG, "Unknown media item type: " + type);
- currentMediaItem = null;
- }
-
- if (currentMediaItem != null) {
+ final String mediaItemId = parser.getAttributeValue("", ATTR_ID);
+ try {
+ currentMediaItem = parseMediaItem(parser);
mMediaItems.add(currentMediaItem);
+ } catch (Exception ex) {
+ Log.w(TAG, "Cannot load media item: " + mediaItemId, ex);
+ currentMediaItem = null;
+ // Ignore the media item
+ ignoredMediaItems.add(mediaItemId);
}
} else if (TAG_TRANSITION.equals(name)) {
- final Transition transition = parseTransition(parser);
- if (transition != null) {
- mTransitions.add(transition);
+ try {
+ final Transition transition = parseTransition(parser,
+ ignoredMediaItems);
+ // The transition will be null if the bounding
+ // media items are ignored
+ if (transition != null) {
+ mTransitions.add(transition);
+ }
+ } catch (Exception ex) {
+ Log.w(TAG, "Cannot load transition", ex);
}
} else if (TAG_OVERLAY.equals(name)) {
if (currentMediaItem != null) {
- currentOverlay = parseOverlay(parser, currentMediaItem);
- if (currentOverlay != null) {
+ try {
+ currentOverlay = parseOverlay(parser, currentMediaItem);
currentMediaItem.addOverlay(currentOverlay);
+ } catch (Exception ex) {
+ Log.w(TAG, "Cannot load overlay", ex);
}
}
} else if (TAG_OVERLAY_USER_ATTRIBUTES.equals(name)) {
@@ -978,40 +949,45 @@ public class VideoEditorImpl implements VideoEditor {
}
} else if (TAG_EFFECT.equals(name)) {
if (currentMediaItem != null) {
- final Effect effect = parseEffect(parser, currentMediaItem);
- if (effect != null) {
+ try {
+ final Effect effect = parseEffect(parser, currentMediaItem);
currentMediaItem.addEffect(effect);
- }
- if (effect instanceof EffectKenBurns) {
- final boolean isImageClipGenerated =
- Boolean.parseBoolean(parser.getAttributeValue("",
- ATTR_IS_IMAGE_CLIP_GENERATED));
- if(isImageClipGenerated) {
- String filename = parser.getAttributeValue("",
- ATTR_GENERATED_IMAGE_CLIP);
- if (new File(filename).exists() == true) {
+
+ if (effect instanceof EffectKenBurns) {
+ final boolean isImageClipGenerated =
+ Boolean.parseBoolean(parser.getAttributeValue("",
+ ATTR_IS_IMAGE_CLIP_GENERATED));
+ if(isImageClipGenerated) {
+ final String filename = parser.getAttributeValue("",
+ ATTR_GENERATED_IMAGE_CLIP);
+ if (new File(filename).exists() == true) {
+ ((MediaImageItem)currentMediaItem).
+ setGeneratedImageClip(filename);
+ ((MediaImageItem)currentMediaItem).
+ setRegenerateClip(false);
+ } else {
+ ((MediaImageItem)currentMediaItem).
+ setGeneratedImageClip(null);
+ ((MediaImageItem)currentMediaItem).
+ setRegenerateClip(true);
+ }
+ } else {
((MediaImageItem)currentMediaItem).
- setGeneratedImageClip(filename);
+ setGeneratedImageClip(null);
((MediaImageItem)currentMediaItem).
- setRegenerateClip(false);
- } else {
- ((MediaImageItem)currentMediaItem).
- setGeneratedImageClip(null);
- ((MediaImageItem)currentMediaItem).
- setRegenerateClip(true);
- }
- } else {
- ((MediaImageItem)currentMediaItem).
- setGeneratedImageClip(null);
- ((MediaImageItem)currentMediaItem).
- setRegenerateClip(true);
+ setRegenerateClip(true);
+ }
}
+ } catch (Exception ex) {
+ Log.w(TAG, "Cannot load effect", ex);
}
}
} else if (TAG_AUDIO_TRACK.equals(name)) {
- final AudioTrack audioTrack = parseAudioTrack(parser);
- if (audioTrack != null) {
+ try {
+ final AudioTrack audioTrack = parseAudioTrack(parser);
addAudioTrack(audioTrack);
+ } catch (Exception ex) {
+ Log.w(TAG, "Cannot load audio track", ex);
}
}
break;
@@ -1042,34 +1018,81 @@ public class VideoEditorImpl implements VideoEditor {
}
/**
+ * Parse the media item
+ *
+ * @param parser The parser
+ * @return The media item
+ */
+ private MediaItem parseMediaItem(XmlPullParser parser) throws IOException {
+ final String mediaItemId = parser.getAttributeValue("", ATTR_ID);
+ final String type = parser.getAttributeValue("", ATTR_TYPE);
+ final String filename = parser.getAttributeValue("", ATTR_FILENAME);
+ final int renderingMode = Integer.parseInt(parser.getAttributeValue("",
+ ATTR_RENDERING_MODE));
+
+ final MediaItem currentMediaItem;
+ if (MediaImageItem.class.getSimpleName().equals(type)) {
+ final long durationMs = Long.parseLong(parser.getAttributeValue("", ATTR_DURATION));
+ currentMediaItem = new MediaImageItem(this, mediaItemId, filename,
+ durationMs, renderingMode);
+ } else if (MediaVideoItem.class.getSimpleName().equals(type)) {
+ final long beginMs = Long.parseLong(parser.getAttributeValue("", ATTR_BEGIN_TIME));
+ final long endMs = Long.parseLong(parser.getAttributeValue("", ATTR_END_TIME));
+ final int volume = Integer.parseInt(parser.getAttributeValue("", ATTR_VOLUME));
+ final boolean muted = Boolean.parseBoolean(parser.getAttributeValue("", ATTR_MUTED));
+ final String audioWaveformFilename = parser.getAttributeValue("",
+ ATTR_AUDIO_WAVEFORM_FILENAME);
+ currentMediaItem = new MediaVideoItem(this, mediaItemId, filename,
+ renderingMode, beginMs, endMs, volume, muted, audioWaveformFilename);
+
+ final long beginTimeMs = Long.parseLong(parser.getAttributeValue("", ATTR_BEGIN_TIME));
+ final long endTimeMs = Long.parseLong(parser.getAttributeValue("", ATTR_END_TIME));
+ ((MediaVideoItem)currentMediaItem).setExtractBoundaries(beginTimeMs, endTimeMs);
+
+ final int volumePercent = Integer.parseInt(parser.getAttributeValue("", ATTR_VOLUME));
+ ((MediaVideoItem)currentMediaItem).setVolume(volumePercent);
+ } else {
+ throw new IllegalArgumentException("Unknown media item type: " + type);
+ }
+
+ return currentMediaItem;
+ }
+
+ /**
* Parse the transition
*
* @param parser The parser
+ * @param ignoredMediaItems The list of ignored media items
+ *
* @return The transition
*/
- private Transition parseTransition(XmlPullParser parser) {
+ private Transition parseTransition(XmlPullParser parser, List<String> ignoredMediaItems) {
final String transitionId = parser.getAttributeValue("", ATTR_ID);
final String type = parser.getAttributeValue("", ATTR_TYPE);
- final long durationMs = Long.parseLong(parser.getAttributeValue("",
- ATTR_DURATION));
- final int behavior = Integer.parseInt(parser.getAttributeValue("",
- ATTR_BEHAVIOR));
- final boolean isTransitionGenerated;
+ final long durationMs = Long.parseLong(parser.getAttributeValue("", ATTR_DURATION));
+ final int behavior = Integer.parseInt(parser.getAttributeValue("", ATTR_BEHAVIOR));
-
- final String beforeMediaItemId = parser.getAttributeValue("",
- ATTR_BEFORE_MEDIA_ITEM_ID);
+ final String beforeMediaItemId = parser.getAttributeValue("", ATTR_BEFORE_MEDIA_ITEM_ID);
final MediaItem beforeMediaItem;
if (beforeMediaItemId != null) {
+ if (ignoredMediaItems.contains(beforeMediaItemId)) {
+ // This transition is ignored
+ return null;
+ }
+
beforeMediaItem = getMediaItem(beforeMediaItemId);
} else {
beforeMediaItem = null;
}
- final String afterMediaItemId = parser.getAttributeValue("",
- ATTR_AFTER_MEDIA_ITEM_ID);
+ final String afterMediaItemId = parser.getAttributeValue("", ATTR_AFTER_MEDIA_ITEM_ID);
final MediaItem afterMediaItem;
if (afterMediaItemId != null) {
+ if (ignoredMediaItems.contains(afterMediaItemId)) {
+ // This transition is ignored
+ return null;
+ }
+
afterMediaItem = getMediaItem(afterMediaItemId);
} else {
afterMediaItem = null;
@@ -1093,18 +1116,10 @@ public class VideoEditorImpl implements VideoEditor {
transition = new TransitionFadeBlack(transitionId, afterMediaItem, beforeMediaItem,
durationMs, behavior);
} else {
- transition = null;
- }
-
- if (beforeMediaItem != null) {
- beforeMediaItem.setBeginTransition(transition);
- }
-
- if (afterMediaItem != null) {
- afterMediaItem.setEndTransition(transition);
+ throw new IllegalArgumentException("Invalid transition type: " + type);
}
- isTransitionGenerated = Boolean.parseBoolean(parser.getAttributeValue("",
+ final boolean isTransitionGenerated = Boolean.parseBoolean(parser.getAttributeValue("",
ATTR_IS_TRANSITION_GENERATED));
if (isTransitionGenerated == true) {
final String transitionFile = parser.getAttributeValue("",
@@ -1116,10 +1131,19 @@ public class VideoEditorImpl implements VideoEditor {
transition.setFilename(null);
}
}
+
+ // Use the transition
+ if (beforeMediaItem != null) {
+ beforeMediaItem.setBeginTransition(transition);
+ }
+
+ if (afterMediaItem != null) {
+ afterMediaItem.setEndTransition(transition);
+ }
+
return transition;
}
-
/**
* Parse the overlay
*
@@ -1131,40 +1155,32 @@ public class VideoEditorImpl implements VideoEditor {
private Overlay parseOverlay(XmlPullParser parser, MediaItem mediaItem) {
final String overlayId = parser.getAttributeValue("", ATTR_ID);
final String type = parser.getAttributeValue("", ATTR_TYPE);
- final long durationMs = Long.parseLong(parser.getAttributeValue("",
- ATTR_DURATION));
- final long startTimeMs = Long.parseLong(parser.getAttributeValue("",
- ATTR_BEGIN_TIME));
+ final long durationMs = Long.parseLong(parser.getAttributeValue("", ATTR_DURATION));
+ final long startTimeMs = Long.parseLong(parser.getAttributeValue("", ATTR_BEGIN_TIME));
final Overlay overlay;
if (OverlayFrame.class.getSimpleName().equals(type)) {
final String filename = parser.getAttributeValue("", ATTR_FILENAME);
- overlay = new OverlayFrame(mediaItem, overlayId, filename,
- startTimeMs, durationMs);
+ overlay = new OverlayFrame(mediaItem, overlayId, filename, startTimeMs, durationMs);
} else {
- overlay = null;
+ throw new IllegalArgumentException("Invalid overlay type: " + type);
}
- final String overlayRgbFileName = parser.getAttributeValue("",
- ATTR_OVERLAY_RGB_FILENAME);
+ final String overlayRgbFileName = parser.getAttributeValue("", ATTR_OVERLAY_RGB_FILENAME);
if (overlayRgbFileName != null) {
((OverlayFrame)overlay).setFilename(overlayRgbFileName);
- final int overlayFrameWidth =
- Integer.parseInt(parser.getAttributeValue("",
+ final int overlayFrameWidth = Integer.parseInt(parser.getAttributeValue("",
ATTR_OVERLAY_FRAME_WIDTH));
- final int overlayFrameHeight =
- Integer.parseInt(parser.getAttributeValue("",
+ final int overlayFrameHeight = Integer.parseInt(parser.getAttributeValue("",
ATTR_OVERLAY_FRAME_HEIGHT));
((OverlayFrame)overlay).setOverlayFrameWidth(overlayFrameWidth);
((OverlayFrame)overlay).setOverlayFrameHeight(overlayFrameHeight);
- final int resizedRGBFrameWidth =
- Integer.parseInt(parser.getAttributeValue("",
+ final int resizedRGBFrameWidth = Integer.parseInt(parser.getAttributeValue("",
ATTR_OVERLAY_RESIZED_RGB_FRAME_WIDTH));
- final int resizedRGBFrameHeight =
- Integer.parseInt(parser.getAttributeValue("",
+ final int resizedRGBFrameHeight = Integer.parseInt(parser.getAttributeValue("",
ATTR_OVERLAY_RESIZED_RGB_FRAME_HEIGHT));
((OverlayFrame)overlay).setResizedRGBSize(resizedRGBFrameWidth, resizedRGBFrameHeight);
@@ -1184,21 +1200,17 @@ public class VideoEditorImpl implements VideoEditor {
private Effect parseEffect(XmlPullParser parser, MediaItem mediaItem) {
final String effectId = parser.getAttributeValue("", ATTR_ID);
final String type = parser.getAttributeValue("", ATTR_TYPE);
- final long durationMs = Long.parseLong(parser.getAttributeValue("",
- ATTR_DURATION));
- final long startTimeMs = Long.parseLong(parser.getAttributeValue("",
- ATTR_BEGIN_TIME));
+ final long durationMs = Long.parseLong(parser.getAttributeValue("", ATTR_DURATION));
+ final long startTimeMs = Long.parseLong(parser.getAttributeValue("", ATTR_BEGIN_TIME));
final Effect effect;
if (EffectColor.class.getSimpleName().equals(type)) {
- final int colorEffectType =
- Integer.parseInt(parser.getAttributeValue("",
+ final int colorEffectType = Integer.parseInt(parser.getAttributeValue("",
ATTR_COLOR_EFFECT_TYPE));
final int color;
if (colorEffectType == EffectColor.TYPE_COLOR
|| colorEffectType == EffectColor.TYPE_GRADIENT) {
- color = Integer.parseInt(parser.getAttributeValue("",
- ATTR_COLOR_EFFECT_VALUE));
+ color = Integer.parseInt(parser.getAttributeValue("", ATTR_COLOR_EFFECT_VALUE));
} else {
color = 0;
}
@@ -1206,27 +1218,19 @@ public class VideoEditorImpl implements VideoEditor {
durationMs, colorEffectType, color);
} else if (EffectKenBurns.class.getSimpleName().equals(type)) {
final Rect startRect = new Rect(
- Integer.parseInt(parser.getAttributeValue("",
- ATTR_START_RECT_LEFT)),
- Integer.parseInt(parser.getAttributeValue("",
- ATTR_START_RECT_TOP)),
- Integer.parseInt(parser.getAttributeValue("",
- ATTR_START_RECT_RIGHT)),
- Integer.parseInt(parser.getAttributeValue("",
- ATTR_START_RECT_BOTTOM)));
+ Integer.parseInt(parser.getAttributeValue("", ATTR_START_RECT_LEFT)),
+ Integer.parseInt(parser.getAttributeValue("", ATTR_START_RECT_TOP)),
+ Integer.parseInt(parser.getAttributeValue("", ATTR_START_RECT_RIGHT)),
+ Integer.parseInt(parser.getAttributeValue("", ATTR_START_RECT_BOTTOM)));
final Rect endRect = new Rect(
- Integer.parseInt(parser.getAttributeValue("",
- ATTR_END_RECT_LEFT)),
- Integer.parseInt(parser.getAttributeValue("",
- ATTR_END_RECT_TOP)),
- Integer.parseInt(parser.getAttributeValue("",
- ATTR_END_RECT_RIGHT)),
- Integer.parseInt(parser.getAttributeValue("",
- ATTR_END_RECT_BOTTOM)));
+ Integer.parseInt(parser.getAttributeValue("", ATTR_END_RECT_LEFT)),
+ Integer.parseInt(parser.getAttributeValue("", ATTR_END_RECT_TOP)),
+ Integer.parseInt(parser.getAttributeValue("", ATTR_END_RECT_RIGHT)),
+ Integer.parseInt(parser.getAttributeValue("", ATTR_END_RECT_BOTTOM)));
effect = new EffectKenBurns(mediaItem, effectId, startRect, endRect,
startTimeMs, durationMs);
} else {
- effect = null;
+ throw new IllegalArgumentException("Invalid effect type: " + type);
}
return effect;
@@ -1239,48 +1243,34 @@ public class VideoEditorImpl implements VideoEditor {
*
* @return The audio track
*/
- private AudioTrack parseAudioTrack(XmlPullParser parser) {
+ private AudioTrack parseAudioTrack(XmlPullParser parser) throws IOException {
final String audioTrackId = parser.getAttributeValue("", ATTR_ID);
final String filename = parser.getAttributeValue("", ATTR_FILENAME);
- final long startTimeMs = Long.parseLong(parser.getAttributeValue("",
- ATTR_START_TIME));
- final long beginMs = Long.parseLong(parser.getAttributeValue("",
- ATTR_BEGIN_TIME));
- final long endMs = Long.parseLong(parser.getAttributeValue("",
- ATTR_END_TIME));
- final int volume = Integer.parseInt(parser.getAttributeValue("",
- ATTR_VOLUME));
- final boolean muted = Boolean.parseBoolean(parser.getAttributeValue("",
- ATTR_MUTED));
- final boolean loop = Boolean.parseBoolean(parser.getAttributeValue("",
- ATTR_LOOP));
- final boolean duckingEnabled =
- Boolean.parseBoolean(parser.getAttributeValue("",
- ATTR_DUCK_ENABLED));
- final int duckThreshold = Integer.parseInt(parser.getAttributeValue("",
- ATTR_DUCK_THRESHOLD));
- final int duckedTrackVolume =
- Integer.parseInt(parser.getAttributeValue("",
+ final long startTimeMs = Long.parseLong(parser.getAttributeValue("", ATTR_START_TIME));
+ final long beginMs = Long.parseLong(parser.getAttributeValue("", ATTR_BEGIN_TIME));
+ final long endMs = Long.parseLong(parser.getAttributeValue("", ATTR_END_TIME));
+ final int volume = Integer.parseInt(parser.getAttributeValue("", ATTR_VOLUME));
+ final boolean muted = Boolean.parseBoolean(parser.getAttributeValue("", ATTR_MUTED));
+ final boolean loop = Boolean.parseBoolean(parser.getAttributeValue("", ATTR_LOOP));
+ final boolean duckingEnabled = Boolean.parseBoolean(
+ parser.getAttributeValue("", ATTR_DUCK_ENABLED));
+ final int duckThreshold = Integer.parseInt(
+ parser.getAttributeValue("", ATTR_DUCK_THRESHOLD));
+ final int duckedTrackVolume = Integer.parseInt(parser.getAttributeValue("",
ATTR_DUCKED_TRACK_VOLUME));
- final String waveformFilename = parser.getAttributeValue("",
- ATTR_AUDIO_WAVEFORM_FILENAME);
- try {
- final AudioTrack audioTrack = new AudioTrack(this, audioTrackId,
- filename, startTimeMs,
- beginMs, endMs, loop,
- volume, muted,
- duckingEnabled,
- duckThreshold,
- duckedTrackVolume,
- waveformFilename);
-
- return audioTrack;
- } catch (IOException ex) {
- return null;
- }
- }
+ final String waveformFilename = parser.getAttributeValue("", ATTR_AUDIO_WAVEFORM_FILENAME);
+ final AudioTrack audioTrack = new AudioTrack(this, audioTrackId,
+ filename, startTimeMs,
+ beginMs, endMs, loop,
+ volume, muted,
+ duckingEnabled,
+ duckThreshold,
+ duckedTrackVolume,
+ waveformFilename);
+ return audioTrack;
+ }
/*
* {@inheritDoc}
@@ -1424,7 +1414,8 @@ public class VideoEditorImpl implements VideoEditor {
Integer.toString(endRect.bottom));
final MediaItem mItem = effect.getMediaItem();
if(((MediaImageItem)mItem).getGeneratedImageClip() != null) {
- serializer.attribute("", ATTR_IS_IMAGE_CLIP_GENERATED,Boolean.toString(true));
+ serializer.attribute("", ATTR_IS_IMAGE_CLIP_GENERATED,
+ Boolean.toString(true));
serializer.attribute("", ATTR_GENERATED_IMAGE_CLIP,
((MediaImageItem)mItem).getGeneratedImageClip());
} else {
@@ -1447,28 +1438,22 @@ public class VideoEditorImpl implements VideoEditor {
for (Transition transition : mTransitions) {
serializer.startTag("", TAG_TRANSITION);
serializer.attribute("", ATTR_ID, transition.getId());
- serializer.attribute("", ATTR_TYPE,
- transition.getClass().getSimpleName());
- serializer.attribute("", ATTR_DURATION,
- Long.toString(transition.getDuration()));
- serializer.attribute("", ATTR_BEHAVIOR,
- Integer.toString(transition.getBehavior()));
+ serializer.attribute("", ATTR_TYPE, transition.getClass().getSimpleName());
+ serializer.attribute("", ATTR_DURATION, Long.toString(transition.getDuration()));
+ serializer.attribute("", ATTR_BEHAVIOR, Integer.toString(transition.getBehavior()));
serializer.attribute("", ATTR_IS_TRANSITION_GENERATED,
Boolean.toString(transition.isGenerated()));
if (transition.isGenerated() == true) {
- serializer.attribute("", ATTR_GENERATED_TRANSITION_CLIP,
- transition.mFilename);
+ serializer.attribute("", ATTR_GENERATED_TRANSITION_CLIP, transition.mFilename);
}
final MediaItem afterMediaItem = transition.getAfterMediaItem();
if (afterMediaItem != null) {
- serializer.attribute("", ATTR_AFTER_MEDIA_ITEM_ID,
- afterMediaItem.getId());
+ serializer.attribute("", ATTR_AFTER_MEDIA_ITEM_ID, afterMediaItem.getId());
}
final MediaItem beforeMediaItem = transition.getBeforeMediaItem();
if (beforeMediaItem != null) {
- serializer.attribute("", ATTR_BEFORE_MEDIA_ITEM_ID,
- beforeMediaItem.getId());
+ serializer.attribute("", ATTR_BEFORE_MEDIA_ITEM_ID, beforeMediaItem.getId());
}
if (transition instanceof TransitionSliding) {
@@ -1492,14 +1477,10 @@ public class VideoEditorImpl implements VideoEditor {
serializer.startTag("", TAG_AUDIO_TRACK);
serializer.attribute("", ATTR_ID, at.getId());
serializer.attribute("", ATTR_FILENAME, at.getFilename());
- serializer.attribute("", ATTR_START_TIME,
- Long.toString(at.getStartTime()));
- serializer.attribute("", ATTR_BEGIN_TIME,
- Long.toString(at.getBoundaryBeginTime()));
- serializer.attribute("", ATTR_END_TIME,
- Long.toString(at.getBoundaryEndTime()));
- serializer.attribute("", ATTR_VOLUME,
- Integer.toString(at.getVolume()));
+ serializer.attribute("", ATTR_START_TIME, Long.toString(at.getStartTime()));
+ serializer.attribute("", ATTR_BEGIN_TIME, Long.toString(at.getBoundaryBeginTime()));
+ serializer.attribute("", ATTR_END_TIME, Long.toString(at.getBoundaryEndTime()));
+ serializer.attribute("", ATTR_VOLUME, Integer.toString(at.getVolume()));
serializer.attribute("", ATTR_DUCK_ENABLED,
Boolean.toString(at.isDuckingEnabled()));
serializer.attribute("", ATTR_DUCKED_TRACK_VOLUME,
@@ -1585,29 +1566,29 @@ public class VideoEditorImpl implements VideoEditor {
}
boolean semAcquireDone = false;
- try{
- semAcquireDone = mMANativeHelper.lock(ENGINE_ACCESS_MAX_TIMEOUT_MS);
- if (semAcquireDone == false) {
- throw new IllegalStateException("Timeout waiting for semaphore");
- }
+ if (!mPreviewInProgress) {
+ try{
+ semAcquireDone = mMANativeHelper.lock(ENGINE_ACCESS_MAX_TIMEOUT_MS);
+ if (semAcquireDone == false) {
+ throw new IllegalStateException("Timeout waiting for semaphore");
+ }
- if (mMediaItems.size() > 0) {
- mMANativeHelper.previewStoryBoard(mMediaItems, mTransitions,
- mAudioTracks, null);
- mMANativeHelper.doPreview(surface, fromMs, toMs, loop,
- callbackAfterFrameCount, listener);
- mPreviewInProgress = true;
- }
- /**
- * release on complete by calling stopPreview
- */
- } catch (InterruptedException ex) {
- Log.w(TAG, "The thread was interrupted", new Throwable());
- throw new IllegalStateException("The thread was interrupted");
- } finally {
- if (semAcquireDone) {
- mMANativeHelper.unlock();
+ if (mMediaItems.size() > 0) {
+ mPreviewInProgress = true;
+ mMANativeHelper.previewStoryBoard(mMediaItems, mTransitions,
+ mAudioTracks, null);
+ mMANativeHelper.doPreview(surface, fromMs, toMs, loop,
+ callbackAfterFrameCount, listener);
+ }
+ /**
+ * release on complete by calling stopPreview
+ */
+ } catch (InterruptedException ex) {
+ Log.w(TAG, "The thread was interrupted", new Throwable());
+ throw new IllegalStateException("The thread was interrupted");
}
+ } else {
+ throw new IllegalStateException("Preview already in progress");
}
}
@@ -1615,15 +1596,20 @@ public class VideoEditorImpl implements VideoEditor {
* {@inheritDoc}
*/
public long stopPreview() {
+ long result = 0;
if (mPreviewInProgress) {
- long result = mMANativeHelper.stopPreview();
- mPreviewInProgress = false;
- /**
- * release the sem acquired in startPreview
- */
- mMANativeHelper.unlock();
+ try {
+ result = mMANativeHelper.stopPreview();
+ /**
+ * release the semaphore acquired in startPreview
+ */
+ } finally {
+ mPreviewInProgress = false;
+ mMANativeHelper.unlock();
+ }
return result;
- } else {
+ }
+ else {
return 0;
}
}
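
The VideoEditorImpl changes above replace return-null parsing with parse helpers that throw and are caught per element, so one corrupt media item no longer aborts the whole project load: its id is recorded in ignoredMediaItems and any transition bound to it is dropped. A minimal, self-contained C++ sketch of that skip pattern, using hypothetical RawItem/Transition stand-ins rather than the Android classes:

    #include <cstdio>
    #include <set>
    #include <stdexcept>
    #include <string>
    #include <vector>

    // Hypothetical stand-ins for the project elements; not the Android classes.
    struct RawItem    { std::string id; std::string payload; };
    struct Transition { std::string id; std::string afterItemId; std::string beforeItemId; };

    // Stand-in for parseMediaItem(): throws on bad input instead of returning null.
    static std::string parseItem(const RawItem &raw) {
        if (raw.payload.empty()) throw std::runtime_error("corrupt media item");
        return raw.id;
    }

    int main() {
        const std::vector<RawItem> rawItems = {
            {"m1", "video"}, {"m2", ""}, {"m3", "image"}   // "m2" is corrupt
        };
        const std::vector<Transition> rawTransitions = {
            {"t1", "m1", "m3"},   // kept: both endpoints loaded
            {"t2", "m2", "m3"},   // dropped: references the ignored item "m2"
        };

        std::vector<std::string> items;
        std::set<std::string> ignored;            // mirrors ignoredMediaItems
        for (const RawItem &raw : rawItems) {
            try {
                items.push_back(parseItem(raw)); // mirrors parseMediaItem()
            } catch (const std::exception &ex) {
                std::printf("Cannot load media item %s: %s\n", raw.id.c_str(), ex.what());
                ignored.insert(raw.id);           // remember it so transitions can be skipped
            }
        }

        std::vector<Transition> transitions;
        for (const Transition &t : rawTransitions) {
            // Like parseTransition() returning null for ignored endpoints.
            if (ignored.count(t.afterItemId) || ignored.count(t.beforeItemId)) continue;
            transitions.push_back(t);
        }

        std::printf("loaded %zu items, %zu transitions\n", items.size(), transitions.size());
        return 0;
    }
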
diff --git a/media/java/android/mtp/MtpDatabase.java b/media/java/android/mtp/MtpDatabase.java
index a59556242a21..98de2f700999 100644
--- a/media/java/android/mtp/MtpDatabase.java
+++ b/media/java/android/mtp/MtpDatabase.java
@@ -309,6 +309,7 @@ public class MtpDatabase {
MtpConstants.FORMAT_M3U_PLAYLIST,
MtpConstants.FORMAT_PLS_PLAYLIST,
MtpConstants.FORMAT_XML_DOCUMENT,
+ MtpConstants.FORMAT_FLAC,
};
}
diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp
index 1a3fcd6c3870..9d9b3c08ac9d 100644
--- a/media/libmedia/AudioSystem.cpp
+++ b/media/libmedia/AudioSystem.cpp
@@ -169,15 +169,6 @@ status_t AudioSystem::setMode(int mode)
return af->setMode(mode);
}
-
-status_t AudioSystem::isStreamActive(int stream, bool* state) {
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
- *state = af->isStreamActive(stream);
- return NO_ERROR;
-}
-
-
status_t AudioSystem::setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs) {
const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
@@ -702,6 +693,14 @@ status_t AudioSystem::unregisterEffect(int id)
return aps->unregisterEffect(id);
}
+status_t AudioSystem::isStreamActive(int stream, bool* state, uint32_t inPastMs) {
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+ *state = aps->isStreamActive(stream, inPastMs);
+ return NO_ERROR;
+}
+
+
// ---------------------------------------------------------------------------
void AudioSystem::AudioPolicyServiceClient::binderDied(const wp<IBinder>& who) {
diff --git a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp
index 3a89e250d793..eec47c094517 100644
--- a/media/libmedia/IAudioFlinger.cpp
+++ b/media/libmedia/IAudioFlinger.cpp
@@ -47,7 +47,6 @@ enum {
SET_MODE,
SET_MIC_MUTE,
GET_MIC_MUTE,
- IS_STREAM_ACTIVE,
SET_PARAMETERS,
GET_PARAMETERS,
REGISTER_CLIENT,
@@ -316,15 +315,6 @@ public:
return reply.readInt32();
}
- virtual bool isStreamActive(int stream) const
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeInt32(stream);
- remote()->transact(IS_STREAM_ACTIVE, data, &reply);
- return reply.readInt32();
- }
-
virtual status_t setParameters(int ioHandle, const String8& keyValuePairs)
{
Parcel data, reply;
@@ -826,12 +816,6 @@ status_t BnAudioFlinger::onTransact(
reply->writeInt32( getMicMute() );
return NO_ERROR;
} break;
- case IS_STREAM_ACTIVE: {
- CHECK_INTERFACE(IAudioFlinger, data, reply);
- int stream = data.readInt32();
- reply->writeInt32( isStreamActive(stream) );
- return NO_ERROR;
- } break;
case SET_PARAMETERS: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
int ioHandle = data.readInt32();
diff --git a/media/libmedia/IAudioPolicyService.cpp b/media/libmedia/IAudioPolicyService.cpp
index 950c2131824f..457f7ed7af5a 100644
--- a/media/libmedia/IAudioPolicyService.cpp
+++ b/media/libmedia/IAudioPolicyService.cpp
@@ -48,7 +48,8 @@ enum {
GET_STRATEGY_FOR_STREAM,
GET_OUTPUT_FOR_EFFECT,
REGISTER_EFFECT,
- UNREGISTER_EFFECT
+ UNREGISTER_EFFECT,
+ IS_STREAM_ACTIVE
};
class BpAudioPolicyService : public BpInterface<IAudioPolicyService>
@@ -297,6 +298,15 @@ public:
return static_cast <status_t> (reply.readInt32());
}
+ virtual bool isStreamActive(int stream, uint32_t inPastMs) const
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.writeInt32(stream);
+ data.writeInt32(inPastMs);
+ remote()->transact(IS_STREAM_ACTIVE, data, &reply);
+ return reply.readInt32();
+ }
};
IMPLEMENT_META_INTERFACE(AudioPolicyService, "android.media.IAudioPolicyService");
@@ -517,6 +527,14 @@ status_t BnAudioPolicyService::onTransact(
return NO_ERROR;
} break;
+ case IS_STREAM_ACTIVE: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ int stream = data.readInt32();
+ uint32_t inPastMs = (uint32_t)data.readInt32();
+ reply->writeInt32( isStreamActive(stream, inPastMs) );
+ return NO_ERROR;
+ } break;
+
default:
return BBinder::onTransact(code, data, reply, flags);
}
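
Taken together, the AudioSystem, IAudioFlinger and IAudioPolicyService hunks move isStreamActive() from AudioFlinger to the audio policy service and give it an inPastMs window, so a stream counts as active if it rendered within the last inPastMs milliseconds rather than only at the instant of the query. A hedged caller-side sketch of the new AudioSystem entry point; the stream index and the 5-second window are illustrative, not taken from this change:

    // Hypothetical caller (not part of this change) checking whether music
    // played within the last five seconds.
    #include <media/AudioSystem.h>
    #include <utils/Errors.h>

    namespace android {

    static bool musicRecentlyActive() {
        bool active = false;
        const int stream = 3;             // assumed: the MUSIC stream index in this era's enum
        const uint32_t inPastMs = 5000;   // "active within the last 5 s", not just right now
        // Routed through the audio policy service by the binder plumbing added above.
        if (AudioSystem::isStreamActive(stream, &active, inPastMs) != NO_ERROR) {
            return false;                 // e.g. the service is not reachable
        }
        return active;
    }

    }  // namespace android
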
diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk
index 2d486e313106..029b23857546 100644
--- a/media/libstagefright/Android.mk
+++ b/media/libstagefright/Android.mk
@@ -17,6 +17,7 @@ LOCAL_SRC_FILES:= \
DRMExtractor.cpp \
ESDS.cpp \
FileSource.cpp \
+ FLACExtractor.cpp \
HTTPStream.cpp \
JPEGSource.cpp \
MP3Extractor.cpp \
@@ -54,6 +55,7 @@ LOCAL_SRC_FILES:= \
LOCAL_C_INCLUDES:= \
$(JNI_H_INCLUDE) \
$(TOP)/frameworks/base/include/media/stagefright/openmax \
+ $(TOP)/external/flac/include \
$(TOP)/external/tremolo \
$(TOP)/frameworks/base/media/libstagefright/rtsp
@@ -93,6 +95,7 @@ LOCAL_STATIC_LIBRARIES := \
libstagefright_rtsp \
libstagefright_id3 \
libstagefright_g711dec \
+ libFLAC \
LOCAL_SHARED_LIBRARIES += \
libstagefright_amrnb_common \
diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp
index 89b3dab46d73..11ac56ce296e 100644
--- a/media/libstagefright/AwesomePlayer.cpp
+++ b/media/libstagefright/AwesomePlayer.cpp
@@ -378,11 +378,14 @@ status_t AwesomePlayer::setDataSource_l(const sp<MediaExtractor> &extractor) {
}
void AwesomePlayer::reset() {
+ LOGI("reset");
+
Mutex::Autolock autoLock(mLock);
reset_l();
}
void AwesomePlayer::reset_l() {
+ LOGI("reset_l");
mDisplayWidth = 0;
mDisplayHeight = 0;
@@ -408,6 +411,10 @@ void AwesomePlayer::reset_l() {
}
}
+ if (mFlags & PREPARING) {
+ LOGI("waiting until preparation is completes.");
+ }
+
while (mFlags & PREPARING) {
mPreparedCondition.wait(mLock);
}
@@ -431,6 +438,8 @@ void AwesomePlayer::reset_l() {
}
mAudioSource.clear();
+ LOGI("audio source cleared");
+
mTimeSource = NULL;
delete mAudioPlayer;
@@ -471,6 +480,8 @@ void AwesomePlayer::reset_l() {
IPCThreadState::self()->flushCommands();
}
+ LOGI("video source cleared");
+
mDurationUs = -1;
mFlags = 0;
mExtractorFlags = 0;
@@ -487,6 +498,8 @@ void AwesomePlayer::reset_l() {
mFileSource.clear();
mBitrate = -1;
+
+ LOGI("reset_l completed");
}
void AwesomePlayer::notifyListener_l(int msg, int ext1, int ext2) {
diff --git a/media/libstagefright/DataSource.cpp b/media/libstagefright/DataSource.cpp
index ee0d792999c4..e06fa81d9f4f 100644
--- a/media/libstagefright/DataSource.cpp
+++ b/media/libstagefright/DataSource.cpp
@@ -23,6 +23,7 @@
#include "include/NuCachedSource2.h"
#include "include/NuHTTPDataSource.h"
#include "include/DRMExtractor.h"
+#include "include/FLACExtractor.h"
#include "matroska/MatroskaExtractor.h"
@@ -104,6 +105,7 @@ void DataSource::RegisterDefaultSniffers() {
RegisterSniffer(SniffMatroska);
RegisterSniffer(SniffOgg);
RegisterSniffer(SniffWAV);
+ RegisterSniffer(SniffFLAC);
RegisterSniffer(SniffAMR);
RegisterSniffer(SniffMPEG2TS);
RegisterSniffer(SniffMP3);
diff --git a/media/libstagefright/FLACExtractor.cpp b/media/libstagefright/FLACExtractor.cpp
new file mode 100644
index 000000000000..8ba5a2d1cf5a
--- /dev/null
+++ b/media/libstagefright/FLACExtractor.cpp
@@ -0,0 +1,813 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "FLACExtractor"
+#include <utils/Log.h>
+
+#include "include/FLACExtractor.h"
+// Vorbis comments
+#include "include/OggExtractor.h"
+// libFLAC parser
+#include "FLAC/stream_decoder.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/MediaBufferGroup.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MediaBuffer.h>
+
+namespace android {
+
+class FLACParser;
+
+class FLACSource : public MediaSource {
+
+public:
+ FLACSource(
+ const sp<DataSource> &dataSource,
+ const sp<MetaData> &trackMetadata);
+
+ virtual status_t start(MetaData *params);
+ virtual status_t stop();
+ virtual sp<MetaData> getFormat();
+
+ virtual status_t read(
+ MediaBuffer **buffer, const ReadOptions *options = NULL);
+
+protected:
+ virtual ~FLACSource();
+
+private:
+ sp<DataSource> mDataSource;
+ sp<MetaData> mTrackMetadata;
+ sp<FLACParser> mParser;
+ bool mInitCheck;
+ bool mStarted;
+
+ status_t init();
+
+ // no copy constructor or assignment
+ FLACSource(const FLACSource &);
+ FLACSource &operator=(const FLACSource &);
+
+};
+
+// FLACParser wraps a C libFLAC parser aka stream decoder
+
+class FLACParser : public RefBase {
+
+public:
+ FLACParser(
+ const sp<DataSource> &dataSource,
+ // If metadata pointers aren't provided, we don't fill them
+ const sp<MetaData> &fileMetadata = 0,
+ const sp<MetaData> &trackMetadata = 0);
+
+ status_t initCheck() const {
+ return mInitCheck;
+ }
+
+ // stream properties
+ unsigned getMaxBlockSize() const {
+ return mStreamInfo.max_blocksize;
+ }
+ unsigned getSampleRate() const {
+ return mStreamInfo.sample_rate;
+ }
+ unsigned getChannels() const {
+ return mStreamInfo.channels;
+ }
+ unsigned getBitsPerSample() const {
+ return mStreamInfo.bits_per_sample;
+ }
+ FLAC__uint64 getTotalSamples() const {
+ return mStreamInfo.total_samples;
+ }
+
+ // media buffers
+ void allocateBuffers();
+ void releaseBuffers();
+ MediaBuffer *readBuffer() {
+ return readBuffer(false, 0LL);
+ }
+ MediaBuffer *readBuffer(FLAC__uint64 sample) {
+ return readBuffer(true, sample);
+ }
+
+protected:
+ virtual ~FLACParser();
+
+private:
+ sp<DataSource> mDataSource;
+ sp<MetaData> mFileMetadata;
+ sp<MetaData> mTrackMetadata;
+ bool mInitCheck;
+
+ // media buffers
+ size_t mMaxBufferSize;
+ MediaBufferGroup *mGroup;
+ void (*mCopy)(short *dst, const int *const *src, unsigned nSamples);
+
+ // handle to underlying libFLAC parser
+ FLAC__StreamDecoder *mDecoder;
+
+ // current position within the data source
+ off64_t mCurrentPos;
+ bool mEOF;
+
+ // cached when the STREAMINFO metadata is parsed by libFLAC
+ FLAC__StreamMetadata_StreamInfo mStreamInfo;
+ bool mStreamInfoValid;
+
+ // cached when a decoded PCM block is "written" by libFLAC parser
+ bool mWriteRequested;
+ bool mWriteCompleted;
+ FLAC__FrameHeader mWriteHeader;
+ const FLAC__int32 * const *mWriteBuffer;
+
+ // most recent error reported by libFLAC parser
+ FLAC__StreamDecoderErrorStatus mErrorStatus;
+
+ status_t init();
+ MediaBuffer *readBuffer(bool doSeek, FLAC__uint64 sample);
+
+ // no copy constructor or assignment
+ FLACParser(const FLACParser &);
+ FLACParser &operator=(const FLACParser &);
+
+ // FLAC parser callbacks as C++ instance methods
+ FLAC__StreamDecoderReadStatus readCallback(
+ FLAC__byte buffer[], size_t *bytes);
+ FLAC__StreamDecoderSeekStatus seekCallback(
+ FLAC__uint64 absolute_byte_offset);
+ FLAC__StreamDecoderTellStatus tellCallback(
+ FLAC__uint64 *absolute_byte_offset);
+ FLAC__StreamDecoderLengthStatus lengthCallback(
+ FLAC__uint64 *stream_length);
+ FLAC__bool eofCallback();
+ FLAC__StreamDecoderWriteStatus writeCallback(
+ const FLAC__Frame *frame, const FLAC__int32 * const buffer[]);
+ void metadataCallback(const FLAC__StreamMetadata *metadata);
+ void errorCallback(FLAC__StreamDecoderErrorStatus status);
+
+ // FLAC parser callbacks as C-callable functions
+ static FLAC__StreamDecoderReadStatus read_callback(
+ const FLAC__StreamDecoder *decoder,
+ FLAC__byte buffer[], size_t *bytes,
+ void *client_data);
+ static FLAC__StreamDecoderSeekStatus seek_callback(
+ const FLAC__StreamDecoder *decoder,
+ FLAC__uint64 absolute_byte_offset,
+ void *client_data);
+ static FLAC__StreamDecoderTellStatus tell_callback(
+ const FLAC__StreamDecoder *decoder,
+ FLAC__uint64 *absolute_byte_offset,
+ void *client_data);
+ static FLAC__StreamDecoderLengthStatus length_callback(
+ const FLAC__StreamDecoder *decoder,
+ FLAC__uint64 *stream_length,
+ void *client_data);
+ static FLAC__bool eof_callback(
+ const FLAC__StreamDecoder *decoder,
+ void *client_data);
+ static FLAC__StreamDecoderWriteStatus write_callback(
+ const FLAC__StreamDecoder *decoder,
+ const FLAC__Frame *frame, const FLAC__int32 * const buffer[],
+ void *client_data);
+ static void metadata_callback(
+ const FLAC__StreamDecoder *decoder,
+ const FLAC__StreamMetadata *metadata,
+ void *client_data);
+ static void error_callback(
+ const FLAC__StreamDecoder *decoder,
+ FLAC__StreamDecoderErrorStatus status,
+ void *client_data);
+
+};
+
+// The FLAC parser calls our C++ static callbacks using C calling conventions,
+// inside FLAC__stream_decoder_process_until_end_of_metadata
+// and FLAC__stream_decoder_process_single.
+// We immediately then call our corresponding C++ instance methods
+// with the same parameter list, but discard redundant information.
+
+FLAC__StreamDecoderReadStatus FLACParser::read_callback(
+ const FLAC__StreamDecoder *decoder, FLAC__byte buffer[],
+ size_t *bytes, void *client_data)
+{
+ return ((FLACParser *) client_data)->readCallback(buffer, bytes);
+}
+
+FLAC__StreamDecoderSeekStatus FLACParser::seek_callback(
+ const FLAC__StreamDecoder *decoder,
+ FLAC__uint64 absolute_byte_offset, void *client_data)
+{
+ return ((FLACParser *) client_data)->seekCallback(absolute_byte_offset);
+}
+
+FLAC__StreamDecoderTellStatus FLACParser::tell_callback(
+ const FLAC__StreamDecoder *decoder,
+ FLAC__uint64 *absolute_byte_offset, void *client_data)
+{
+ return ((FLACParser *) client_data)->tellCallback(absolute_byte_offset);
+}
+
+FLAC__StreamDecoderLengthStatus FLACParser::length_callback(
+ const FLAC__StreamDecoder *decoder,
+ FLAC__uint64 *stream_length, void *client_data)
+{
+ return ((FLACParser *) client_data)->lengthCallback(stream_length);
+}
+
+FLAC__bool FLACParser::eof_callback(
+ const FLAC__StreamDecoder *decoder, void *client_data)
+{
+ return ((FLACParser *) client_data)->eofCallback();
+}
+
+FLAC__StreamDecoderWriteStatus FLACParser::write_callback(
+ const FLAC__StreamDecoder *decoder, const FLAC__Frame *frame,
+ const FLAC__int32 * const buffer[], void *client_data)
+{
+ return ((FLACParser *) client_data)->writeCallback(frame, buffer);
+}
+
+void FLACParser::metadata_callback(
+ const FLAC__StreamDecoder *decoder,
+ const FLAC__StreamMetadata *metadata, void *client_data)
+{
+ ((FLACParser *) client_data)->metadataCallback(metadata);
+}
+
+void FLACParser::error_callback(
+ const FLAC__StreamDecoder *decoder,
+ FLAC__StreamDecoderErrorStatus status, void *client_data)
+{
+ ((FLACParser *) client_data)->errorCallback(status);
+}
+
+// These are the corresponding callbacks with C++ calling conventions
+
+FLAC__StreamDecoderReadStatus FLACParser::readCallback(
+ FLAC__byte buffer[], size_t *bytes)
+{
+ size_t requested = *bytes;
+ ssize_t actual = mDataSource->readAt(mCurrentPos, buffer, requested);
+ if (0 > actual) {
+ *bytes = 0;
+ return FLAC__STREAM_DECODER_READ_STATUS_ABORT;
+ } else if (0 == actual) {
+ *bytes = 0;
+ mEOF = true;
+ return FLAC__STREAM_DECODER_READ_STATUS_END_OF_STREAM;
+ } else {
+ assert(actual <= requested);
+ *bytes = actual;
+ mCurrentPos += actual;
+ return FLAC__STREAM_DECODER_READ_STATUS_CONTINUE;
+ }
+}
+
+FLAC__StreamDecoderSeekStatus FLACParser::seekCallback(
+ FLAC__uint64 absolute_byte_offset)
+{
+ mCurrentPos = absolute_byte_offset;
+ mEOF = false;
+ return FLAC__STREAM_DECODER_SEEK_STATUS_OK;
+}
+
+FLAC__StreamDecoderTellStatus FLACParser::tellCallback(
+ FLAC__uint64 *absolute_byte_offset)
+{
+ *absolute_byte_offset = mCurrentPos;
+ return FLAC__STREAM_DECODER_TELL_STATUS_OK;
+}
+
+FLAC__StreamDecoderLengthStatus FLACParser::lengthCallback(
+ FLAC__uint64 *stream_length)
+{
+ off64_t size;
+ if (OK == mDataSource->getSize(&size)) {
+ *stream_length = size;
+ return FLAC__STREAM_DECODER_LENGTH_STATUS_OK;
+ } else {
+ return FLAC__STREAM_DECODER_LENGTH_STATUS_UNSUPPORTED;
+ }
+}
+
+FLAC__bool FLACParser::eofCallback()
+{
+ return mEOF;
+}
+
+FLAC__StreamDecoderWriteStatus FLACParser::writeCallback(
+ const FLAC__Frame *frame, const FLAC__int32 * const buffer[])
+{
+ if (mWriteRequested) {
+ mWriteRequested = false;
+ // FLAC parser doesn't free or realloc buffer until next frame or finish
+ mWriteHeader = frame->header;
+ mWriteBuffer = buffer;
+ mWriteCompleted = true;
+ return FLAC__STREAM_DECODER_WRITE_STATUS_CONTINUE;
+ } else {
+ LOGE("FLACParser::writeCallback unexpected");
+ return FLAC__STREAM_DECODER_WRITE_STATUS_ABORT;
+ }
+}
+
+void FLACParser::metadataCallback(const FLAC__StreamMetadata *metadata)
+{
+ switch (metadata->type) {
+ case FLAC__METADATA_TYPE_STREAMINFO:
+ if (!mStreamInfoValid) {
+ mStreamInfo = metadata->data.stream_info;
+ mStreamInfoValid = true;
+ } else {
+ LOGE("FLACParser::metadataCallback unexpected STREAMINFO");
+ }
+ break;
+ case FLAC__METADATA_TYPE_VORBIS_COMMENT:
+ {
+ const FLAC__StreamMetadata_VorbisComment *vc;
+ vc = &metadata->data.vorbis_comment;
+ for (FLAC__uint32 i = 0; i < vc->num_comments; ++i) {
+ FLAC__StreamMetadata_VorbisComment_Entry *vce;
+ vce = &vc->comments[i];
+ if (mFileMetadata != 0) {
+ parseVorbisComment(mFileMetadata, (const char *) vce->entry,
+ vce->length);
+ }
+ }
+ }
+ break;
+ case FLAC__METADATA_TYPE_PICTURE:
+ if (mFileMetadata != 0) {
+ const FLAC__StreamMetadata_Picture *p = &metadata->data.picture;
+ mFileMetadata->setData(kKeyAlbumArt,
+ MetaData::TYPE_NONE, p->data, p->data_length);
+ mFileMetadata->setCString(kKeyAlbumArtMIME, p->mime_type);
+ }
+ break;
+ default:
+ LOGW("FLACParser::metadataCallback unexpected type %u", metadata->type);
+ break;
+ }
+}
+
+void FLACParser::errorCallback(FLAC__StreamDecoderErrorStatus status)
+{
+ LOGE("FLACParser::errorCallback status=%d", status);
+ mErrorStatus = status;
+}
+
+// Copy samples from FLAC native 32-bit non-interleaved to 16-bit interleaved.
+// These are candidates for optimization if needed.
+
+static void copyMono8(short *dst, const int *const *src, unsigned nSamples)
+{
+ for (unsigned i = 0; i < nSamples; ++i) {
+ *dst++ = src[0][i] << 8;
+ }
+}
+
+static void copyStereo8(short *dst, const int *const *src, unsigned nSamples)
+{
+ for (unsigned i = 0; i < nSamples; ++i) {
+ *dst++ = src[0][i] << 8;
+ *dst++ = src[1][i] << 8;
+ }
+}
+
+static void copyMono16(short *dst, const int *const *src, unsigned nSamples)
+{
+ for (unsigned i = 0; i < nSamples; ++i) {
+ *dst++ = src[0][i];
+ }
+}
+
+static void copyStereo16(short *dst, const int *const *src, unsigned nSamples)
+{
+ for (unsigned i = 0; i < nSamples; ++i) {
+ *dst++ = src[0][i];
+ *dst++ = src[1][i];
+ }
+}
+
+// 24-bit versions should do dithering or noise-shaping, here or in AudioFlinger
+
+static void copyMono24(short *dst, const int *const *src, unsigned nSamples)
+{
+ for (unsigned i = 0; i < nSamples; ++i) {
+ *dst++ = src[0][i] >> 8;
+ }
+}
+
+static void copyStereo24(short *dst, const int *const *src, unsigned nSamples)
+{
+ for (unsigned i = 0; i < nSamples; ++i) {
+ *dst++ = src[0][i] >> 8;
+ *dst++ = src[1][i] >> 8;
+ }
+}
+
+static void copyTrespass(short *dst, const int *const *src, unsigned nSamples)
+{
+ TRESPASS();
+}
+
+// FLACParser
+
+FLACParser::FLACParser(
+ const sp<DataSource> &dataSource,
+ const sp<MetaData> &fileMetadata,
+ const sp<MetaData> &trackMetadata)
+ : mDataSource(dataSource),
+ mFileMetadata(fileMetadata),
+ mTrackMetadata(trackMetadata),
+ mInitCheck(false),
+ mMaxBufferSize(0),
+ mGroup(NULL),
+ mCopy(copyTrespass),
+ mDecoder(NULL),
+ mCurrentPos(0LL),
+ mEOF(false),
+ mStreamInfoValid(false),
+ mWriteRequested(false),
+ mWriteCompleted(false),
+ mWriteBuffer(NULL),
+ mErrorStatus((FLAC__StreamDecoderErrorStatus) -1)
+{
+ LOGV("FLACParser::FLACParser");
+ memset(&mStreamInfo, 0, sizeof(mStreamInfo));
+ memset(&mWriteHeader, 0, sizeof(mWriteHeader));
+ mInitCheck = init();
+}
+
+FLACParser::~FLACParser()
+{
+ LOGV("FLACParser::~FLACParser");
+ if (mDecoder != NULL) {
+ FLAC__stream_decoder_delete(mDecoder);
+ mDecoder = NULL;
+ }
+}
+
+status_t FLACParser::init()
+{
+ // setup libFLAC parser
+ mDecoder = FLAC__stream_decoder_new();
+ if (mDecoder == NULL) {
+ // The new should succeed, since probably all it does is a malloc
+ // that always succeeds in Android. But to avoid dependence on the
+ // libFLAC internals, we check and log here.
+ LOGE("new failed");
+ return NO_INIT;
+ }
+ FLAC__stream_decoder_set_md5_checking(mDecoder, false);
+ FLAC__stream_decoder_set_metadata_ignore_all(mDecoder);
+ FLAC__stream_decoder_set_metadata_respond(
+ mDecoder, FLAC__METADATA_TYPE_STREAMINFO);
+ FLAC__stream_decoder_set_metadata_respond(
+ mDecoder, FLAC__METADATA_TYPE_PICTURE);
+ FLAC__stream_decoder_set_metadata_respond(
+ mDecoder, FLAC__METADATA_TYPE_VORBIS_COMMENT);
+ FLAC__StreamDecoderInitStatus initStatus;
+ initStatus = FLAC__stream_decoder_init_stream(
+ mDecoder,
+ read_callback, seek_callback, tell_callback,
+ length_callback, eof_callback, write_callback,
+ metadata_callback, error_callback, (void *) this);
+ if (initStatus != FLAC__STREAM_DECODER_INIT_STATUS_OK) {
+ // A failure here probably indicates a programming error and so is
+ // unlikely to happen. But we check and log here similarly to above.
+ LOGE("init_stream failed %d", initStatus);
+ return NO_INIT;
+ }
+ // parse all metadata
+ if (!FLAC__stream_decoder_process_until_end_of_metadata(mDecoder)) {
+ LOGE("end_of_metadata failed");
+ return NO_INIT;
+ }
+ if (mStreamInfoValid) {
+ // check channel count
+ switch (getChannels()) {
+ case 1:
+ case 2:
+ break;
+ default:
+ LOGE("unsupported channel count %u", getChannels());
+ return NO_INIT;
+ }
+ // check bit depth
+ switch (getBitsPerSample()) {
+ case 8:
+ case 16:
+ case 24:
+ break;
+ default:
+ LOGE("unsupported bits per sample %u", getBitsPerSample());
+ return NO_INIT;
+ }
+ // check sample rate
+ switch (getSampleRate()) {
+ case 8000:
+ case 11025:
+ case 12000:
+ case 16000:
+ case 22050:
+ case 24000:
+ case 32000:
+ case 44100:
+ case 48000:
+ break;
+ default:
+ // 96000 would require a proper downsampler in AudioFlinger
+ LOGE("unsupported sample rate %u", getSampleRate());
+ return NO_INIT;
+ }
+ // configure the appropriate copy function, defaulting to trespass
+ static const struct {
+ unsigned mChannels;
+ unsigned mBitsPerSample;
+ void (*mCopy)(short *dst, const int *const *src, unsigned nSamples);
+ } table[] = {
+ { 1, 8, copyMono8 },
+ { 2, 8, copyStereo8 },
+ { 1, 16, copyMono16 },
+ { 2, 16, copyStereo16 },
+ { 1, 24, copyMono24 },
+ { 2, 24, copyStereo24 },
+ };
+ for (unsigned i = 0; i < sizeof(table)/sizeof(table[0]); ++i) {
+ if (table[i].mChannels == getChannels() &&
+ table[i].mBitsPerSample == getBitsPerSample()) {
+ mCopy = table[i].mCopy;
+ break;
+ }
+ }
+ // populate track metadata
+ if (mTrackMetadata != 0) {
+ mTrackMetadata->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_RAW);
+ mTrackMetadata->setInt32(kKeyChannelCount, getChannels());
+ mTrackMetadata->setInt32(kKeySampleRate, getSampleRate());
+ // sample rate is non-zero, so division by zero not possible
+ mTrackMetadata->setInt64(kKeyDuration,
+ (getTotalSamples() * 1000000LL) / getSampleRate());
+ }
+ } else {
+ LOGE("missing STREAMINFO");
+ return NO_INIT;
+ }
+ if (mFileMetadata != 0) {
+ mFileMetadata->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_FLAC);
+ }
+ return OK;
+}
+
+void FLACParser::allocateBuffers()
+{
+ CHECK(mGroup == NULL);
+ mGroup = new MediaBufferGroup;
+ mMaxBufferSize = getMaxBlockSize() * getChannels() * sizeof(short);
+ mGroup->add_buffer(new MediaBuffer(mMaxBufferSize));
+}
+
+void FLACParser::releaseBuffers()
+{
+ CHECK(mGroup != NULL);
+ delete mGroup;
+ mGroup = NULL;
+}
+
+MediaBuffer *FLACParser::readBuffer(bool doSeek, FLAC__uint64 sample)
+{
+ mWriteRequested = true;
+ mWriteCompleted = false;
+ if (doSeek) {
+ // We implement the seek callback, so this works without explicit flush
+ if (!FLAC__stream_decoder_seek_absolute(mDecoder, sample)) {
+ LOGE("FLACParser::readBuffer seek to sample %llu failed", sample);
+ return NULL;
+ }
+ LOGV("FLACParser::readBuffer seek to sample %llu succeeded", sample);
+ } else {
+ if (!FLAC__stream_decoder_process_single(mDecoder)) {
+ LOGE("FLACParser::readBuffer process_single failed");
+ return NULL;
+ }
+ }
+ if (!mWriteCompleted) {
+ LOGV("FLACParser::readBuffer write did not complete");
+ return NULL;
+ }
+ // verify that block header keeps the promises made by STREAMINFO
+ unsigned blocksize = mWriteHeader.blocksize;
+ if (blocksize == 0 || blocksize > getMaxBlockSize()) {
+ LOGE("FLACParser::readBuffer write invalid blocksize %u", blocksize);
+ return NULL;
+ }
+ if (mWriteHeader.sample_rate != getSampleRate() ||
+ mWriteHeader.channels != getChannels() ||
+ mWriteHeader.bits_per_sample != getBitsPerSample()) {
+ LOGE("FLACParser::readBuffer write changed parameters mid-stream");
+ }
+ // acquire a media buffer
+ CHECK(mGroup != NULL);
+ MediaBuffer *buffer;
+ status_t err = mGroup->acquire_buffer(&buffer);
+ if (err != OK) {
+ return NULL;
+ }
+ size_t bufferSize = blocksize * getChannels() * sizeof(short);
+ CHECK(bufferSize <= mMaxBufferSize);
+ short *data = (short *) buffer->data();
+ buffer->set_range(0, bufferSize);
+ // copy PCM from FLAC write buffer to our media buffer, with interleaving
+ (*mCopy)(data, mWriteBuffer, blocksize);
+ // fill in buffer metadata
+ CHECK(mWriteHeader.number_type == FLAC__FRAME_NUMBER_TYPE_SAMPLE_NUMBER);
+ FLAC__uint64 sampleNumber = mWriteHeader.number.sample_number;
+ int64_t timeUs = (1000000LL * sampleNumber) / getSampleRate();
+ buffer->meta_data()->setInt64(kKeyTime, timeUs);
+ buffer->meta_data()->setInt32(kKeyIsSyncFrame, 1);
+ return buffer;
+}
+
+// FLACSource
+
+FLACSource::FLACSource(
+ const sp<DataSource> &dataSource,
+ const sp<MetaData> &trackMetadata)
+ : mDataSource(dataSource),
+ mTrackMetadata(trackMetadata),
+ mParser(0),
+ mInitCheck(false),
+ mStarted(false)
+{
+ LOGV("FLACSource::FLACSource");
+ mInitCheck = init();
+}
+
+FLACSource::~FLACSource()
+{
+ LOGV("~FLACSource::FLACSource");
+ if (mStarted) {
+ stop();
+ }
+}
+
+status_t FLACSource::start(MetaData *params)
+{
+ LOGV("FLACSource::start");
+
+ CHECK(!mStarted);
+ mParser->allocateBuffers();
+ mStarted = true;
+
+ return OK;
+}
+
+status_t FLACSource::stop()
+{
+ LOGV("FLACSource::stop");
+
+ CHECK(mStarted);
+ mParser->releaseBuffers();
+ mStarted = false;
+
+ return OK;
+}
+
+sp<MetaData> FLACSource::getFormat()
+{
+ return mTrackMetadata;
+}
+
+status_t FLACSource::read(
+ MediaBuffer **outBuffer, const ReadOptions *options)
+{
+ MediaBuffer *buffer;
+ // process an optional seek request
+ int64_t seekTimeUs;
+ ReadOptions::SeekMode mode;
+ if ((NULL != options) && options->getSeekTo(&seekTimeUs, &mode)) {
+ FLAC__uint64 sample;
+ if (seekTimeUs <= 0LL) {
+ sample = 0LL;
+ } else {
+ // sample and total samples are both zero-based, and seek to EOF ok
+ sample = (seekTimeUs * mParser->getSampleRate()) / 1000000LL;
+ if (sample >= mParser->getTotalSamples()) {
+ sample = mParser->getTotalSamples();
+ }
+ }
+ buffer = mParser->readBuffer(sample);
+ // otherwise read sequentially
+ } else {
+ buffer = mParser->readBuffer();
+ }
+ *outBuffer = buffer;
+ return buffer != NULL ? (status_t) OK : (status_t) ERROR_END_OF_STREAM;
+}
+
+status_t FLACSource::init()
+{
+ LOGV("FLACSource::init");
+ // re-use the same track metadata passed into constructor from FLACExtractor
+ mParser = new FLACParser(mDataSource);
+ return mParser->initCheck();
+}
+
+// FLACExtractor
+
+FLACExtractor::FLACExtractor(
+ const sp<DataSource> &dataSource)
+ : mDataSource(dataSource),
+ mInitCheck(false)
+{
+ LOGV("FLACExtractor::FLACExtractor");
+ mInitCheck = init();
+}
+
+FLACExtractor::~FLACExtractor()
+{
+ LOGV("~FLACExtractor::FLACExtractor");
+}
+
+size_t FLACExtractor::countTracks()
+{
+ return mInitCheck == OK ? 1 : 0;
+}
+
+sp<MediaSource> FLACExtractor::getTrack(size_t index)
+{
+ if (mInitCheck != OK || index > 0) {
+ return NULL;
+ }
+ return new FLACSource(mDataSource, mTrackMetadata);
+}
+
+sp<MetaData> FLACExtractor::getTrackMetaData(
+ size_t index, uint32_t flags)
+{
+ if (mInitCheck != OK || index > 0) {
+ return NULL;
+ }
+ return mTrackMetadata;
+}
+
+status_t FLACExtractor::init()
+{
+ mFileMetadata = new MetaData;
+ mTrackMetadata = new MetaData;
+ // FLACParser will fill in the metadata for us
+ mParser = new FLACParser(mDataSource, mFileMetadata, mTrackMetadata);
+ return mParser->initCheck();
+}
+
+sp<MetaData> FLACExtractor::getMetaData()
+{
+ return mFileMetadata;
+}
+
+// Sniffer
+
+bool SniffFLAC(
+ const sp<DataSource> &source, String8 *mimeType, float *confidence,
+ sp<AMessage> *)
+{
+ // first 4 is the signature word
+ // second 4 is the sizeof STREAMINFO
+ // 042 is the mandatory STREAMINFO
+ // no need to read rest of the header, as a premature EOF will be caught later
+ uint8_t header[4+4];
+ if (source->readAt(0, header, sizeof(header)) != sizeof(header)
+ || memcmp("fLaC\0\0\0\042", header, 4+4))
+ {
+ return false;
+ }
+
+ *mimeType = MEDIA_MIMETYPE_AUDIO_FLAC;
+ *confidence = 0.5;
+
+ return true;
+}
+
+} // namespace android
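
SniffFLAC above keys on the first eight bytes of the stream: the "fLaC" marker followed by the metadata-block header of the mandatory STREAMINFO block, whose length field is 0x22 (34). A self-contained sketch of the same signature test on a plain buffer, outside the DataSource plumbing:

    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    static bool looksLikeFlac(const uint8_t *data, size_t size) {
        // 'f' 'L' 'a' 'C', then block header: type 0 (STREAMINFO, not last), length 34.
        static const uint8_t kSignature[8] = { 'f', 'L', 'a', 'C', 0x00, 0x00, 0x00, 0x22 };
        return size >= sizeof(kSignature)
                && std::memcmp(data, kSignature, sizeof(kSignature)) == 0;
    }

    int main() {
        const uint8_t flacHeader[] = { 'f', 'L', 'a', 'C', 0x00, 0x00, 0x00, 0x22, 0x12, 0x34 };
        const uint8_t oggHeader[]  = { 'O', 'g', 'g', 'S', 0x00, 0x02, 0x00, 0x00, 0x00, 0x00 };
        std::printf("flac: %d, ogg: %d\n",
                looksLikeFlac(flacHeader, sizeof(flacHeader)),
                looksLikeFlac(oggHeader, sizeof(oggHeader)));
        return 0;
    }
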
diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp
index e6e98aa3e422..108a1d12953a 100644
--- a/media/libstagefright/MPEG4Extractor.cpp
+++ b/media/libstagefright/MPEG4Extractor.cpp
@@ -1074,6 +1074,20 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
break;
}
+ case FOURCC('c', 't', 't', 's'):
+ {
+ status_t err =
+ mLastTrack->sampleTable->setCompositionTimeToSampleParams(
+ data_offset, chunk_data_size);
+
+ if (err != OK) {
+ return err;
+ }
+
+ *offset += chunk_size;
+ break;
+ }
+
case FOURCC('s', 't', 's', 's'):
{
status_t err =
diff --git a/media/libstagefright/MediaDefs.cpp b/media/libstagefright/MediaDefs.cpp
index 4599fcaa41b1..b50af89a20d2 100644
--- a/media/libstagefright/MediaDefs.cpp
+++ b/media/libstagefright/MediaDefs.cpp
@@ -35,6 +35,7 @@ const char *MEDIA_MIMETYPE_AUDIO_VORBIS = "audio/vorbis";
const char *MEDIA_MIMETYPE_AUDIO_G711_ALAW = "audio/g711-alaw";
const char *MEDIA_MIMETYPE_AUDIO_G711_MLAW = "audio/g711-mlaw";
const char *MEDIA_MIMETYPE_AUDIO_RAW = "audio/raw";
+const char *MEDIA_MIMETYPE_AUDIO_FLAC = "audio/flac";
const char *MEDIA_MIMETYPE_CONTAINER_MPEG4 = "video/mpeg4";
const char *MEDIA_MIMETYPE_CONTAINER_WAV = "audio/wav";
diff --git a/media/libstagefright/MediaExtractor.cpp b/media/libstagefright/MediaExtractor.cpp
index d12ac643bc15..08ed20677e00 100644
--- a/media/libstagefright/MediaExtractor.cpp
+++ b/media/libstagefright/MediaExtractor.cpp
@@ -26,6 +26,7 @@
#include "include/MPEG2TSExtractor.h"
#include "include/DRMExtractor.h"
#include "include/WVMExtractor.h"
+#include "include/FLACExtractor.h"
#include "matroska/MatroskaExtractor.h"
@@ -85,6 +86,8 @@ sp<MediaExtractor> MediaExtractor::Create(
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)
|| !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) {
return new AMRExtractor(source);
+ } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) {
+ return new FLACExtractor(source);
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_WAV)) {
return new WAVExtractor(source);
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_OGG)) {
diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp
index 2a19b25bfa20..247ace7c2af5 100644
--- a/media/libstagefright/OMXCodec.cpp
+++ b/media/libstagefright/OMXCodec.cpp
@@ -3269,7 +3269,7 @@ status_t OMXCodec::start(MetaData *meta) {
}
status_t OMXCodec::stop() {
- CODEC_LOGV("stop mState=%d", mState);
+ CODEC_LOGI("stop mState=%d", mState);
Mutex::Autolock autoLock(mLock);
diff --git a/media/libstagefright/OggExtractor.cpp b/media/libstagefright/OggExtractor.cpp
index cf622afffae0..0e51caf00bb0 100644
--- a/media/libstagefright/OggExtractor.cpp
+++ b/media/libstagefright/OggExtractor.cpp
@@ -114,7 +114,6 @@ private:
MediaBuffer *buffer, uint8_t type);
void parseFileMetaData();
- void extractAlbumArt(const void *data, size_t size);
uint64_t findPrevGranulePosition(off64_t pageOffset);
@@ -122,6 +121,9 @@ private:
MyVorbisExtractor &operator=(const MyVorbisExtractor &);
};
+static void extractAlbumArt(
+ const sp<MetaData> &fileMeta, const void *data, size_t size);
+
////////////////////////////////////////////////////////////////////////////////
OggSource::OggSource(const sp<OggExtractor> &extractor)
@@ -654,6 +656,17 @@ void MyVorbisExtractor::parseFileMetaData() {
mFileMeta = new MetaData;
mFileMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_CONTAINER_OGG);
+ for (int i = 0; i < mVc.comments; ++i) {
+ const char *comment = mVc.user_comments[i];
+ size_t commentLength = mVc.comment_lengths[i];
+ parseVorbisComment(mFileMeta, comment, commentLength);
+ //LOGI("comment #%d: '%s'", i + 1, mVc.user_comments[i]);
+ }
+}
+
+void parseVorbisComment(
+ const sp<MetaData> &fileMeta, const char *comment, size_t commentLength)
+{
struct {
const char *const mTag;
uint32_t mKey;
@@ -675,33 +688,25 @@ void MyVorbisExtractor::parseFileMetaData() {
{ "ANDROID_LOOP", kKeyAutoLoop },
};
- for (int i = 0; i < mVc.comments; ++i) {
- const char *comment = mVc.user_comments[i];
-
for (size_t j = 0; j < sizeof(kMap) / sizeof(kMap[0]); ++j) {
size_t tagLen = strlen(kMap[j].mTag);
if (!strncasecmp(kMap[j].mTag, comment, tagLen)
&& comment[tagLen] == '=') {
if (kMap[j].mKey == kKeyAlbumArt) {
extractAlbumArt(
+ fileMeta,
&comment[tagLen + 1],
- mVc.comment_lengths[i] - tagLen - 1);
+ commentLength - tagLen - 1);
} else if (kMap[j].mKey == kKeyAutoLoop) {
if (!strcasecmp(&comment[tagLen + 1], "true")) {
- mFileMeta->setInt32(kKeyAutoLoop, true);
+ fileMeta->setInt32(kKeyAutoLoop, true);
}
} else {
- mFileMeta->setCString(kMap[j].mKey, &comment[tagLen + 1]);
+ fileMeta->setCString(kMap[j].mKey, &comment[tagLen + 1]);
}
}
}
- }
-#if 0
- for (int i = 0; i < mVc.comments; ++i) {
- LOGI("comment #%d: '%s'", i + 1, mVc.user_comments[i]);
- }
-#endif
}
// The returned buffer should be free()d.
@@ -769,7 +774,8 @@ static uint8_t *DecodeBase64(const char *s, size_t size, size_t *outSize) {
return (uint8_t *)buffer;
}
-void MyVorbisExtractor::extractAlbumArt(const void *data, size_t size) {
+static void extractAlbumArt(
+ const sp<MetaData> &fileMeta, const void *data, size_t size) {
LOGV("extractAlbumArt from '%s'", (const char *)data);
size_t flacSize;
@@ -833,10 +839,10 @@ void MyVorbisExtractor::extractAlbumArt(const void *data, size_t size) {
LOGV("got image data, %d trailing bytes",
flacSize - 32 - typeLen - descLen - dataLen);
- mFileMeta->setData(
+ fileMeta->setData(
kKeyAlbumArt, 0, &flac[8 + typeLen + 4 + descLen + 20], dataLen);
- mFileMeta->setCString(kKeyAlbumArtMIME, type);
+ fileMeta->setCString(kKeyAlbumArtMIME, type);
exit:
free(flac);
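
The OggExtractor refactor above hoists the comment loop out of MyVorbisExtractor so that parseVorbisComment() and extractAlbumArt() can be shared with the new FLAC path, which receives the same TAG=value entries through FLAC's VORBIS_COMMENT block. A minimal sketch of matching one such entry case-insensitively, the way the kMap loop does; the helper name is hypothetical:

    #include <cstdio>
    #include <cstring>
    #include <strings.h>   // strncasecmp

    // Returns the value part if `comment` starts with `tag` followed by '=', else NULL.
    static const char *matchVorbisTag(const char *comment, const char *tag) {
        const size_t tagLen = std::strlen(tag);
        if (strncasecmp(tag, comment, tagLen) == 0 && comment[tagLen] == '=') {
            return comment + tagLen + 1;
        }
        return nullptr;
    }

    int main() {
        const char *value = matchVorbisTag("artist=Some Band", "ARTIST");
        std::printf("%s\n", value ? value : "(no match)");   // prints "Some Band"
        return 0;
    }
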
diff --git a/media/libstagefright/SampleIterator.cpp b/media/libstagefright/SampleIterator.cpp
index 062ab9bbfaa7..c7b00b171974 100644
--- a/media/libstagefright/SampleIterator.cpp
+++ b/media/libstagefright/SampleIterator.cpp
@@ -307,6 +307,8 @@ status_t SampleIterator::findSampleTime(
*time = mTTSSampleTime + mTTSDuration * (sampleIndex - mTTSSampleIndex);
+ *time += mTable->getCompositionTimeOffset(sampleIndex);
+
return OK;
}
diff --git a/media/libstagefright/SampleTable.cpp b/media/libstagefright/SampleTable.cpp
index a9163fcc7ad2..423df705a592 100644
--- a/media/libstagefright/SampleTable.cpp
+++ b/media/libstagefright/SampleTable.cpp
@@ -53,6 +53,8 @@ SampleTable::SampleTable(const sp<DataSource> &source)
mNumSampleSizes(0),
mTimeToSampleCount(0),
mTimeToSample(NULL),
+ mCompositionTimeDeltaEntries(NULL),
+ mNumCompositionTimeDeltaEntries(0),
mSyncSampleOffset(-1),
mNumSyncSamples(0),
mSyncSamples(NULL),
@@ -68,6 +70,9 @@ SampleTable::~SampleTable() {
delete[] mSyncSamples;
mSyncSamples = NULL;
+ delete[] mCompositionTimeDeltaEntries;
+ mCompositionTimeDeltaEntries = NULL;
+
delete[] mTimeToSample;
mTimeToSample = NULL;
@@ -260,6 +265,51 @@ status_t SampleTable::setTimeToSampleParams(
return OK;
}
+status_t SampleTable::setCompositionTimeToSampleParams(
+ off64_t data_offset, size_t data_size) {
+ LOGI("There are reordered frames present.");
+
+ if (mCompositionTimeDeltaEntries != NULL || data_size < 8) {
+ return ERROR_MALFORMED;
+ }
+
+ uint8_t header[8];
+ if (mDataSource->readAt(
+ data_offset, header, sizeof(header))
+ < (ssize_t)sizeof(header)) {
+ return ERROR_IO;
+ }
+
+ if (U32_AT(header) != 0) {
+ // Expected version = 0, flags = 0.
+ return ERROR_MALFORMED;
+ }
+
+ size_t numEntries = U32_AT(&header[4]);
+
+ if (data_size != (numEntries + 1) * 8) {
+ return ERROR_MALFORMED;
+ }
+
+ mNumCompositionTimeDeltaEntries = numEntries;
+ mCompositionTimeDeltaEntries = new uint32_t[2 * numEntries];
+
+ if (mDataSource->readAt(
+ data_offset + 8, mCompositionTimeDeltaEntries, numEntries * 8)
+ < (ssize_t)numEntries * 8) {
+ delete[] mCompositionTimeDeltaEntries;
+ mCompositionTimeDeltaEntries = NULL;
+
+ return ERROR_IO;
+ }
+
+ for (size_t i = 0; i < 2 * numEntries; ++i) {
+ mCompositionTimeDeltaEntries[i] = ntohl(mCompositionTimeDeltaEntries[i]);
+ }
+
+ return OK;
+}
+
status_t SampleTable::setSyncSampleParams(off64_t data_offset, size_t data_size) {
if (mSyncSampleOffset >= 0 || data_size < 8) {
return ERROR_MALFORMED;
@@ -333,6 +383,8 @@ uint32_t abs_difference(uint32_t time1, uint32_t time2) {
status_t SampleTable::findSampleAtTime(
uint32_t req_time, uint32_t *sample_index, uint32_t flags) {
+ // XXX this currently uses decoding time, instead of composition time.
+
*sample_index = 0;
Mutex::Autolock autoLock(mLock);
@@ -607,5 +659,26 @@ status_t SampleTable::getMetaDataForSample(
return OK;
}
+uint32_t SampleTable::getCompositionTimeOffset(uint32_t sampleIndex) const {
+ if (mCompositionTimeDeltaEntries == NULL) {
+ return 0;
+ }
+
+ uint32_t curSample = 0;
+ for (size_t i = 0; i < mNumCompositionTimeDeltaEntries; ++i) {
+ uint32_t sampleCount = mCompositionTimeDeltaEntries[2 * i];
+
+ if (sampleIndex < curSample + sampleCount) {
+ uint32_t sampleDelta = mCompositionTimeDeltaEntries[2 * i + 1];
+
+ return sampleDelta;
+ }
+
+ curSample += sampleCount;
+ }
+
+ return 0;
+}
+
} // namespace android
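The new ctts parsing above expects a full box: 4 bytes of version/flags (which must be zero), a 4-byte big-endian entry count, then (sampleCount, sampleDelta) pairs, so the payload must be exactly (entries + 1) * 8 bytes. A hedged Java sketch of the same layout check and per-sample lookup, for illustration only (the shipped implementation is the C++ above; class and method names are invented):

    import java.nio.ByteBuffer;
    import java.nio.ByteOrder;

    // Sketch: parse a ctts ("composition time to sample") payload and look up
    // the offset for one sample index. Field layout follows the hunk above.
    final class CttsSketch {
        private final int[] sampleCounts;
        private final int[] sampleDeltas;

        CttsSketch(byte[] payload) {
            ByteBuffer buf = ByteBuffer.wrap(payload).order(ByteOrder.BIG_ENDIAN);
            if (buf.getInt() != 0) {                      // version == 0, flags == 0
                throw new IllegalArgumentException("unsupported ctts version/flags");
            }
            int numEntries = buf.getInt();
            if (payload.length != (numEntries + 1) * 8) { // same size check as setCompositionTimeToSampleParams
                throw new IllegalArgumentException("malformed ctts box");
            }
            sampleCounts = new int[numEntries];
            sampleDeltas = new int[numEntries];
            for (int i = 0; i < numEntries; i++) {
                sampleCounts[i] = buf.getInt();
                sampleDeltas[i] = buf.getInt();
            }
        }

        // Mirrors getCompositionTimeOffset(): walk the runs until the sample falls in one.
        int offsetForSample(int sampleIndex) {
            int cur = 0;
            for (int i = 0; i < sampleCounts.length; i++) {
                if (sampleIndex < cur + sampleCounts[i]) {
                    return sampleDeltas[i];
                }
                cur += sampleCounts[i];
            }
            return 0;   // no entry covers this sample
        }
    }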
diff --git a/media/libstagefright/StagefrightMediaScanner.cpp b/media/libstagefright/StagefrightMediaScanner.cpp
index be3df7c58a08..84f65ff11fdb 100644
--- a/media/libstagefright/StagefrightMediaScanner.cpp
+++ b/media/libstagefright/StagefrightMediaScanner.cpp
@@ -37,7 +37,7 @@ static bool FileHasAcceptableExtension(const char *extension) {
".mp3", ".mp4", ".m4a", ".3gp", ".3gpp", ".3g2", ".3gpp2",
".mpeg", ".ogg", ".mid", ".smf", ".imy", ".wma", ".aac",
".wav", ".amr", ".midi", ".xmf", ".rtttl", ".rtx", ".ota",
- ".mkv", ".mka", ".webm", ".ts", ".fl"
+ ".mkv", ".mka", ".webm", ".ts", ".fl", ".flac"
};
static const size_t kNumValidExtensions =
sizeof(kValidExtensions) / sizeof(kValidExtensions[0]);
diff --git a/media/libstagefright/include/FLACExtractor.h b/media/libstagefright/include/FLACExtractor.h
new file mode 100644
index 000000000000..ded91c27e547
--- /dev/null
+++ b/media/libstagefright/include/FLACExtractor.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FLAC_EXTRACTOR_H_
+#define FLAC_EXTRACTOR_H_
+
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/MediaExtractor.h>
+#include <utils/String8.h>
+
+namespace android {
+
+class FLACParser;
+
+class FLACExtractor : public MediaExtractor {
+
+public:
+ // Extractor assumes ownership of source
+ FLACExtractor(const sp<DataSource> &source);
+
+ virtual size_t countTracks();
+ virtual sp<MediaSource> getTrack(size_t index);
+ virtual sp<MetaData> getTrackMetaData(size_t index, uint32_t flags);
+
+ virtual sp<MetaData> getMetaData();
+
+protected:
+ virtual ~FLACExtractor();
+
+private:
+ sp<DataSource> mDataSource;
+ sp<FLACParser> mParser;
+ status_t mInitCheck;
+ sp<MetaData> mFileMetadata;
+
+ // There is only one track
+ sp<MetaData> mTrackMetadata;
+
+ status_t init();
+
+ FLACExtractor(const FLACExtractor &);
+ FLACExtractor &operator=(const FLACExtractor &);
+
+};
+
+bool SniffFLAC(const sp<DataSource> &source, String8 *mimeType,
+ float *confidence, sp<AMessage> *);
+
+} // namespace android
+
+#endif // FLAC_EXTRACTOR_H_
diff --git a/media/libstagefright/include/OggExtractor.h b/media/libstagefright/include/OggExtractor.h
index 1eda025d4288..a41f681e7af2 100644
--- a/media/libstagefright/include/OggExtractor.h
+++ b/media/libstagefright/include/OggExtractor.h
@@ -57,6 +57,9 @@ bool SniffOgg(
const sp<DataSource> &source, String8 *mimeType, float *confidence,
sp<AMessage> *);
+void parseVorbisComment(
+ const sp<MetaData> &fileMeta, const char *comment, size_t commentLength);
+
} // namespace android
#endif // OGG_EXTRACTOR_H_
diff --git a/media/libstagefright/include/SampleTable.h b/media/libstagefright/include/SampleTable.h
index c5e81365da1e..2f95de9ce5ed 100644
--- a/media/libstagefright/include/SampleTable.h
+++ b/media/libstagefright/include/SampleTable.h
@@ -46,6 +46,9 @@ public:
status_t setTimeToSampleParams(off64_t data_offset, size_t data_size);
+ status_t setCompositionTimeToSampleParams(
+ off64_t data_offset, size_t data_size);
+
status_t setSyncSampleParams(off64_t data_offset, size_t data_size);
////////////////////////////////////////////////////////////////////////////
@@ -104,6 +107,9 @@ private:
uint32_t mTimeToSampleCount;
uint32_t *mTimeToSample;
+ uint32_t *mCompositionTimeDeltaEntries;
+ size_t mNumCompositionTimeDeltaEntries;
+
off64_t mSyncSampleOffset;
uint32_t mNumSyncSamples;
uint32_t *mSyncSamples;
@@ -122,6 +128,8 @@ private:
status_t getSampleSize_l(uint32_t sample_index, size_t *sample_size);
+ uint32_t getCompositionTimeOffset(uint32_t sampleIndex) const;
+
SampleTable(const SampleTable &);
SampleTable &operator=(const SampleTable &);
};
diff --git a/opengl/java/android/opengl/GLUtils.java b/opengl/java/android/opengl/GLUtils.java
index e150c1967fc2..2f1707221d77 100644
--- a/opengl/java/android/opengl/GLUtils.java
+++ b/opengl/java/android/opengl/GLUtils.java
@@ -142,6 +142,9 @@ public final class GLUtils {
if (bitmap == null) {
throw new NullPointerException("texImage2D can't be used with a null Bitmap");
}
+ if (bitmap.isRecycled()) {
+ throw new IllegalArgumentException("bitmap is recycled");
+ }
if (native_texImage2D(target, level, -1, bitmap, -1, border)!=0) {
throw new IllegalArgumentException("invalid Bitmap format");
}
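The added guard makes this texImage2D overload fail fast with an IllegalArgumentException instead of trying to upload a recycled bitmap. A hedged sketch of a defensive caller (method name is invented; it uses the simpler public GLUtils overload, which applies the same null check):

    import android.graphics.Bitmap;
    import android.opengl.GLES20;
    import android.opengl.GLUtils;

    // Sketch: upload a bitmap as a texture, skipping the two cases the
    // framework now rejects explicitly (null and recycled bitmaps).
    static void uploadTexture(Bitmap bitmap) {
        if (bitmap == null || bitmap.isRecycled()) {
            // Avoid the NullPointerException / IllegalArgumentException
            // thrown by GLUtils.texImage2D() for these inputs.
            return;
        }
        GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
    }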
diff --git a/packages/SettingsProvider/src/com/android/providers/settings/DatabaseHelper.java b/packages/SettingsProvider/src/com/android/providers/settings/DatabaseHelper.java
index 49b71e2e1703..f336f069d623 100644
--- a/packages/SettingsProvider/src/com/android/providers/settings/DatabaseHelper.java
+++ b/packages/SettingsProvider/src/com/android/providers/settings/DatabaseHelper.java
@@ -61,7 +61,7 @@ public class DatabaseHelper extends SQLiteOpenHelper {
// database gets upgraded properly. At a minimum, please confirm that 'upgradeVersion'
// is properly propagated through your change. Not doing so will result in a loss of user
// settings.
- private static final int DATABASE_VERSION = 63;
+ private static final int DATABASE_VERSION = 64;
private Context mContext;
@@ -797,6 +797,28 @@ public class DatabaseHelper extends SQLiteOpenHelper {
upgradeVersion = 63;
}
+ if (upgradeVersion == 63) {
+ // This upgrade adds the STREAM_MUSIC type to the list of
+ // types affected by ringer modes (silent, vibrate, etc.)
+ db.beginTransaction();
+ try {
+ db.execSQL("DELETE FROM system WHERE name='"
+ + Settings.System.MODE_RINGER_STREAMS_AFFECTED + "'");
+ int newValue = (1 << AudioManager.STREAM_RING)
+ | (1 << AudioManager.STREAM_NOTIFICATION)
+ | (1 << AudioManager.STREAM_SYSTEM)
+ | (1 << AudioManager.STREAM_SYSTEM_ENFORCED)
+ | (1 << AudioManager.STREAM_MUSIC);
+ db.execSQL("INSERT INTO system ('name', 'value') values ('"
+ + Settings.System.MODE_RINGER_STREAMS_AFFECTED + "', '"
+ + String.valueOf(newValue) + "')");
+ db.setTransactionSuccessful();
+ } finally {
+ db.endTransaction();
+ }
+ upgradeVersion = 64;
+ }
+
// *** Remember to update DATABASE_VERSION above!
if (upgradeVersion != currentVersion) {
@@ -1057,10 +1079,11 @@ public class DatabaseHelper extends SQLiteOpenHelper {
loadVibrateSetting(db, false);
- // By default, only the ring/notification and system streams are affected
+ // By default, only the ring/notification, system and music streams are affected
loadSetting(stmt, Settings.System.MODE_RINGER_STREAMS_AFFECTED,
(1 << AudioManager.STREAM_RING) | (1 << AudioManager.STREAM_NOTIFICATION) |
- (1 << AudioManager.STREAM_SYSTEM) | (1 << AudioManager.STREAM_SYSTEM_ENFORCED));
+ (1 << AudioManager.STREAM_SYSTEM) | (1 << AudioManager.STREAM_SYSTEM_ENFORCED) |
+ (1 << AudioManager.STREAM_MUSIC));
loadSetting(stmt, Settings.System.MUTE_STREAMS_AFFECTED,
((1 << AudioManager.STREAM_MUSIC) |
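MODE_RINGER_STREAMS_AFFECTED is a bitmask with one bit per stream type, so the version-64 upgrade simply ORs in the STREAM_MUSIC bit. A small sketch of how such a mask is built and queried (helper names are illustrative; STREAM_SYSTEM_ENFORCED is an internal constant used by the platform code above):

    import android.media.AudioManager;

    // Sketch: MODE_RINGER_STREAMS_AFFECTED as a per-stream bitmask.
    static int ringerAffectedStreams() {
        return (1 << AudioManager.STREAM_RING)
                | (1 << AudioManager.STREAM_NOTIFICATION)
                | (1 << AudioManager.STREAM_SYSTEM)
                | (1 << AudioManager.STREAM_SYSTEM_ENFORCED)
                | (1 << AudioManager.STREAM_MUSIC);   // newly added in version 64
    }

    static boolean isAffectedByRingerMode(int mask, int streamType) {
        return (mask & (1 << streamType)) != 0;
    }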
diff --git a/packages/SystemUI/res/drawable-hdpi/ic_sysbar_back_default.png b/packages/SystemUI/res/drawable-hdpi/ic_sysbar_back_default.png
index 9812339f04c8..ca8656cd70ea 100644
--- a/packages/SystemUI/res/drawable-hdpi/ic_sysbar_back_default.png
+++ b/packages/SystemUI/res/drawable-hdpi/ic_sysbar_back_default.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-hdpi/ic_sysbar_back_ime_default.png b/packages/SystemUI/res/drawable-hdpi/ic_sysbar_back_ime_default.png
index 4f61511af1ae..a4f4e872b5d6 100644
--- a/packages/SystemUI/res/drawable-hdpi/ic_sysbar_back_ime_default.png
+++ b/packages/SystemUI/res/drawable-hdpi/ic_sysbar_back_ime_default.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-hdpi/ic_sysbar_home_default.png b/packages/SystemUI/res/drawable-hdpi/ic_sysbar_home_default.png
index 60050751fa95..06c99218f234 100644
--- a/packages/SystemUI/res/drawable-hdpi/ic_sysbar_home_default.png
+++ b/packages/SystemUI/res/drawable-hdpi/ic_sysbar_home_default.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-hdpi/ic_sysbar_menu_default.png b/packages/SystemUI/res/drawable-hdpi/ic_sysbar_menu_default.png
index 2591521f2cc3..cb938fedcc3e 100644
--- a/packages/SystemUI/res/drawable-hdpi/ic_sysbar_menu_default.png
+++ b/packages/SystemUI/res/drawable-hdpi/ic_sysbar_menu_default.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-hdpi/ic_sysbar_recent_default.png b/packages/SystemUI/res/drawable-hdpi/ic_sysbar_recent_default.png
index 77546575f3a2..b2c4d45d172c 100644
--- a/packages/SystemUI/res/drawable-hdpi/ic_sysbar_recent_default.png
+++ b/packages/SystemUI/res/drawable-hdpi/ic_sysbar_recent_default.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-hdpi/stat_sys_data_bluetooth.png b/packages/SystemUI/res/drawable-hdpi/stat_sys_data_bluetooth.png
index 08f1993feaf7..fe9be2ca1179 100644
--- a/packages/SystemUI/res/drawable-hdpi/stat_sys_data_bluetooth.png
+++ b/packages/SystemUI/res/drawable-hdpi/stat_sys_data_bluetooth.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-hdpi/stat_sys_data_bluetooth_connected.png b/packages/SystemUI/res/drawable-hdpi/stat_sys_data_bluetooth_connected.png
index 4a40b14f1ef4..f9b39666ec55 100644
--- a/packages/SystemUI/res/drawable-hdpi/stat_sys_data_bluetooth_connected.png
+++ b/packages/SystemUI/res/drawable-hdpi/stat_sys_data_bluetooth_connected.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-hdpi/stat_sys_signal_in.png b/packages/SystemUI/res/drawable-hdpi/stat_sys_signal_in.png
new file mode 100644
index 000000000000..883808abc328
--- /dev/null
+++ b/packages/SystemUI/res/drawable-hdpi/stat_sys_signal_in.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-hdpi/stat_sys_signal_out.png b/packages/SystemUI/res/drawable-hdpi/stat_sys_signal_out.png
new file mode 100644
index 000000000000..8ea42c79c44e
--- /dev/null
+++ b/packages/SystemUI/res/drawable-hdpi/stat_sys_signal_out.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-hdpi/stat_sys_wifi_in.png b/packages/SystemUI/res/drawable-hdpi/stat_sys_wifi_in.png
new file mode 100644
index 000000000000..71429ae49123
--- /dev/null
+++ b/packages/SystemUI/res/drawable-hdpi/stat_sys_wifi_in.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-hdpi/stat_sys_wifi_inout.png b/packages/SystemUI/res/drawable-hdpi/stat_sys_wifi_inout.png
new file mode 100644
index 000000000000..5c34554dddbd
--- /dev/null
+++ b/packages/SystemUI/res/drawable-hdpi/stat_sys_wifi_inout.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-hdpi/stat_sys_wifi_out.png b/packages/SystemUI/res/drawable-hdpi/stat_sys_wifi_out.png
new file mode 100644
index 000000000000..44e7072b7dd1
--- /dev/null
+++ b/packages/SystemUI/res/drawable-hdpi/stat_sys_wifi_out.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-mdpi/ic_sysbar_back_default.png b/packages/SystemUI/res/drawable-mdpi/ic_sysbar_back_default.png
index 4bcd2be5cb86..91fd0e85f7ad 100644
--- a/packages/SystemUI/res/drawable-mdpi/ic_sysbar_back_default.png
+++ b/packages/SystemUI/res/drawable-mdpi/ic_sysbar_back_default.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-mdpi/ic_sysbar_back_ime_default.png b/packages/SystemUI/res/drawable-mdpi/ic_sysbar_back_ime_default.png
index 92ffde9988e0..e4d5a328bdb3 100644
--- a/packages/SystemUI/res/drawable-mdpi/ic_sysbar_back_ime_default.png
+++ b/packages/SystemUI/res/drawable-mdpi/ic_sysbar_back_ime_default.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-mdpi/ic_sysbar_home_default.png b/packages/SystemUI/res/drawable-mdpi/ic_sysbar_home_default.png
index cfeba3e5e7f0..accdcfd24ef7 100644
--- a/packages/SystemUI/res/drawable-mdpi/ic_sysbar_home_default.png
+++ b/packages/SystemUI/res/drawable-mdpi/ic_sysbar_home_default.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-mdpi/ic_sysbar_menu_default.png b/packages/SystemUI/res/drawable-mdpi/ic_sysbar_menu_default.png
index 2490cdf8dd98..0d58c3ad9cec 100644
--- a/packages/SystemUI/res/drawable-mdpi/ic_sysbar_menu_default.png
+++ b/packages/SystemUI/res/drawable-mdpi/ic_sysbar_menu_default.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-mdpi/ic_sysbar_recent_default.png b/packages/SystemUI/res/drawable-mdpi/ic_sysbar_recent_default.png
index 1d97e0581c59..88f0a44141a3 100644
--- a/packages/SystemUI/res/drawable-mdpi/ic_sysbar_recent_default.png
+++ b/packages/SystemUI/res/drawable-mdpi/ic_sysbar_recent_default.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-mdpi/stat_sys_data_bluetooth.png b/packages/SystemUI/res/drawable-mdpi/stat_sys_data_bluetooth.png
index 5ca2415a6670..45a97fd233ac 100644
--- a/packages/SystemUI/res/drawable-mdpi/stat_sys_data_bluetooth.png
+++ b/packages/SystemUI/res/drawable-mdpi/stat_sys_data_bluetooth.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-mdpi/stat_sys_data_bluetooth_connected.png b/packages/SystemUI/res/drawable-mdpi/stat_sys_data_bluetooth_connected.png
index b727c2d81310..306afd09804f 100644
--- a/packages/SystemUI/res/drawable-mdpi/stat_sys_data_bluetooth_connected.png
+++ b/packages/SystemUI/res/drawable-mdpi/stat_sys_data_bluetooth_connected.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-mdpi/stat_sys_signal_in.png b/packages/SystemUI/res/drawable-mdpi/stat_sys_signal_in.png
new file mode 100644
index 000000000000..e429ea180546
--- /dev/null
+++ b/packages/SystemUI/res/drawable-mdpi/stat_sys_signal_in.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-mdpi/stat_sys_signal_inout.png b/packages/SystemUI/res/drawable-mdpi/stat_sys_signal_inout.png
new file mode 100644
index 000000000000..e3bcaf955fb5
--- /dev/null
+++ b/packages/SystemUI/res/drawable-mdpi/stat_sys_signal_inout.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-mdpi/stat_sys_signal_out.png b/packages/SystemUI/res/drawable-mdpi/stat_sys_signal_out.png
new file mode 100644
index 000000000000..26db91e1241b
--- /dev/null
+++ b/packages/SystemUI/res/drawable-mdpi/stat_sys_signal_out.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-mdpi/stat_sys_wifi_in.png b/packages/SystemUI/res/drawable-mdpi/stat_sys_wifi_in.png
new file mode 100644
index 000000000000..957c5baa9832
--- /dev/null
+++ b/packages/SystemUI/res/drawable-mdpi/stat_sys_wifi_in.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-mdpi/stat_sys_wifi_inout.png b/packages/SystemUI/res/drawable-mdpi/stat_sys_wifi_inout.png
new file mode 100644
index 000000000000..aec60502a575
--- /dev/null
+++ b/packages/SystemUI/res/drawable-mdpi/stat_sys_wifi_inout.png
Binary files differ
diff --git a/packages/SystemUI/res/drawable-mdpi/stat_sys_wifi_out.png b/packages/SystemUI/res/drawable-mdpi/stat_sys_wifi_out.png
new file mode 100644
index 000000000000..bb8eea0d7fc7
--- /dev/null
+++ b/packages/SystemUI/res/drawable-mdpi/stat_sys_wifi_out.png
Binary files differ
diff --git a/packages/SystemUI/res/layout-xlarge/status_bar_notification_area.xml b/packages/SystemUI/res/layout-xlarge/status_bar_notification_area.xml
index 4e41e538fef4..a892cd91c1b4 100644
--- a/packages/SystemUI/res/layout-xlarge/status_bar_notification_area.xml
+++ b/packages/SystemUI/res/layout-xlarge/status_bar_notification_area.xml
@@ -84,7 +84,7 @@
android:layout_height="wrap_content"
android:singleLine="true"
android:textSize="40sp"
- android:textColor="#8cccdaff" />
+ android:textColor="#ff525e79" />
</com.android.systemui.statusbar.tablet.HoloClock>
<TextView
@@ -104,10 +104,16 @@
android:id="@+id/signal_battery_cluster"
android:layout_width="wrap_content"
android:layout_height="match_parent"
- android:layout_marginRight="8dp"
+ android:layout_marginRight="16dp"
android:orientation="horizontal"
android:gravity="center"
>
+ <ImageView
+ android:id="@+id/bluetooth"
+ android:layout_height="wrap_content"
+ android:layout_width="wrap_content"
+ android:visibility="gone"
+ />
<FrameLayout
android:layout_height="wrap_content"
android:layout_width="wrap_content"
@@ -123,6 +129,11 @@
android:layout_height="wrap_content"
android:layout_width="wrap_content"
/>
+ <ImageView
+ android:id="@+id/network_direction"
+ android:layout_height="wrap_content"
+ android:layout_width="wrap_content"
+ />
</FrameLayout>
<ImageView
android:id="@+id/battery"
diff --git a/packages/SystemUI/res/layout-xlarge/status_bar_notification_panel_title.xml b/packages/SystemUI/res/layout-xlarge/status_bar_notification_panel_title.xml
index 15b2b708d46e..0cf28a79da57 100644
--- a/packages/SystemUI/res/layout-xlarge/status_bar_notification_panel_title.xml
+++ b/packages/SystemUI/res/layout-xlarge/status_bar_notification_panel_title.xml
@@ -23,67 +23,89 @@
android:orientation="vertical"
android:background="@drawable/notify_panel_clock_bg"
>
- <ImageView
- android:id="@+id/network_signal"
- android:layout_height="32dp"
- android:layout_width="32dp"
- android:scaleType="centerInside"
+ <LinearLayout
+ android:id="@+id/icons"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
android:layout_alignParentLeft="true"
android:layout_alignParentBottom="true"
- android:baseline="22dp"
+ android:baselineAlignedChildIndex="0"
android:layout_marginLeft="32dp"
android:layout_marginTop="16dp"
android:layout_marginBottom="16dp"
- />
-
- <ImageView
- android:id="@+id/network_type"
- android:layout_height="wrap_content"
- android:layout_width="wrap_content"
- android:layout_alignLeft="@id/network_signal"
- android:layout_alignBottom="@id/network_signal"
- android:layout_marginRight="8dp"
- />
+ >
+ <ImageView
+ android:id="@+id/bluetooth"
+ android:layout_height="32dp"
+ android:layout_width="32dp"
+ android:scaleType="centerInside"
+ android:baseline="22dp"
+ android:visibility="gone"
+ />
- <TextView
- android:id="@+id/network_text"
- style="@style/StatusBarNotificationText"
- android:layout_width="wrap_content"
- android:layout_height="wrap_content"
- android:layout_toRightOf="@id/network_signal"
- android:layout_marginRight="8dp"
- android:layout_alignBaseline="@id/network_signal"
- android:singleLine="true"
- android:text="@string/status_bar_settings_settings_button"
- />
+ <FrameLayout
+ android:id="@+id/netwerk"
+ android:layout_height="32dp"
+ android:layout_width="32dp"
+ android:layout_marginRight="4dp"
+ >
+ <ImageView
+ android:id="@+id/network_signal"
+ android:layout_height="match_parent"
+ android:layout_width="match_parent"
+ />
+ <ImageView
+ android:id="@+id/network_type"
+ android:layout_height="match_parent"
+ android:layout_width="match_parent"
+ />
+ <ImageView
+ android:id="@+id/network_direction"
+ android:layout_height="match_parent"
+ android:layout_width="match_parent"
+ />
+ </FrameLayout>
+
+ <TextView
+ android:id="@+id/network_text"
+ style="@style/StatusBarNotificationText"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_toRightOf="@id/netwerk"
+ android:layout_marginRight="8dp"
+ android:layout_alignBaseline="@id/network_signal"
+ android:singleLine="true"
+ android:text="@string/status_bar_settings_settings_button"
+ />
- <ImageView
- android:id="@+id/battery"
- android:layout_height="32dp"
- android:layout_width="32dp"
- android:scaleType="centerInside"
- android:layout_toRightOf="@id/network_text"
- android:layout_alignBaseline="@id/network_signal"
- android:baseline="22dp"
- />
+ <ImageView
+ android:id="@+id/battery"
+ android:layout_height="32dp"
+ android:layout_width="32dp"
+ android:scaleType="centerInside"
+ android:layout_toRightOf="@id/network_text"
+ android:layout_alignBaseline="@id/network_signal"
+ android:baseline="22dp"
+ />
- <TextView
- android:id="@+id/battery_text"
- style="@style/StatusBarNotificationText"
- android:layout_width="56dp"
- android:layout_height="wrap_content"
- android:layout_toRightOf="@id/battery"
- android:layout_alignBaseline="@id/battery"
- android:layout_marginRight="8dp"
- android:singleLine="true"
- android:text="@string/status_bar_settings_settings_button"
- />
+ <TextView
+ android:id="@+id/battery_text"
+ style="@style/StatusBarNotificationText"
+ android:layout_width="56dp"
+ android:layout_height="wrap_content"
+ android:layout_toRightOf="@id/battery"
+ android:layout_alignBaseline="@id/battery"
+ android:layout_marginRight="8dp"
+ android:singleLine="true"
+ android:text="@string/status_bar_settings_settings_button"
+ />
+ </LinearLayout>
<ImageView
android:id="@+id/settings_button"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
- android:layout_alignBaseline="@id/battery"
+ android:layout_alignBaseline="@id/icons"
android:layout_alignParentRight="true"
android:paddingRight="16dp"
android:src="@drawable/ic_sysbar_quicksettings"
@@ -94,12 +116,12 @@
android:id="@+id/notification_button"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
+ android:layout_alignBaseline="@id/icons"
android:layout_alignParentRight="true"
- android:layout_alignBaseline="@id/battery"
android:paddingRight="16dp"
- android:visibility="invisible"
android:src="@drawable/ic_notification_open"
android:baseline="21dp"
+ android:visibility="invisible"
/>
<View
diff --git a/packages/SystemUI/res/values-xlarge/colors.xml b/packages/SystemUI/res/values-xlarge/colors.xml
index 7f1e3588079a..1fd396d754b7 100644
--- a/packages/SystemUI/res/values-xlarge/colors.xml
+++ b/packages/SystemUI/res/values-xlarge/colors.xml
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<drawable name="status_bar_background">#000000</drawable>
- <drawable name="notification_icon_area_smoke">#D0000000</drawable>
+ <drawable name="notification_icon_area_smoke">#CC000000</drawable>
</resources>
diff --git a/packages/SystemUI/src/com/android/systemui/power/PowerUI.java b/packages/SystemUI/src/com/android/systemui/power/PowerUI.java
index f07f1d7d8555..3401441439bf 100644
--- a/packages/SystemUI/src/com/android/systemui/power/PowerUI.java
+++ b/packages/SystemUI/src/com/android/systemui/power/PowerUI.java
@@ -227,7 +227,7 @@ public class PowerUI extends SystemUI {
if (soundUri != null) {
final Ringtone sfx = RingtoneManager.getRingtone(mContext, soundUri);
if (sfx != null) {
- sfx.setStreamType(AudioManager.STREAM_SYSTEM);
+ sfx.setStreamType(AudioManager.STREAM_NOTIFICATION);
sfx.play();
}
}
diff --git a/packages/SystemUI/src/com/android/systemui/statusbar/NotificationData.java b/packages/SystemUI/src/com/android/systemui/statusbar/NotificationData.java
index 004174ee611a..3d904ee52bda 100644
--- a/packages/SystemUI/src/com/android/systemui/statusbar/NotificationData.java
+++ b/packages/SystemUI/src/com/android/systemui/statusbar/NotificationData.java
@@ -48,7 +48,12 @@ public class NotificationData {
private final ArrayList<Entry> mEntries = new ArrayList<Entry>();
private final Comparator<Entry> mEntryCmp = new Comparator<Entry>() {
public int compare(Entry a, Entry b) {
- return (int)(a.notification.notification.when - b.notification.notification.when);
+ final StatusBarNotification na = a.notification;
+ final StatusBarNotification nb = b.notification;
+ int priDiff = na.priority - nb.priority;
+ return (priDiff != 0)
+ ? priDiff
+ : (int)(na.notification.when - nb.notification.when);
}
};
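The new comparator orders entries by priority first and only falls back to the post time on ties. One hedged caveat: casting the millisecond difference to int can wrap for very large time gaps; a sketch of an overflow-safe variant with the same ordering intent (illustrative only, not the shipped code):

    import java.util.Comparator;

    // Sketch: priority first, then post time; the tie-break compares the long
    // timestamps directly instead of casting their difference to int.
    final class NotificationOrderSketch {
        static final class Entry {
            int priority;   // small values, so subtraction cannot overflow
            long when;      // wall-clock millis
        }

        static final Comparator<Entry> BY_PRIORITY_THEN_TIME = new Comparator<Entry>() {
            public int compare(Entry a, Entry b) {
                if (a.priority != b.priority) {
                    return a.priority - b.priority;
                }
                return a.when < b.when ? -1 : (a.when == b.when ? 0 : 1);
            }
        };
    }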
diff --git a/packages/SystemUI/src/com/android/systemui/statusbar/policy/BluetoothController.java b/packages/SystemUI/src/com/android/systemui/statusbar/policy/BluetoothController.java
new file mode 100644
index 000000000000..0525054b4aaa
--- /dev/null
+++ b/packages/SystemUI/src/com/android/systemui/statusbar/policy/BluetoothController.java
@@ -0,0 +1,79 @@
+/*
+ * Copyright (C) 2008 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.systemui.statusbar.policy;
+
+import java.util.ArrayList;
+
+import android.bluetooth.BluetoothAdapter;
+import android.bluetooth.BluetoothDevice;
+import android.content.BroadcastReceiver;
+import android.content.Context;
+import android.content.Intent;
+import android.content.IntentFilter;
+import android.util.Slog;
+import android.view.View;
+import android.widget.ImageView;
+
+import com.android.systemui.R;
+
+public class BluetoothController extends BroadcastReceiver {
+ private static final String TAG = "StatusBar.BluetoothController";
+
+ private Context mContext;
+ private ArrayList<ImageView> mIconViews = new ArrayList<ImageView>();
+
+ private int mIconId = R.drawable.stat_sys_data_bluetooth;
+ private boolean mEnabled;
+
+ public BluetoothController(Context context) {
+ mContext = context;
+
+ IntentFilter filter = new IntentFilter();
+ filter.addAction(BluetoothAdapter.ACTION_STATE_CHANGED);
+ filter.addAction(BluetoothAdapter.ACTION_CONNECTION_STATE_CHANGED);
+ context.registerReceiver(this, filter);
+ }
+
+ public void addIconView(ImageView v) {
+ mIconViews.add(v);
+ }
+
+ @Override
+ public void onReceive(Context context, Intent intent) {
+ String action = intent.getAction();
+ if (action.equals(BluetoothAdapter.ACTION_STATE_CHANGED)) {
+ int state = intent.getIntExtra(BluetoothAdapter.EXTRA_STATE, BluetoothAdapter.ERROR);
+ mEnabled = state == BluetoothAdapter.STATE_ON;
+ } else if (action.equals(BluetoothAdapter.ACTION_CONNECTION_STATE_CHANGED)) {
+ int state = intent.getIntExtra(BluetoothAdapter.EXTRA_CONNECTION_STATE,
+ BluetoothAdapter.STATE_DISCONNECTED);
+ if (state == BluetoothAdapter.STATE_CONNECTED) {
+ mIconId = R.drawable.stat_sys_data_bluetooth_connected;
+ } else {
+ mIconId = R.drawable.stat_sys_data_bluetooth;
+ }
+ }
+
+
+ int N = mIconViews.size();
+ for (int i=0; i<N; i++) {
+ ImageView v = mIconViews.get(i);
+ v.setImageResource(mIconId);
+ v.setVisibility(mEnabled ? View.VISIBLE : View.GONE);
+ }
+ }
+}
diff --git a/packages/SystemUI/src/com/android/systemui/statusbar/policy/NetworkController.java b/packages/SystemUI/src/com/android/systemui/statusbar/policy/NetworkController.java
index c94f9ee8808c..42868db00788 100644
--- a/packages/SystemUI/src/com/android/systemui/statusbar/policy/NetworkController.java
+++ b/packages/SystemUI/src/com/android/systemui/statusbar/policy/NetworkController.java
@@ -75,8 +75,8 @@ public class NetworkController extends BroadcastReceiver {
String mNetworkNameSeparator;
int mPhoneSignalIconId;
int mDataDirectionIconId;
+ int mDataDirectionOverlayIconId;
int mDataSignalIconId;
- int mDataActiveSignalIconId;
int mDataTypeIconId;
boolean mDataActive;
@@ -101,12 +101,14 @@ public class NetworkController extends BroadcastReceiver {
Context mContext;
ArrayList<ImageView> mPhoneSignalIconViews = new ArrayList<ImageView>();
ArrayList<ImageView> mDataDirectionIconViews = new ArrayList<ImageView>();
+ ArrayList<ImageView> mDataDirectionOverlayIconViews = new ArrayList<ImageView>();
ArrayList<ImageView> mWifiIconViews = new ArrayList<ImageView>();
ArrayList<ImageView> mCombinedSignalIconViews = new ArrayList<ImageView>();
ArrayList<ImageView> mDataTypeIconViews = new ArrayList<ImageView>();
ArrayList<TextView> mLabelViews = new ArrayList<TextView>();
int mLastPhoneSignalIconId = -1;
int mLastDataDirectionIconId = -1;
+ int mLastDataDirectionOverlayIconId = -1;
int mLastWifiIconId = -1;
int mLastCombinedSignalIconId = -1;
int mLastDataTypeIconId = -1;
@@ -163,6 +165,10 @@ public class NetworkController extends BroadcastReceiver {
mDataDirectionIconViews.add(v);
}
+ public void addDataDirectionOverlayIconView(ImageView v) {
+ mDataDirectionOverlayIconViews.add(v);
+ }
+
public void addWifiIconView(ImageView v) {
mWifiIconViews.add(v);
}
@@ -367,17 +373,15 @@ public class NetworkController extends BroadcastReceiver {
if (Settings.System.getInt(mContext.getContentResolver(),
Settings.System.AIRPLANE_MODE_ON, 0) == 1) {
mPhoneSignalIconId = R.drawable.stat_sys_signal_flightmode;
- mDataActiveSignalIconId = mDataSignalIconId = R.drawable.stat_sys_signal_flightmode;
+ mDataSignalIconId = R.drawable.stat_sys_signal_flightmode;
} else {
mPhoneSignalIconId = R.drawable.stat_sys_signal_null;
- // note we use 0 instead of null
- mDataActiveSignalIconId = mDataSignalIconId = R.drawable.stat_sys_signal_0;
+ mDataSignalIconId = R.drawable.stat_sys_signal_0; // note we use 0 instead of null
}
} else {
if (mSignalStrength == null) {
mPhoneSignalIconId = R.drawable.stat_sys_signal_null;
- // note we use 0 instead of null
- mDataActiveSignalIconId = mDataSignalIconId = R.drawable.stat_sys_signal_0;
+ mDataSignalIconId = R.drawable.stat_sys_signal_0; // note we use 0 instead of null
} else if (isCdma()) {
// If 3G(EV) and 1x network are available than 3G should be
// displayed, displayed RSSI should be from the EV side.
@@ -396,8 +400,6 @@ public class NetworkController extends BroadcastReceiver {
}
mPhoneSignalIconId = iconList[iconLevel];
mDataSignalIconId = TelephonyIcons.DATA_SIGNAL_STRENGTH[mInetCondition][iconLevel];
- mDataActiveSignalIconId
- = TelephonyIcons.DATA_SIGNAL_STRENGTH_ACTIVE[mInetCondition][iconLevel];
} else {
int asu = mSignalStrength.getGsmSignalStrength();
@@ -421,8 +423,6 @@ public class NetworkController extends BroadcastReceiver {
}
mPhoneSignalIconId = iconList[iconLevel];
mDataSignalIconId = TelephonyIcons.DATA_SIGNAL_STRENGTH[mInetCondition][iconLevel];
- mDataActiveSignalIconId
- = TelephonyIcons.DATA_SIGNAL_STRENGTH_ACTIVE[mInetCondition][iconLevel];
}
}
}
@@ -685,6 +685,7 @@ public class NetworkController extends BroadcastReceiver {
Context context = mContext;
int combinedSignalIconId;
+ int dataDirectionOverlayIconId = 0;
int dataTypeIconId;
String label;
int N;
@@ -699,16 +700,22 @@ public class NetworkController extends BroadcastReceiver {
dataTypeIconId = 0;
} else if (mDataConnected) {
label = mNetworkName;
+ combinedSignalIconId = mDataSignalIconId;
switch (mDataActivity) {
case TelephonyManager.DATA_ACTIVITY_IN:
+ dataDirectionOverlayIconId = R.drawable.stat_sys_signal_in;
+ break;
case TelephonyManager.DATA_ACTIVITY_OUT:
+ dataDirectionOverlayIconId = R.drawable.stat_sys_signal_out;
+ break;
case TelephonyManager.DATA_ACTIVITY_INOUT:
- combinedSignalIconId = mDataActiveSignalIconId;
+ dataDirectionOverlayIconId = R.drawable.stat_sys_signal_inout;
break;
default:
- combinedSignalIconId = mDataSignalIconId;
+ dataDirectionOverlayIconId = 0;
break;
}
+ combinedSignalIconId = mDataSignalIconId;
dataTypeIconId = mDataTypeIconId;
} else if (mBluetoothTethered) {
label = mContext.getString(R.string.bluetooth_tethered);
@@ -724,11 +731,11 @@ public class NetworkController extends BroadcastReceiver {
Slog.d(TAG, "refreshViews combinedSignalIconId=0x"
+ Integer.toHexString(combinedSignalIconId)
+ "/" + getResourceName(combinedSignalIconId)
+ + " dataDirectionOverlayIconId=0x" + Integer.toHexString(dataDirectionOverlayIconId)
+ " mDataActivity=" + mDataActivity
+ " mPhoneSignalIconId=0x" + Integer.toHexString(mPhoneSignalIconId)
+ " mDataDirectionIconId=0x" + Integer.toHexString(mDataDirectionIconId)
+ " mDataSignalIconId=0x" + Integer.toHexString(mDataSignalIconId)
- + " mDataActiveSignalIconId=0x" + Integer.toHexString(mDataActiveSignalIconId)
+ " mDataTypeIconId=0x" + Integer.toHexString(mDataTypeIconId)
+ " mWifiIconId=0x" + Integer.toHexString(mWifiIconId)
+ " mBluetoothTetherIconId=0x" + Integer.toHexString(mBluetoothTetherIconId));
@@ -789,6 +796,22 @@ public class NetworkController extends BroadcastReceiver {
}
}
+ // the data direction overlay
+ if (mLastDataDirectionOverlayIconId != dataDirectionOverlayIconId) {
+ Slog.d(TAG, "changing data overlay icon id to " + dataDirectionOverlayIconId);
+ mLastDataDirectionOverlayIconId = dataDirectionOverlayIconId;
+ N = mDataDirectionOverlayIconViews.size();
+ for (int i=0; i<N; i++) {
+ final ImageView v = mDataDirectionOverlayIconViews.get(i);
+ if (dataDirectionOverlayIconId == 0) {
+ v.setVisibility(View.INVISIBLE);
+ } else {
+ v.setVisibility(View.VISIBLE);
+ v.setImageResource(dataDirectionOverlayIconId);
+ }
+ }
+ }
+
// the label in the notification panel
if (!mLastLabel.equals(label)) {
mLastLabel = label;
@@ -834,10 +857,6 @@ public class NetworkController extends BroadcastReceiver {
pw.print(Integer.toHexString(mDataSignalIconId));
pw.print("/");
pw.println(getResourceName(mDataSignalIconId));
- pw.print(" mDataActiveSignalIconId=");
- pw.print(Integer.toHexString(mDataActiveSignalIconId));
- pw.print("/");
- pw.println(getResourceName(mDataActiveSignalIconId));
pw.print(" mDataTypeIconId=");
pw.print(Integer.toHexString(mDataTypeIconId));
pw.print("/");
@@ -872,6 +891,10 @@ public class NetworkController extends BroadcastReceiver {
pw.print(Integer.toHexString(mLastDataDirectionIconId));
pw.print("/");
pw.println(getResourceName(mLastDataDirectionIconId));
+ pw.print(" mLastDataDirectionOverlayIconId=0x");
+ pw.print(Integer.toHexString(mLastDataDirectionOverlayIconId));
+ pw.print("/");
+ pw.println(getResourceName(mLastDataDirectionOverlayIconId));
pw.print(" mLastWifiIconId=0x");
pw.print(Integer.toHexString(mLastWifiIconId));
pw.print("/");
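The data-direction overlay replaces the per-level "active signal" drawables: the signal icon itself stays constant and a small in/out/in-out arrow is layered over it in the FrameLayout, with 0 meaning "no overlay" so refreshViews() hides the ImageView. A compact sketch of the mapping the switch above implements (resource ids are the ones referenced in the hunk; the helper name is invented):

    import android.telephony.TelephonyManager;

    import com.android.systemui.R;

    // Sketch: choose the data-direction overlay for the current activity.
    static int overlayForDataActivity(int dataActivity) {
        switch (dataActivity) {
            case TelephonyManager.DATA_ACTIVITY_IN:
                return R.drawable.stat_sys_signal_in;
            case TelephonyManager.DATA_ACTIVITY_OUT:
                return R.drawable.stat_sys_signal_out;
            case TelephonyManager.DATA_ACTIVITY_INOUT:
                return R.drawable.stat_sys_signal_inout;
            default:
                return 0;   // no overlay; the view is set INVISIBLE
        }
    }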
diff --git a/packages/SystemUI/src/com/android/systemui/statusbar/tablet/TabletStatusBar.java b/packages/SystemUI/src/com/android/systemui/statusbar/tablet/TabletStatusBar.java
index 4373dba7a62f..4bac07fb44ac 100644
--- a/packages/SystemUI/src/com/android/systemui/statusbar/tablet/TabletStatusBar.java
+++ b/packages/SystemUI/src/com/android/systemui/statusbar/tablet/TabletStatusBar.java
@@ -64,6 +64,7 @@ import com.android.internal.statusbar.StatusBarNotification;
import com.android.systemui.R;
import com.android.systemui.statusbar.*;
import com.android.systemui.statusbar.policy.BatteryController;
+import com.android.systemui.statusbar.policy.BluetoothController;
import com.android.systemui.statusbar.policy.NetworkController;
import com.android.systemui.recent.RecentApplicationsActivity;
@@ -133,6 +134,7 @@ public class TabletStatusBar extends StatusBar implements
HeightReceiver mHeightReceiver;
BatteryController mBatteryController;
+ BluetoothController mBluetoothController;
NetworkController mNetworkController;
View mBarContents;
@@ -170,10 +172,14 @@ public class TabletStatusBar extends StatusBar implements
mBatteryController.addIconView((ImageView)mNotificationPanel.findViewById(R.id.battery));
mBatteryController.addLabelView(
(TextView)mNotificationPanel.findViewById(R.id.battery_text));
+ mBluetoothController.addIconView(
+ (ImageView)mNotificationPanel.findViewById(R.id.bluetooth));
mNetworkController.addCombinedSignalIconView(
(ImageView)mNotificationPanel.findViewById(R.id.network_signal));
mNetworkController.addDataTypeIconView(
(ImageView)mNotificationPanel.findViewById(R.id.network_type));
+ mNetworkController.addDataDirectionOverlayIconView(
+ (ImageView)mNotificationPanel.findViewById(R.id.network_direction));
mNetworkController.addLabelView(
(TextView)mNotificationPanel.findViewById(R.id.network_text));
mNetworkController.addLabelView(
@@ -355,11 +361,15 @@ public class TabletStatusBar extends StatusBar implements
// The icons
mBatteryController = new BatteryController(mContext);
mBatteryController.addIconView((ImageView)sb.findViewById(R.id.battery));
+ mBluetoothController = new BluetoothController(mContext);
+ mBluetoothController.addIconView((ImageView)sb.findViewById(R.id.bluetooth));
mNetworkController = new NetworkController(mContext);
mNetworkController.addCombinedSignalIconView(
(ImageView)sb.findViewById(R.id.network_signal));
mNetworkController.addDataTypeIconView(
(ImageView)sb.findViewById(R.id.network_type));
+ mNetworkController.addDataDirectionOverlayIconView(
+ (ImageView)sb.findViewById(R.id.network_direction));
// The navigation buttons
mBackButton = (ImageView)sb.findViewById(R.id.back);
@@ -660,7 +670,8 @@ public class TabletStatusBar extends StatusBar implements
&& oldContentView.getLayoutId() == contentView.getLayoutId();
ViewGroup rowParent = (ViewGroup) oldEntry.row.getParent();
boolean orderUnchanged = notification.notification.when==oldNotification.notification.when
- && notification.isOngoing() == oldNotification.isOngoing();
+ && notification.priority == oldNotification.priority;
+ // priority now encompasses isOngoing()
boolean isLastAnyway = rowParent.indexOfChild(oldEntry.row) == rowParent.getChildCount()-1;
if (contentsUnchanged && (orderUnchanged || isLastAnyway)) {
if (DEBUG) Slog.d(TAG, "reusing notification for key: " + key);
@@ -1177,7 +1188,10 @@ public class TabletStatusBar extends StatusBar implements
}
// Add the icon.
- mNotns.add(entry);
+ int pos = mNotns.add(entry);
+ if (DEBUG) {
+ Slog.d(TAG, "addNotificationViews: added at " + pos);
+ }
updateNotificationIcons();
return iconView;
@@ -1264,7 +1278,7 @@ public class TabletStatusBar extends StatusBar implements
for (int i=0; i<toShow.size(); i++) {
View v = toShow.get(i);
if (v.getParent() == null) {
- mPile.addView(toShow.get(i));
+ mPile.addView(v, N-1-i); // the notification panel has newest at the bottom
}
}
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 11ad4e468a9a..4ec16c1c110f 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -624,17 +624,6 @@ bool AudioFlinger::streamMute(int stream) const
return mStreamTypes[stream].mute;
}
-bool AudioFlinger::isStreamActive(int stream) const
-{
- Mutex::Autolock _l(mLock);
- for (uint32_t i = 0; i < mPlaybackThreads.size(); i++) {
- if (mPlaybackThreads.valueAt(i)->isStreamActive(stream)) {
- return true;
- }
- }
- return false;
-}
-
status_t AudioFlinger::setParameters(int ioHandle, const String8& keyValuePairs)
{
status_t result;
@@ -1291,20 +1280,6 @@ bool AudioFlinger::PlaybackThread::streamMute(int stream) const
return mStreamTypes[stream].mute;
}
-bool AudioFlinger::PlaybackThread::isStreamActive(int stream) const
-{
- Mutex::Autolock _l(mLock);
- size_t count = mActiveTracks.size();
- for (size_t i = 0 ; i < count ; ++i) {
- sp<Track> t = mActiveTracks[i].promote();
- if (t == 0) continue;
- Track* const track = t.get();
- if (t->type() == stream)
- return true;
- }
- return false;
-}
-
// addTrack_l() must be called with ThreadBase::mLock held
status_t AudioFlinger::PlaybackThread::addTrack_l(const sp<Track>& track)
{
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index f0ef86788c3a..81f2eb4db9b9 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -107,8 +107,6 @@ public:
virtual status_t setMicMute(bool state);
virtual bool getMicMute() const;
- virtual bool isStreamActive(int stream) const;
-
virtual status_t setParameters(int ioHandle, const String8& keyValuePairs);
virtual String8 getParameters(int ioHandle, const String8& keys);
@@ -579,8 +577,6 @@ private:
virtual float streamVolume(int stream) const;
virtual bool streamMute(int stream) const;
- bool isStreamActive(int stream) const;
-
sp<Track> createTrack_l(
const sp<AudioFlinger::Client>& client,
int streamType,
diff --git a/services/audioflinger/AudioPolicyManagerBase.cpp b/services/audioflinger/AudioPolicyManagerBase.cpp
index e84d13609e46..04d63e6e9aaa 100644
--- a/services/audioflinger/AudioPolicyManagerBase.cpp
+++ b/services/audioflinger/AudioPolicyManagerBase.cpp
@@ -313,8 +313,7 @@ void AudioPolicyManagerBase::setPhoneState(int state)
// Flag that ringtone volume must be limited to music volume until we exit MODE_RINGTONE
if (state == AudioSystem::MODE_RINGTONE &&
- (hwOutputDesc->mRefCount[AudioSystem::MUSIC] ||
- (systemTime() - mMusicStopTime) < seconds(SONIFICATION_HEADSET_MUSIC_DELAY))) {
+ isStreamActive(AudioSystem::MUSIC, SONIFICATION_HEADSET_MUSIC_DELAY)) {
mLimitRingtoneVolume = true;
} else {
mLimitRingtoneVolume = false;
@@ -479,6 +478,7 @@ audio_io_handle_t AudioPolicyManagerBase::getOutput(AudioSystem::stream_type str
outputDesc->mLatency = 0;
outputDesc->mFlags = (AudioSystem::output_flags)(flags | AudioSystem::OUTPUT_FLAG_DIRECT);
outputDesc->mRefCount[stream] = 0;
+ outputDesc->mStopTime[stream] = 0;
output = mpClientInterface->openOutput(&outputDesc->mDevice,
&outputDesc->mSamplingRate,
&outputDesc->mFormat,
@@ -607,10 +607,8 @@ status_t AudioPolicyManagerBase::stopOutput(audio_io_handle_t output,
if (outputDesc->mRefCount[stream] > 0) {
// decrement usage count of this stream on the output
outputDesc->changeRefCount(stream, -1);
- // store time at which the last music track was stopped - see computeVolume()
- if (stream == AudioSystem::MUSIC) {
- mMusicStopTime = systemTime();
- }
+ // store time at which the stream was stopped - see isStreamActive()
+ outputDesc->mStopTime[stream] = systemTime();
setOutputDevice(output, getNewDevice(output));
@@ -920,6 +918,19 @@ status_t AudioPolicyManagerBase::unregisterEffect(int id)
return NO_ERROR;
}
+bool AudioPolicyManagerBase::isStreamActive(int stream, uint32_t inPastMs) const
+{
+ nsecs_t sysTime = systemTime();
+ for (size_t i = 0; i < mOutputs.size(); i++) {
+ if (mOutputs.valueAt(i)->mRefCount[stream] != 0 ||
+ ns2ms(sysTime - mOutputs.valueAt(i)->mStopTime[stream]) < inPastMs) {
+ return true;
+ }
+ }
+ return false;
+}
+
+
status_t AudioPolicyManagerBase::dump(int fd)
{
const size_t SIZE = 256;
@@ -1010,7 +1021,7 @@ AudioPolicyManagerBase::AudioPolicyManagerBase(AudioPolicyClientInterface *clien
Thread(false),
#endif //AUDIO_POLICY_TEST
mPhoneState(AudioSystem::MODE_NORMAL), mRingerMode(0),
- mMusicStopTime(0), mLimitRingtoneVolume(false), mLastVoiceVolume(-1.0f),
+ mLimitRingtoneVolume(false), mLastVoiceVolume(-1.0f),
mTotalEffectsCpuLoad(0), mTotalEffectsMemory(0),
mA2dpSuspended(false)
{
@@ -2094,6 +2105,7 @@ AudioPolicyManagerBase::AudioOutputDescriptor::AudioOutputDescriptor()
mRefCount[i] = 0;
mCurVolume[i] = -1.0;
mMuteCount[i] = 0;
+ mStopTime[i] = 0;
}
}
@@ -2144,7 +2156,6 @@ uint32_t AudioPolicyManagerBase::AudioOutputDescriptor::strategyRefCount(routing
return refCount;
}
-
status_t AudioPolicyManagerBase::AudioOutputDescriptor::dump(int fd)
{
const size_t SIZE = 256;
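isStreamActive() above generalizes the old music-only special case: a stream counts as active if any output still holds a non-zero refcount for it, or if it stopped less than inPastMs milliseconds ago. A compact sketch of that check in plain Java for illustration (the shipped code is the C++ above; names are invented):

    // Sketch: "recently active" test used to keep limiting ringtone volume
    // for a short window after music stops. Times are in milliseconds.
    final class StreamActivitySketch {
        int refCount;      // tracks currently started on this stream
        long stopTimeMs;   // when the last track on this stream stopped

        boolean isActive(long nowMs, long inPastMs) {
            return refCount > 0 || (nowMs - stopTimeMs) < inPastMs;
        }
    }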
diff --git a/services/audioflinger/AudioPolicyService.cpp b/services/audioflinger/AudioPolicyService.cpp
index 46a01adc8086..b04672d7d6fe 100644
--- a/services/audioflinger/AudioPolicyService.cpp
+++ b/services/audioflinger/AudioPolicyService.cpp
@@ -394,6 +394,15 @@ status_t AudioPolicyService::unregisterEffect(int id)
return mpPolicyManager->unregisterEffect(id);
}
+bool AudioPolicyService::isStreamActive(int stream, uint32_t inPastMs) const
+{
+ if (mpPolicyManager == NULL) {
+ return 0;
+ }
+ Mutex::Autolock _l(mLock);
+ return mpPolicyManager->isStreamActive(stream, inPastMs);
+}
+
void AudioPolicyService::binderDied(const wp<IBinder>& who) {
LOGW("binderDied() %p, tid %d, calling tid %d", who.unsafe_get(), gettid(),
IPCThreadState::self()->getCallingPid());
diff --git a/services/audioflinger/AudioPolicyService.h b/services/audioflinger/AudioPolicyService.h
index 558f455e50f1..54af1f1e0265 100644
--- a/services/audioflinger/AudioPolicyService.h
+++ b/services/audioflinger/AudioPolicyService.h
@@ -88,6 +88,7 @@ public:
int session,
int id);
virtual status_t unregisterEffect(int id);
+ virtual bool isStreamActive(int stream, uint32_t inPastMs = 0) const;
virtual status_t onTransact(
uint32_t code,
@@ -230,8 +231,8 @@ private:
status_t dumpPermissionDenial(int fd);
- Mutex mLock; // prevents concurrent access to AudioPolicy manager functions changing device
- // connection stated our routing
+ mutable Mutex mLock; // prevents concurrent access to AudioPolicy manager functions changing
+ // device connection state or routing
AudioPolicyInterface* mpPolicyManager; // the platform specific policy manager
sp <AudioCommandThread> mAudioCommandThread; // audio commands thread
sp <AudioCommandThread> mTonePlaybackThread; // tone playback thread
diff --git a/services/java/com/android/server/NetworkManagementService.java b/services/java/com/android/server/NetworkManagementService.java
index 152605f0a4ae..ef3e89a929b2 100644
--- a/services/java/com/android/server/NetworkManagementService.java
+++ b/services/java/com/android/server/NetworkManagementService.java
@@ -26,6 +26,8 @@ import android.content.pm.PackageManager;
import android.net.Uri;
import android.net.InterfaceConfiguration;
import android.net.INetworkManagementEventObserver;
+import android.net.LinkAddress;
+import android.net.NetworkUtils;
import android.net.wifi.WifiConfiguration;
import android.net.wifi.WifiConfiguration.KeyMgmt;
import android.os.INetworkManagementService;
@@ -246,7 +248,7 @@ class NetworkManagementService extends INetworkManagementService.Stub {
}
Slog.d(TAG, String.format("rsp <%s>", rsp));
- // Rsp: 213 xx:xx:xx:xx:xx:xx yyy.yyy.yyy.yyy zzz.zzz.zzz.zzz [flag1 flag2 flag3]
+ // Rsp: 213 xx:xx:xx:xx:xx:xx yyy.yyy.yyy.yyy zzz [flag1 flag2 flag3]
StringTokenizer st = new StringTokenizer(rsp);
InterfaceConfiguration cfg;
@@ -265,18 +267,21 @@ class NetworkManagementService extends INetworkManagementService.Stub {
cfg = new InterfaceConfiguration();
cfg.hwAddr = st.nextToken(" ");
+ InetAddress addr = null;
+ int prefixLength = 0;
try {
- cfg.addr = InetAddress.getByName(st.nextToken(" "));
+ addr = InetAddress.getByName(st.nextToken(" "));
} catch (UnknownHostException uhe) {
Slog.e(TAG, "Failed to parse ipaddr", uhe);
}
try {
- cfg.mask = InetAddress.getByName(st.nextToken(" "));
- } catch (UnknownHostException uhe) {
- Slog.e(TAG, "Failed to parse netmask", uhe);
+ prefixLength = Integer.parseInt(st.nextToken(" "));
+ } catch (NumberFormatException nfe) {
+ Slog.e(TAG, "Failed to parse prefixLength", nfe);
}
+ cfg.addr = new LinkAddress(addr, prefixLength);
cfg.interfaceFlags = st.nextToken("]").trim() +"]";
} catch (NoSuchElementException nsee) {
throw new IllegalStateException(
@@ -288,8 +293,13 @@ class NetworkManagementService extends INetworkManagementService.Stub {
public void setInterfaceConfig(
String iface, InterfaceConfiguration cfg) throws IllegalStateException {
- String cmd = String.format("interface setcfg %s %s %s %s", iface,
- cfg.addr.getHostAddress(), cfg.mask.getHostAddress(),
+ LinkAddress linkAddr = cfg.addr;
+ if (linkAddr == null || linkAddr.getAddress() == null) {
+ throw new IllegalStateException("Null LinkAddress given");
+ }
+ String cmd = String.format("interface setcfg %s %s %d %s", iface,
+ linkAddr.getAddress().getHostAddress(),
+ linkAddr.getNetworkPrefixLength(),
cfg.interfaceFlags);
try {
mConnector.doCommand(cmd);
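The netd "interface getcfg" response now carries a prefix length instead of a dotted-quad netmask, and setcfg sends one back. Where older code still has a netmask in hand, the prefix length is just the number of leading one bits; a hedged helper sketch, not part of the patch (assumes a contiguous mask):

    import java.net.InetAddress;

    // Sketch: convert a contiguous IPv4/IPv6 netmask into a prefix length,
    // e.g. 255.255.255.0 -> 24.
    static int prefixLengthOf(InetAddress netmask) {
        int prefix = 0;
        for (byte b : netmask.getAddress()) {
            int v = b & 0xff;
            while ((v & 0x80) != 0) {   // count leading one bits of each byte
                prefix++;
                v = (v << 1) & 0xff;
            }
        }
        return prefix;
    }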
diff --git a/services/java/com/android/server/ScreenRotationAnimation.java b/services/java/com/android/server/ScreenRotationAnimation.java
index 90318f19044e..ef00b08ff1a4 100644
--- a/services/java/com/android/server/ScreenRotationAnimation.java
+++ b/services/java/com/android/server/ScreenRotationAnimation.java
@@ -39,10 +39,48 @@ class ScreenRotationAnimation {
static final String TAG = "ScreenRotationAnimation";
static final boolean DEBUG = false;
+ static final int FREEZE_LAYER = WindowManagerService.TYPE_LAYER_MULTIPLIER * 200;
+
+ class BlackSurface {
+ final int left;
+ final int top;
+ final Surface surface;
+
+ BlackSurface(SurfaceSession session, int layer, int l, int t, int w, int h)
+ throws Surface.OutOfResourcesException {
+ left = l;
+ top = t;
+ surface = new Surface(session, 0, "BlackSurface",
+ -1, w, h, PixelFormat.OPAQUE, Surface.FX_SURFACE_DIM);
+ surface.setAlpha(1.0f);
+ surface.setLayer(FREEZE_LAYER);
+ }
+
+ void setMatrix(Matrix matrix) {
+ mTmpMatrix.setTranslate(left, top);
+ mTmpMatrix.postConcat(matrix);
+ mTmpMatrix.getValues(mTmpFloats);
+ surface.setPosition((int)mTmpFloats[Matrix.MTRANS_X],
+ (int)mTmpFloats[Matrix.MTRANS_Y]);
+ surface.setMatrix(
+ mTmpFloats[Matrix.MSCALE_X], mTmpFloats[Matrix.MSKEW_Y],
+ mTmpFloats[Matrix.MSKEW_X], mTmpFloats[Matrix.MSCALE_Y]);
+ if (false) {
+ Slog.i(TAG, "Black Surface @ (" + left + "," + top + "): ("
+ + mTmpFloats[Matrix.MTRANS_X] + ","
+ + mTmpFloats[Matrix.MTRANS_Y] + ") matrix=["
+ + mTmpFloats[Matrix.MSCALE_X] + ","
+ + mTmpFloats[Matrix.MSCALE_Y] + "]["
+ + mTmpFloats[Matrix.MSKEW_X] + ","
+ + mTmpFloats[Matrix.MSKEW_Y] + "]");
+ }
+ }
+ }
+
final Context mContext;
final Display mDisplay;
Surface mSurface;
- Surface mBlackSurface;
+ BlackSurface[] mBlackSurfaces;
int mWidth, mHeight;
int mSnapshotRotation;
@@ -60,6 +98,7 @@ class ScreenRotationAnimation {
final DisplayMetrics mDisplayMetrics = new DisplayMetrics();
final Matrix mSnapshotInitialMatrix = new Matrix();
final Matrix mSnapshotFinalMatrix = new Matrix();
+ final Matrix mTmpMatrix = new Matrix();
final float[] mTmpFloats = new float[9];
public ScreenRotationAnimation(Context context, Display display, SurfaceSession session,
@@ -97,23 +136,11 @@ class ScreenRotationAnimation {
try {
mSurface = new Surface(session, 0, "FreezeSurface",
-1, mWidth, mHeight, PixelFormat.OPAQUE, 0);
- mSurface.setLayer(WindowManagerService.TYPE_LAYER_MULTIPLIER * 200);
+ mSurface.setLayer(FREEZE_LAYER + 1);
} catch (Surface.OutOfResourcesException e) {
Slog.w(TAG, "Unable to allocate freeze surface", e);
}
- if (true) {
- try {
- int size = mOriginalWidth > mOriginalHeight ? mOriginalWidth : mOriginalHeight;
- mBlackSurface = new Surface(session, 0, "BlackSurface",
- -1, size, size, PixelFormat.OPAQUE, Surface.FX_SURFACE_DIM);
- mBlackSurface.setAlpha(1.0f);
- mBlackSurface.setLayer(0);
- } catch (Surface.OutOfResourcesException e) {
- Slog.w(TAG, "Unable to allocate black surface", e);
- }
- }
-
setRotation(display.getRotation());
if (mSurface != null) {
@@ -138,7 +165,7 @@ class ScreenRotationAnimation {
paint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.SRC));
c.drawBitmap(screenshot, 0, 0, paint);
} else {
- c.drawColor(Color.GREEN, PorterDuff.Mode.SRC);
+ c.drawColor(Color.BLACK, PorterDuff.Mode.SRC);
}
mSurface.unlockCanvasAndPost(c);
@@ -221,7 +248,8 @@ class ScreenRotationAnimation {
/**
* Returns true if animating.
*/
- public boolean dismiss(long maxAnimationDuration, float animationScale) {
+ public boolean dismiss(SurfaceSession session, long maxAnimationDuration,
+ float animationScale) {
// Figure out how the screen has moved from the original rotation.
int delta = deltaRotation(mCurRotation, mOriginalRotation);
if (false && delta == 0) {
@@ -277,6 +305,26 @@ class ScreenRotationAnimation {
mEnterAnimation.restrictDuration(maxAnimationDuration);
mEnterAnimation.scaleCurrentDuration(animationScale);
+ if (WindowManagerService.SHOW_TRANSACTIONS) Slog.i(WindowManagerService.TAG,
+ ">>> OPEN TRANSACTION ScreenRotationAnimation.dismiss");
+ Surface.openTransaction();
+
+ mBlackSurfaces = new BlackSurface[4];
+ try {
+ final int w = mDisplayMetrics.widthPixels;
+ final int h = mDisplayMetrics.heightPixels;
+ mBlackSurfaces[0] = new BlackSurface(session, FREEZE_LAYER, -w, -h, w, h*2);
+ mBlackSurfaces[1] = new BlackSurface(session, FREEZE_LAYER, 0, -h, w*2, h);
+ mBlackSurfaces[2] = new BlackSurface(session, FREEZE_LAYER, w, 0, w, h*2);
+ mBlackSurfaces[3] = new BlackSurface(session, FREEZE_LAYER, -w, h, w*2, h);
+ } catch (Surface.OutOfResourcesException e) {
+ Slog.w(TAG, "Unable to allocate black surface", e);
+ } finally {
+ Surface.closeTransaction();
+ if (WindowManagerService.SHOW_TRANSACTIONS) Slog.i(WindowManagerService.TAG,
+ "<<< CLOSE TRANSACTION ScreenRotationAnimation.dismiss");
+ }
+
return true;
}
@@ -285,9 +333,13 @@ class ScreenRotationAnimation {
mSurface.destroy();
mSurface = null;
}
- if (mBlackSurface != null) {
- mBlackSurface.destroy();
- mBlackSurface = null;
+ if (mBlackSurfaces != null) {
+ for (int i=0; i<mBlackSurfaces.length; i++) {
+ if (mBlackSurfaces[i] != null) {
+ mBlackSurfaces[i].surface.destroy();
+ }
+ }
+ mBlackSurfaces = null;
}
if (mExitAnimation != null) {
mExitAnimation.cancel();
@@ -325,12 +377,7 @@ class ScreenRotationAnimation {
mExitAnimation = null;
mExitTransformation.clear();
if (mSurface != null) {
- mSurface.destroy();
- mSurface = null;
- }
- if (mBlackSurface != null) {
- mBlackSurface.destroy();
- mBlackSurface = null;
+ mSurface.hide();
}
}
}
@@ -343,6 +390,21 @@ class ScreenRotationAnimation {
mEnterAnimation.cancel();
mEnterAnimation = null;
mEnterTransformation.clear();
+ if (mBlackSurfaces != null) {
+ for (int i=0; i<mBlackSurfaces.length; i++) {
+ if (mBlackSurfaces[i] != null) {
+ mBlackSurfaces[i].surface.hide();
+ }
+ }
+ }
+ } else {
+ if (mBlackSurfaces != null) {
+ for (int i=0; i<mBlackSurfaces.length; i++) {
+ if (mBlackSurfaces[i] != null) {
+ mBlackSurfaces[i].setMatrix(mEnterTransformation.getMatrix());
+ }
+ }
+ }
}
}
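The four BlackSurfaces above form an oversized border around the w x h screen, so that as the frozen screenshot rotates, anything revealed outside it is already covered in black. A small sketch of the border rectangles the BlackSurface constructor calls describe (pure geometry, no Surface APIs; the helper name is invented):

    import android.graphics.Rect;

    // Sketch: the four border rectangles created in dismiss(), for a w x h display.
    // Each hugs a different edge and is doubled in one dimension so rotation
    // never exposes a gap at the corners.
    static Rect[] blackBorders(int w, int h) {
        return new Rect[] {
            new Rect(-w, -h,     0,     h),   // left column:  origin (-w,-h), size w x 2h
            new Rect( 0, -h, 2 * w,     0),   // top row:      origin (0,-h),  size 2w x h
            new Rect( w,  0, 2 * w, 2 * h),   // right column: origin (w,0),   size w x 2h
            new Rect(-w,  h,     w, 2 * h)    // bottom row:   origin (-w,h),  size 2w x h
        };
    }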
diff --git a/services/java/com/android/server/WindowManagerService.java b/services/java/com/android/server/WindowManagerService.java
index 169c6d87d063..31866bca3270 100644
--- a/services/java/com/android/server/WindowManagerService.java
+++ b/services/java/com/android/server/WindowManagerService.java
@@ -7841,7 +7841,7 @@ public class WindowManagerService extends IWindowManager.Stub
if (selfTransformation) {
tmpMatrix.postConcat(mTransformation.getMatrix());
}
- tmpMatrix.postTranslate(frame.left, frame.top);
+ tmpMatrix.postTranslate(frame.left + mXOffset, frame.top + mYOffset);
if (attachedTransformation != null) {
tmpMatrix.postConcat(attachedTransformation.getMatrix());
}
@@ -7865,8 +7865,8 @@ public class WindowManagerService extends IWindowManager.Stub
mDtDx = tmpFloats[Matrix.MSKEW_Y];
mDsDy = tmpFloats[Matrix.MSKEW_X];
mDtDy = tmpFloats[Matrix.MSCALE_Y];
- int x = (int)tmpFloats[Matrix.MTRANS_X] + mXOffset;
- int y = (int)tmpFloats[Matrix.MTRANS_Y] + mYOffset;
+ int x = (int)tmpFloats[Matrix.MTRANS_X];
+ int y = (int)tmpFloats[Matrix.MTRANS_Y];
int w = frame.width();
int h = frame.height();
mShownFrame.set(x, y, x+w, y+h);
@@ -9782,6 +9782,7 @@ public class WindowManagerService extends IWindowManager.Stub
boolean animating = false;
boolean createWatermark = false;
boolean updateRotation = false;
+ boolean screenRotationFinished = false;
if (mFxSession == null) {
mFxSession = new SurfaceSession();
@@ -9878,7 +9879,7 @@ public class WindowManagerService extends IWindowManager.Stub
if (mScreenRotationAnimation.stepAnimation(currentTime)) {
animating = true;
} else {
- mScreenRotationAnimation = null;
+ screenRotationFinished = true;
updateRotation = true;
}
}
@@ -11042,6 +11043,11 @@ public class WindowManagerService extends IWindowManager.Stub
mTurnOnScreen = false;
}
+ if (screenRotationFinished && mScreenRotationAnimation != null) {
+ mScreenRotationAnimation.kill();
+ mScreenRotationAnimation = null;
+ }
+
if (updateRotation) {
if (DEBUG_ORIENTATION) Slog.d(TAG, "Performing post-rotate rotation");
boolean changed = setRotationUncheckedLocked(
@@ -11381,7 +11387,7 @@ public class WindowManagerService extends IWindowManager.Stub
if (CUSTOM_SCREEN_ROTATION) {
if (mScreenRotationAnimation != null) {
- if (mScreenRotationAnimation.dismiss(MAX_ANIMATION_DURATION,
+ if (mScreenRotationAnimation.dismiss(mFxSession, MAX_ANIMATION_DURATION,
mTransitionAnimationScale)) {
requestAnimationLocked(0);
} else {
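The window's surface offset (mXOffset/mYOffset) is now folded into the postTranslate() step before the attached transformation is concatenated, instead of being added to the extracted MTRANS_X/MTRANS_Y afterwards; the intent appears to be that later transformations also act on the offset. A hedged Matrix sketch of that composition order (method and parameter names are illustrative, not the shipped code):

    import android.graphics.Matrix;

    // Sketch: compose a window transform with the offset folded into the
    // translate step, so a subsequent postConcat() also rotates/scales it.
    static Matrix composeWindowTransform(Matrix self, Matrix attached,
                                         float frameLeft, float frameTop,
                                         float xOffset, float yOffset) {
        Matrix m = new Matrix();
        m.postConcat(self);                                    // window's own transformation
        m.postTranslate(frameLeft + xOffset, frameTop + yOffset);
        if (attached != null) {
            m.postConcat(attached);                            // attached window / animation
        }
        return m;
    }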
diff --git a/services/java/com/android/server/connectivity/Tethering.java b/services/java/com/android/server/connectivity/Tethering.java
index 4bc3b06bd0c0..ff5f989978dc 100644
--- a/services/java/com/android/server/connectivity/Tethering.java
+++ b/services/java/com/android/server/connectivity/Tethering.java
@@ -31,6 +31,7 @@ import android.net.ConnectivityManager;
import android.net.InterfaceConfiguration;
import android.net.IConnectivityManager;
import android.net.INetworkManagementEventObserver;
+import android.net.LinkAddress;
import android.net.LinkProperties;
import android.net.NetworkInfo;
import android.os.Binder;
@@ -566,8 +567,9 @@ public class Tethering extends INetworkManagementEventObserver.Stub {
try {
ifcg = service.getInterfaceConfig(iface);
if (ifcg != null) {
- ifcg.addr = InetAddress.getByName(USB_NEAR_IFACE_ADDR);
- ifcg.mask = InetAddress.getByName(USB_NETMASK);
+ InetAddress addr = InetAddress.getByName(USB_NEAR_IFACE_ADDR);
+ InetAddress mask = InetAddress.getByName(USB_NETMASK);
+ ifcg.addr = new LinkAddress(addr, mask);
if (enabled) {
ifcg.interfaceFlags = ifcg.interfaceFlags.replace("down", "up");
} else {
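Aside (not part of the patch): the Tethering hunk above replaces the separate addr/mask InetAddress fields with a single LinkAddress built from the address and its netmask; a LinkAddress pairs an address with a prefix length. A standalone plain-Java sketch of the netmask-to-prefix-length relationship (illustrative only, not framework code):

    import java.net.InetAddress;

    public class MaskToPrefix {
        // Count the set bits of a contiguous IPv4 netmask, e.g. 255.255.255.0 -> 24.
        static int prefixLength(InetAddress mask) {
            int bits = 0;
            for (byte b : mask.getAddress()) {
                bits += Integer.bitCount(b & 0xff);
            }
            return bits;
        }

        public static void main(String[] args) throws Exception {
            System.out.println(prefixLength(InetAddress.getByName("255.255.255.0"))); // prints 24
        }
    }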
diff --git a/tests/HwAccelerationTest/AndroidManifest.xml b/tests/HwAccelerationTest/AndroidManifest.xml
index fc50334da73b..353580910c4d 100644
--- a/tests/HwAccelerationTest/AndroidManifest.xml
+++ b/tests/HwAccelerationTest/AndroidManifest.xml
@@ -96,7 +96,16 @@
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
-
+
+ <activity
+ android:name="ViewLayersActivity6"
+ android:label="_ViewLayers6">
+ <intent-filter>
+ <action android:name="android.intent.action.MAIN" />
+ <category android:name="android.intent.category.LAUNCHER" />
+ </intent-filter>
+ </activity>
+
<activity
android:name="AlphaLayersActivity"
android:label="_αLayers">
diff --git a/tests/HwAccelerationTest/res/layout/view_layers_6.xml b/tests/HwAccelerationTest/res/layout/view_layers_6.xml
new file mode 100644
index 000000000000..36cf8c9b80e3
--- /dev/null
+++ b/tests/HwAccelerationTest/res/layout/view_layers_6.xml
@@ -0,0 +1,60 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2011 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+ android:orientation="horizontal"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent">
+
+ <LinearLayout
+ android:orientation="vertical"
+ android:layout_width="0dip"
+ android:layout_height="match_parent"
+ android:layout_weight="1">
+
+ <Button
+ android:onClick="enableLayer"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="Enable layer" />
+
+ <Button
+ android:onClick="disableLayer"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="Disable layer" />
+
+ <Button
+ android:onClick="shrinkLayer"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="Shrink layer" />
+
+ <Button
+ android:onClick="growLayer"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="Grow layer" />
+
+ </LinearLayout>
+
+ <ListView
+ android:id="@+id/list1"
+ android:layout_width="0dip"
+ android:layout_height="match_parent"
+ android:layout_weight="1" />
+
+</LinearLayout>
diff --git a/tests/HwAccelerationTest/src/com/android/test/hwui/ViewLayersActivity6.java b/tests/HwAccelerationTest/src/com/android/test/hwui/ViewLayersActivity6.java
new file mode 100644
index 000000000000..2edfec70e1f7
--- /dev/null
+++ b/tests/HwAccelerationTest/src/com/android/test/hwui/ViewLayersActivity6.java
@@ -0,0 +1,131 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.test.hwui;
+
+import android.app.Activity;
+import android.content.Context;
+import android.content.res.Resources;
+import android.graphics.Paint;
+import android.graphics.PorterDuff;
+import android.graphics.PorterDuffColorFilter;
+import android.os.Bundle;
+import android.util.DisplayMetrics;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.ArrayAdapter;
+import android.widget.ListView;
+import android.widget.TextView;
+
+@SuppressWarnings({"UnusedDeclaration"})
+public class ViewLayersActivity6 extends Activity {
+ private final Paint mPaint = new Paint();
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+
+ setContentView(R.layout.view_layers_6);
+
+ mPaint.setColorFilter(new PorterDuffColorFilter(0xff00ff00, PorterDuff.Mode.MULTIPLY));
+
+ setupList(R.id.list1);
+ }
+
+ public void enableLayer(View v) {
+ findViewById(R.id.list1).setLayerType(View.LAYER_TYPE_HARDWARE, mPaint);
+ }
+
+ public void disableLayer(View v) {
+ findViewById(R.id.list1).setLayerType(View.LAYER_TYPE_NONE, null);
+ }
+
+ public void growLayer(View v) {
+ findViewById(R.id.list1).getLayoutParams().height = ViewGroup.LayoutParams.MATCH_PARENT;
+ findViewById(R.id.list1).requestLayout();
+ }
+
+ public void shrinkLayer(View v) {
+ findViewById(R.id.list1).getLayoutParams().height = 300;
+ findViewById(R.id.list1).requestLayout();
+ }
+
+ private void setupList(int listId) {
+ final ListView list = (ListView) findViewById(listId);
+ list.setAdapter(new SimpleListAdapter(this));
+ }
+
+ private static class SimpleListAdapter extends ArrayAdapter<String> {
+ public SimpleListAdapter(Context context) {
+ super(context, android.R.layout.simple_list_item_1, DATA_LIST);
+ }
+
+ @Override
+ public View getView(int position, View convertView, ViewGroup parent) {
+ TextView v = (TextView) super.getView(position, convertView, parent);
+ final Resources r = getContext().getResources();
+ final DisplayMetrics metrics = r.getDisplayMetrics();
+ v.setCompoundDrawablePadding((int) (6 * metrics.density + 0.5f));
+ v.setCompoundDrawablesWithIntrinsicBounds(r.getDrawable(R.drawable.icon),
+ null, null, null);
+ return v;
+ }
+ }
+
+ private static final String[] DATA_LIST = {
+ "Afghanistan", "Albania", "Algeria", "American Samoa", "Andorra",
+ "Angola", "Anguilla", "Antarctica", "Antigua and Barbuda", "Argentina",
+ "Armenia", "Aruba", "Australia", "Austria", "Azerbaijan",
+ "Bahrain", "Bangladesh", "Barbados", "Belarus", "Belgium",
+ "Belize", "Benin", "Bermuda", "Bhutan", "Bolivia",
+ "Bosnia and Herzegovina", "Botswana", "Bouvet Island", "Brazil",
+ "British Indian Ocean Territory", "British Virgin Islands", "Brunei", "Bulgaria",
+ "Burkina Faso", "Burundi", "Cote d'Ivoire", "Cambodia", "Cameroon", "Canada", "Cape Verde",
+ "Cayman Islands", "Central African Republic", "Chad", "Chile", "China",
+ "Christmas Island", "Cocos (Keeling) Islands", "Colombia", "Comoros", "Congo",
+ "Cook Islands", "Costa Rica", "Croatia", "Cuba", "Cyprus", "Czech Republic",
+ "Democratic Republic of the Congo", "Denmark", "Djibouti", "Dominica", "Dominican Republic",
+ "East Timor", "Ecuador", "Egypt", "El Salvador", "Equatorial Guinea", "Eritrea",
+ "Estonia", "Ethiopia", "Faeroe Islands", "Falkland Islands", "Fiji", "Finland",
+ "Former Yugoslav Republic of Macedonia", "France", "French Guiana", "French Polynesia",
+ "French Southern Territories", "Gabon", "Georgia", "Germany", "Ghana", "Gibraltar",
+ "Greece", "Greenland", "Grenada", "Guadeloupe", "Guam", "Guatemala", "Guinea", "Guinea-Bissau",
+ "Guyana", "Haiti", "Heard Island and McDonald Islands", "Honduras", "Hong Kong", "Hungary",
+ "Iceland", "India", "Indonesia", "Iran", "Iraq", "Ireland", "Israel", "Italy", "Jamaica",
+ "Japan", "Jordan", "Kazakhstan", "Kenya", "Kiribati", "Kuwait", "Kyrgyzstan", "Laos",
+ "Latvia", "Lebanon", "Lesotho", "Liberia", "Libya", "Liechtenstein", "Lithuania", "Luxembourg",
+ "Macau", "Madagascar", "Malawi", "Malaysia", "Maldives", "Mali", "Malta", "Marshall Islands",
+ "Martinique", "Mauritania", "Mauritius", "Mayotte", "Mexico", "Micronesia", "Moldova",
+ "Monaco", "Mongolia", "Montserrat", "Morocco", "Mozambique", "Myanmar", "Namibia",
+ "Nauru", "Nepal", "Netherlands", "Netherlands Antilles", "New Caledonia", "New Zealand",
+ "Nicaragua", "Niger", "Nigeria", "Niue", "Norfolk Island", "North Korea", "Northern Marianas",
+ "Norway", "Oman", "Pakistan", "Palau", "Panama", "Papua New Guinea", "Paraguay", "Peru",
+ "Philippines", "Pitcairn Islands", "Poland", "Portugal", "Puerto Rico", "Qatar",
+ "Reunion", "Romania", "Russia", "Rwanda", "Sao Tome and Principe", "Saint Helena",
+ "Saint Kitts and Nevis", "Saint Lucia", "Saint Pierre and Miquelon",
+ "Saint Vincent and the Grenadines", "Samoa", "San Marino", "Saudi Arabia", "Senegal",
+ "Seychelles", "Sierra Leone", "Singapore", "Slovakia", "Slovenia", "Solomon Islands",
+ "Somalia", "South Africa", "South Georgia and the South Sandwich Islands", "South Korea",
+ "Spain", "Sri Lanka", "Sudan", "Suriname", "Svalbard and Jan Mayen", "Swaziland", "Sweden",
+ "Switzerland", "Syria", "Taiwan", "Tajikistan", "Tanzania", "Thailand", "The Bahamas",
+ "The Gambia", "Togo", "Tokelau", "Tonga", "Trinidad and Tobago", "Tunisia", "Turkey",
+ "Turkmenistan", "Turks and Caicos Islands", "Tuvalu", "Virgin Islands", "Uganda",
+ "Ukraine", "United Arab Emirates", "United Kingdom",
+ "United States", "United States Minor Outlying Islands", "Uruguay", "Uzbekistan",
+ "Vanuatu", "Vatican City", "Venezuela", "Vietnam", "Wallis and Futuna", "Western Sahara",
+ "Yemen", "Yugoslavia", "Zambia", "Zimbabwe"
+ };
+}
diff --git a/wifi/java/android/net/wifi/WifiStateMachine.java b/wifi/java/android/net/wifi/WifiStateMachine.java
index f0d26d11325f..b4dcf41d514e 100644
--- a/wifi/java/android/net/wifi/WifiStateMachine.java
+++ b/wifi/java/android/net/wifi/WifiStateMachine.java
@@ -1013,8 +1013,7 @@ public class WifiStateMachine extends HierarchicalStateMachine {
ifcg = service.getInterfaceConfig(intf);
if (ifcg != null) {
/* IP/netmask: 192.168.43.1/255.255.255.0 */
- ifcg.addr = InetAddress.getByName("192.168.43.1");
- ifcg.mask = InetAddress.getByName("255.255.255.0");
+ ifcg.addr = new LinkAddress(InetAddress.getByName("192.168.43.1"), 24);
ifcg.interfaceFlags = "[up]";
service.setInterfaceConfig(intf, ifcg);
@@ -2529,9 +2528,7 @@ public class WifiStateMachine extends HierarchicalStateMachine {
IBinder b = ServiceManager.getService(Context.NETWORKMANAGEMENT_SERVICE);
INetworkManagementService netd = INetworkManagementService.Stub.asInterface(b);
InterfaceConfiguration ifcg = new InterfaceConfiguration();
- ifcg.addr = NetworkUtils.numericToInetAddress(dhcpInfoInternal.ipAddress);
- ifcg.mask = NetworkUtils.intToInetAddress(
- NetworkUtils.prefixLengthToNetmaskInt(dhcpInfoInternal.prefixLength));
+ ifcg.addr = dhcpInfoInternal.makeLinkAddress();
ifcg.interfaceFlags = "[up]";
try {
netd.setInterfaceConfig(mInterfaceName, ifcg);
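Aside (not part of the patch): the WifiStateMachine hunks above make the same move, building a LinkAddress with an explicit /24 prefix (equivalent to 255.255.255.0) or taking it directly from the DHCP result via makeLinkAddress(). For reference, a plain-Java sketch of the inverse conversion the removed NetworkUtils.prefixLengthToNetmaskInt() call stood in for; this is illustrative arithmetic, not the framework implementation:

    // 24 -> "255.255.255.0"; 0 -> "0.0.0.0"
    static String netmaskFromPrefix(int prefixLength) {
        int mask = prefixLength == 0 ? 0 : (int) (0xffffffffL << (32 - prefixLength));
        return ((mask >>> 24) & 0xff) + "." + ((mask >>> 16) & 0xff) + "."
                + ((mask >>> 8) & 0xff) + "." + (mask & 0xff);
    }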