-rw-r--r--  camera/libcameraservice/CameraService.cpp                    |   1
-rw-r--r--  cmds/stagefright/Android.mk                                  |   2
-rw-r--r--  cmds/stagefright/stagefright.cpp                             |  37
-rw-r--r--  core/java/android/app/NativeActivity.java                    |  25
-rw-r--r--  core/java/com/android/internal/os/PowerProfile.java          |  18
-rw-r--r--  core/jni/android_app_NativeActivity.cpp                      |  82
-rw-r--r--  core/jni/android_view_Surface.cpp                            |   3
-rw-r--r--  core/jni/android_view_Surface.h                              |  31
-rw-r--r--  core/jni/com_google_android_gles_jni_EGLImpl.cpp             |  11
-rw-r--r--  core/res/res/xml/power_profile.xml                           |   3
-rw-r--r--  docs/html/resources/dashboard/platform-versions.jd           |  78
-rw-r--r--  docs/html/resources/dashboard/screens.jd                     |  10
-rw-r--r--  include/media/stagefright/MediaWriter.h                      |   5
-rw-r--r--  libs/surfaceflinger/TextureManager.cpp                       |  35
-rw-r--r--  libs/ui/PixelFormat.cpp                                      |   7
-rw-r--r--  media/java/android/media/AudioService.java                   |  15
-rw-r--r--  media/libmediaplayerservice/StagefrightRecorder.cpp          | 330
-rw-r--r--  media/libmediaplayerservice/StagefrightRecorder.h            |   8
-rw-r--r--  media/libstagefright/MPEG4Writer.cpp                         |  41
-rw-r--r--  native/android/Android.mk                                    |   3
-rw-r--r--  native/android/native_window.cpp                             |  47
-rw-r--r--  native/include/android/native_activity.h                     |  29
-rw-r--r--  native/include/android/native_window.h                       |  42
-rw-r--r--  packages/SystemUI/src/com/android/systemui/statusbar/StatusBarService.java  |   2
24 files changed, 607 insertions, 258 deletions
diff --git a/camera/libcameraservice/CameraService.cpp b/camera/libcameraservice/CameraService.cpp
index 75948a5d532c..10668a496fb5 100644
--- a/camera/libcameraservice/CameraService.cpp
+++ b/camera/libcameraservice/CameraService.cpp
@@ -1031,6 +1031,7 @@ void CameraService::Client::handleShutter(image_rect_type *size) {
mHardware->getRawHeap());
mSurface->registerBuffers(buffers);
+ IPCThreadState::self()->flushCommands();
}
mLock.unlock();
diff --git a/cmds/stagefright/Android.mk b/cmds/stagefright/Android.mk
index 33696f4eae5c..9a972841460a 100644
--- a/cmds/stagefright/Android.mk
+++ b/cmds/stagefright/Android.mk
@@ -7,7 +7,7 @@ LOCAL_SRC_FILES:= \
SineSource.cpp
LOCAL_SHARED_LIBRARIES := \
- libstagefright libmedia libutils libbinder
+ libstagefright libmedia libutils libbinder libstagefright_foundation
LOCAL_C_INCLUDES:= \
$(JNI_H_INCLUDE) \
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index 877b90878d68..b7a3f99fd833 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -38,6 +38,9 @@
#include <media/stagefright/OMXCodec.h>
#include <media/mediametadataretriever.h>
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/MPEG4Writer.h>
+
using namespace android;
static long gNumRepetitions;
@@ -45,6 +48,8 @@ static long gMaxNumFrames; // 0 means decode all available.
static long gReproduceBug; // if not -1.
static bool gPreferSoftwareCodec;
static bool gPlaybackAudio;
+static bool gWriteMP4;
+static String8 gWriteMP4Filename;
static int64_t getNowUs() {
struct timeval tv;
@@ -258,6 +263,21 @@ static void playSource(OMXClient *client, const sp<MediaSource> &source) {
}
}
+static void writeSourceToMP4(const sp<MediaSource> &source) {
+ sp<MPEG4Writer> writer =
+ new MPEG4Writer(gWriteMP4Filename.string());
+
+ CHECK_EQ(writer->addSource(source), OK);
+
+ sp<MetaData> params = new MetaData;
+ CHECK_EQ(writer->start(), OK);
+
+ while (!writer->reachedEOS()) {
+ usleep(100000);
+ }
+ writer->stop();
+}
+
static void usage(const char *me) {
fprintf(stderr, "usage: %s\n", me);
fprintf(stderr, " -h(elp)\n");
@@ -270,6 +290,7 @@ static void usage(const char *me) {
fprintf(stderr, " -t(humbnail) extract video thumbnail or album art\n");
fprintf(stderr, " -s(oftware) prefer software codec\n");
fprintf(stderr, " -o playback audio\n");
+ fprintf(stderr, " -w(rite) filename (write to .mp4 file)\n");
}
int main(int argc, char **argv) {
@@ -284,9 +305,10 @@ int main(int argc, char **argv) {
gReproduceBug = -1;
gPreferSoftwareCodec = false;
gPlaybackAudio = false;
+ gWriteMP4 = false;
int res;
- while ((res = getopt(argc, argv, "han:lm:b:ptso")) >= 0) {
+ while ((res = getopt(argc, argv, "han:lm:b:ptsow:")) >= 0) {
switch (res) {
case 'a':
{
@@ -322,6 +344,13 @@ int main(int argc, char **argv) {
break;
}
+ case 'w':
+ {
+ gWriteMP4 = true;
+ gWriteMP4Filename.setTo(optarg);
+ break;
+ }
+
case 'p':
{
dumpProfiles = true;
@@ -554,7 +583,11 @@ int main(int argc, char **argv) {
mediaSource = extractor->getTrack(i);
}
- playSource(&client, mediaSource);
+ if (gWriteMP4) {
+ writeSourceToMP4(mediaSource);
+ } else {
+ playSource(&client, mediaSource);
+ }
}
client.disconnect();
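
The new writeSourceToMP4() allocates a MetaData object that it never hands to start(); with the MPEG4Writer change later in this commit, start() tolerates a missing kKeyTime and defaults the track start time to 0. A minimal sketch of passing the start time explicitly, using only classes and keys that appear elsewhere in this diff (the error handling is illustrative):

    // Sketch only: write a MediaSource to an .mp4 file, handing the writer
    // an explicit start time instead of relying on the new default of 0.
    #include <unistd.h>
    #include <media/stagefright/MediaSource.h>
    #include <media/stagefright/MetaData.h>
    #include <media/stagefright/MPEG4Writer.h>
    #include <utils/Errors.h>
    #include <utils/Timers.h>

    using namespace android;

    static status_t writeSourceToMP4WithStartTime(
            const sp<MediaSource> &source, const char *filename) {
        sp<MPEG4Writer> writer = new MPEG4Writer(filename);

        status_t err = writer->addSource(source);
        if (err != OK) {
            return err;
        }

        sp<MetaData> params = new MetaData;
        params->setInt64(kKeyTime, systemTime() / 1000);  // start time in us

        err = writer->start(params.get());
        if (err != OK) {
            return err;
        }

        while (!writer->reachedEOS()) {
            usleep(100000);
        }
        writer->stop();
        return OK;
    }
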
diff --git a/core/java/android/app/NativeActivity.java b/core/java/android/app/NativeActivity.java
index d43368b1b1e2..161161c7d8fe 100644
--- a/core/java/android/app/NativeActivity.java
+++ b/core/java/android/app/NativeActivity.java
@@ -5,12 +5,14 @@ import dalvik.system.PathClassLoader;
import android.content.pm.ActivityInfo;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;
+import android.graphics.PixelFormat;
import android.os.Bundle;
import android.os.Looper;
import android.os.MessageQueue;
import android.view.InputChannel;
import android.view.InputQueue;
import android.view.KeyEvent;
+import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.View;
@@ -41,10 +43,10 @@ public class NativeActivity extends Activity implements SurfaceHolder.Callback,
private native void onStopNative(int handle);
private native void onLowMemoryNative(int handle);
private native void onWindowFocusChangedNative(int handle, boolean focused);
- private native void onSurfaceCreatedNative(int handle, SurfaceHolder holder);
- private native void onSurfaceChangedNative(int handle, SurfaceHolder holder,
+ private native void onSurfaceCreatedNative(int handle, Surface surface);
+ private native void onSurfaceChangedNative(int handle, Surface surface,
int format, int width, int height);
- private native void onSurfaceDestroyedNative(int handle, SurfaceHolder holder);
+ private native void onSurfaceDestroyedNative(int handle);
private native void onInputChannelCreatedNative(int handle, InputChannel channel);
private native void onInputChannelDestroyedNative(int handle, InputChannel channel);
@@ -55,6 +57,7 @@ public class NativeActivity extends Activity implements SurfaceHolder.Callback,
getWindow().takeSurface(this);
getWindow().takeInputQueue(this);
+ getWindow().setFormat(PixelFormat.RGB_565);
try {
ai = getPackageManager().getActivityInfo(
@@ -98,7 +101,7 @@ public class NativeActivity extends Activity implements SurfaceHolder.Callback,
protected void onDestroy() {
mDestroyed = true;
if (mCurSurfaceHolder != null) {
- onSurfaceDestroyedNative(mNativeHandle, mCurSurfaceHolder);
+ onSurfaceDestroyedNative(mNativeHandle);
mCurSurfaceHolder = null;
}
if (mCurInputQueue != null) {
@@ -158,21 +161,21 @@ public class NativeActivity extends Activity implements SurfaceHolder.Callback,
public void surfaceCreated(SurfaceHolder holder) {
if (!mDestroyed) {
mCurSurfaceHolder = holder;
- onSurfaceCreatedNative(mNativeHandle, holder);
+ onSurfaceCreatedNative(mNativeHandle, holder.getSurface());
}
}
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
if (!mDestroyed) {
mCurSurfaceHolder = holder;
- onSurfaceChangedNative(mNativeHandle, holder, format, width, height);
+ onSurfaceChangedNative(mNativeHandle, holder.getSurface(), format, width, height);
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
mCurSurfaceHolder = null;
if (!mDestroyed) {
- onSurfaceDestroyedNative(mNativeHandle, holder);
+ onSurfaceDestroyedNative(mNativeHandle);
}
}
@@ -196,4 +199,12 @@ public class NativeActivity extends Activity implements SurfaceHolder.Callback,
decor.dispatchKeyEvent(event);
}
}
+
+ void setWindowFlags(int flags, int mask) {
+ getWindow().setFlags(flags, mask);
+ }
+
+ void setWindowFormat(int format) {
+ getWindow().setFormat(format);
+ }
}
diff --git a/core/java/com/android/internal/os/PowerProfile.java b/core/java/com/android/internal/os/PowerProfile.java
index 2369d25bd472..127ed68dff98 100644
--- a/core/java/com/android/internal/os/PowerProfile.java
+++ b/core/java/com/android/internal/os/PowerProfile.java
@@ -126,6 +126,11 @@ public class PowerProfile {
public static final String POWER_CPU_SPEEDS = "cpu.speeds";
+ /**
+ * Battery capacity in milliAmpHour (mAh).
+ */
+ public static final String POWER_BATTERY_CAPACITY = "battery.capacity";
+
static final HashMap<String, Object> sPowerMap = new HashMap<String, Object>();
private static final String TAG_DEVICE = "device";
@@ -243,6 +248,19 @@ public class PowerProfile {
}
}
+ /**
+ * Returns the battery capacity, if available, in milli Amp Hours. If not available,
+ * it returns zero.
+ * @return the battery capacity in mAh
+ */
+ public double getBatteryCapacity() {
+ return getAveragePower(POWER_BATTERY_CAPACITY);
+ }
+
+ /**
+ * Returns the number of speeds that the CPU can be run at.
+ * @return
+ */
public int getNumSpeedSteps() {
Object value = sPowerMap.get(POWER_CPU_SPEEDS);
if (value != null && value instanceof Double[]) {
diff --git a/core/jni/android_app_NativeActivity.cpp b/core/jni/android_app_NativeActivity.cpp
index dd59d6327663..dab1dba811cb 100644
--- a/core/jni/android_app_NativeActivity.cpp
+++ b/core/jni/android_app_NativeActivity.cpp
@@ -22,6 +22,8 @@
#include <android_runtime/AndroidRuntime.h>
#include <android/native_activity.h>
+#include <surfaceflinger/Surface.h>
+#include <ui/egl/android_natives.h>
#include <ui/InputTransport.h>
#include <utils/PollLoop.h>
@@ -29,6 +31,7 @@
#include "android_os_MessageQueue.h"
#include "android_view_InputChannel.h"
#include "android_view_KeyEvent.h"
+#include "android_view_Surface.h"
namespace android
{
@@ -37,8 +40,16 @@ static struct {
jclass clazz;
jmethodID dispatchUnhandledKeyEvent;
+ jmethodID setWindowFlags;
+ jmethodID setWindowFormat;
} gNativeActivityClassInfo;
+// ------------------------------------------------------------------------
+
+/*
+ * Specialized input queue that allows unhandled key events to be dispatched
+ * back to the native activity's Java framework code.
+ */
struct MyInputQueue : AInputQueue {
explicit MyInputQueue(const android::sp<android::InputChannel>& channel, int workWrite)
: AInputQueue(channel), mWorkWrite(workWrite) {
@@ -74,13 +85,18 @@ struct MyInputQueue : AInputQueue {
Vector<KeyEvent*> mPendingKeys;
};
+// ------------------------------------------------------------------------
+
+/*
+ * Native state for interacting with the NativeActivity class.
+ */
struct NativeCode {
NativeCode(void* _dlhandle, ANativeActivity_createFunc* _createFunc) {
memset(&activity, sizeof(activity), 0);
memset(&callbacks, sizeof(callbacks), 0);
dlhandle = _dlhandle;
createActivityFunc = _createFunc;
- surface = NULL;
+ nativeWindow = NULL;
inputChannel = NULL;
nativeInputQueue = NULL;
mainWorkRead = mainWorkWrite = -1;
@@ -104,18 +120,18 @@ struct NativeCode {
if (mainWorkRead >= 0) close(mainWorkRead);
if (mainWorkWrite >= 0) close(mainWorkWrite);
if (dlhandle != NULL) {
- dlclose(dlhandle);
+ // for now don't unload... we probably should clean this
+ // up and only keep one open dlhandle per proc, since there
+ // is really no benefit to unloading the code.
+ //dlclose(dlhandle);
}
}
void setSurface(jobject _surface) {
- if (surface != NULL) {
- activity.env->DeleteGlobalRef(surface);
- }
if (_surface != NULL) {
- surface = activity.env->NewGlobalRef(_surface);
+ nativeWindow = android_Surface_getNativeWindow(activity.env, _surface);
} else {
- surface = NULL;
+ nativeWindow = NULL;
}
}
@@ -150,7 +166,7 @@ struct NativeCode {
void* dlhandle;
ANativeActivity_createFunc* createActivityFunc;
- jobject surface;
+ sp<ANativeWindow> nativeWindow;
jobject inputChannel;
struct MyInputQueue* nativeInputQueue;
@@ -160,6 +176,11 @@ struct NativeCode {
sp<PollLoop> pollLoop;
};
+// ------------------------------------------------------------------------
+
+/*
+ * Callback for handling native events on the application's main thread.
+ */
static bool mainWorkCallback(int fd, int events, void* data) {
NativeCode* code = (NativeCode*)data;
if ((events & POLLIN) != 0) {
@@ -180,6 +201,8 @@ static bool mainWorkCallback(int fd, int events, void* data) {
return true;
}
+// ------------------------------------------------------------------------
+
static jint
loadNativeCode_native(JNIEnv* env, jobject clazz, jstring path, jobject messageQueue)
{
@@ -323,9 +346,9 @@ onSurfaceCreated_native(JNIEnv* env, jobject clazz, jint handle, jobject surface
if (handle != 0) {
NativeCode* code = (NativeCode*)handle;
code->setSurface(surface);
- if (code->callbacks.onSurfaceCreated != NULL) {
- code->callbacks.onSurfaceCreated(&code->activity,
- (ASurfaceHolder*)code->surface);
+ if (code->nativeWindow != NULL && code->callbacks.onNativeWindowCreated != NULL) {
+ code->callbacks.onNativeWindowCreated(&code->activity,
+ code->nativeWindow.get());
}
}
}
@@ -336,9 +359,17 @@ onSurfaceChanged_native(JNIEnv* env, jobject clazz, jint handle, jobject surface
{
if (handle != 0) {
NativeCode* code = (NativeCode*)handle;
- if (code->surface != NULL && code->callbacks.onSurfaceChanged != NULL) {
- code->callbacks.onSurfaceChanged(&code->activity,
- (ASurfaceHolder*)code->surface, format, width, height);
+ sp<ANativeWindow> oldNativeWindow = code->nativeWindow;
+ code->setSurface(surface);
+ if (oldNativeWindow != code->nativeWindow) {
+ if (oldNativeWindow != NULL && code->callbacks.onNativeWindowDestroyed != NULL) {
+ code->callbacks.onNativeWindowDestroyed(&code->activity,
+ oldNativeWindow.get());
+ }
+ if (code->nativeWindow != NULL && code->callbacks.onNativeWindowCreated != NULL) {
+ code->callbacks.onNativeWindowCreated(&code->activity,
+ code->nativeWindow.get());
+ }
}
}
}
@@ -348,9 +379,9 @@ onSurfaceDestroyed_native(JNIEnv* env, jobject clazz, jint handle, jobject surfa
{
if (handle != 0) {
NativeCode* code = (NativeCode*)handle;
- if (code->surface != NULL && code->callbacks.onSurfaceDestroyed != NULL) {
- code->callbacks.onSurfaceDestroyed(&code->activity,
- (ASurfaceHolder*)code->surface);
+ if (code->nativeWindow != NULL && code->callbacks.onNativeWindowDestroyed != NULL) {
+ code->callbacks.onNativeWindowDestroyed(&code->activity,
+ code->nativeWindow.get());
}
code->setSurface(NULL);
}
@@ -398,9 +429,9 @@ static const JNINativeMethod g_methods[] = {
{ "onStopNative", "(I)V", (void*)onStop_native },
{ "onLowMemoryNative", "(I)V", (void*)onLowMemory_native },
{ "onWindowFocusChangedNative", "(IZ)V", (void*)onWindowFocusChanged_native },
- { "onSurfaceCreatedNative", "(ILandroid/view/SurfaceHolder;)V", (void*)onSurfaceCreated_native },
- { "onSurfaceChangedNative", "(ILandroid/view/SurfaceHolder;III)V", (void*)onSurfaceChanged_native },
- { "onSurfaceDestroyedNative", "(ILandroid/view/SurfaceHolder;)V", (void*)onSurfaceDestroyed_native },
+ { "onSurfaceCreatedNative", "(ILandroid/view/Surface;)V", (void*)onSurfaceCreated_native },
+ { "onSurfaceChangedNative", "(ILandroid/view/Surface;III)V", (void*)onSurfaceChanged_native },
+ { "onSurfaceDestroyedNative", "(I)V", (void*)onSurfaceDestroyed_native },
{ "onInputChannelCreatedNative", "(ILandroid/view/InputChannel;)V", (void*)onInputChannelCreated_native },
{ "onInputChannelDestroyedNative", "(ILandroid/view/InputChannel;)V", (void*)onInputChannelDestroyed_native },
};
@@ -421,11 +452,18 @@ int register_android_app_NativeActivity(JNIEnv* env)
//LOGD("register_android_app_NativeActivity");
FIND_CLASS(gNativeActivityClassInfo.clazz, kNativeActivityPathName);
-
+
GET_METHOD_ID(gNativeActivityClassInfo.dispatchUnhandledKeyEvent,
gNativeActivityClassInfo.clazz,
"dispatchUnhandledKeyEvent", "(Landroid/view/KeyEvent;)V");
-
+
+ GET_METHOD_ID(gNativeActivityClassInfo.setWindowFlags,
+ gNativeActivityClassInfo.clazz,
+ "setWindowFlags", "(II)V");
+ GET_METHOD_ID(gNativeActivityClassInfo.setWindowFormat,
+ gNativeActivityClassInfo.clazz,
+ "setWindowFormat", "(I)V");
+
return AndroidRuntime::registerNativeMethods(
env, kNativeActivityPathName,
g_methods, NELEM(g_methods));
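
The two new method IDs let native code drive the Java-side setWindowFlags()/setWindowFormat() helpers added to NativeActivity above. A hedged sketch of such an upcall; the jmethodID argument stands in for the cached gNativeActivityClassInfo entry, and calls like this must run on the thread that owns the activity's JNIEnv:

    // Sketch only: invoking NativeActivity.setWindowFormat(int) from native
    // code. ANativeActivity::env and ::clazz are the JNI environment and
    // activity object the framework fills in for native activities.
    #include <jni.h>
    #include <android/native_activity.h>

    static void callSetWindowFormat(ANativeActivity* activity,
                                    jmethodID setWindowFormatMethod,
                                    int32_t format) {
        activity->env->CallVoidMethod(activity->clazz,
                                      setWindowFormatMethod,
                                      (jint)format);
    }
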
diff --git a/core/jni/android_view_Surface.cpp b/core/jni/android_view_Surface.cpp
index cef5c107ec86..a82abc93eacf 100644
--- a/core/jni/android_view_Surface.cpp
+++ b/core/jni/android_view_Surface.cpp
@@ -33,6 +33,7 @@
#include "jni.h"
#include <android_runtime/AndroidRuntime.h>
+#include "android_view_Surface.h"
#include <utils/misc.h>
@@ -179,7 +180,7 @@ static sp<Surface> getSurface(JNIEnv* env, jobject clazz)
return result;
}
-EGLNativeWindowType android_Surface_getEGLNativeWindow(
+sp<ANativeWindow> android_Surface_getNativeWindow(
JNIEnv* env, jobject clazz) {
return getSurface(env, clazz).get();
}
diff --git a/core/jni/android_view_Surface.h b/core/jni/android_view_Surface.h
new file mode 100644
index 000000000000..c37932e74c4f
--- /dev/null
+++ b/core/jni/android_view_Surface.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _ANDROID_VIEW_SURFACE_H
+#define _ANDROID_VIEW_SURFACE_H
+
+#include <android/native_window.h>
+
+#include "jni.h"
+
+namespace android {
+
+extern sp<ANativeWindow> android_Surface_getNativeWindow(
+ JNIEnv* env, jobject clazz);
+
+} // namespace android
+
+#endif // _ANDROID_VIEW_SURFACE_H
diff --git a/core/jni/com_google_android_gles_jni_EGLImpl.cpp b/core/jni/com_google_android_gles_jni_EGLImpl.cpp
index d5cde48b8538..866c038eac43 100644
--- a/core/jni/com_google_android_gles_jni_EGLImpl.cpp
+++ b/core/jni/com_google_android_gles_jni_EGLImpl.cpp
@@ -25,10 +25,9 @@
#include <SkBitmap.h>
#include <SkPixelRef.h>
-namespace android {
+#include "android_view_Surface.h"
-extern EGLNativeWindowType android_Surface_getEGLNativeWindow(
- JNIEnv* env, jobject clazz);
+namespace android {
static jclass gDisplay_class;
static jclass gContext_class;
@@ -325,7 +324,7 @@ static jint jni_eglCreateWindowSurface(JNIEnv *_env, jobject _this, jobject disp
}
EGLDisplay dpy = getDisplay(_env, display);
EGLContext cnf = getConfig(_env, config);
- EGLNativeWindowType window = 0;
+ sp<ANativeWindow> window;
if (native_window == NULL) {
not_valid_surface:
doThrow(_env, "java/lang/IllegalArgumentException",
@@ -333,12 +332,12 @@ not_valid_surface:
return 0;
}
- window = android_Surface_getEGLNativeWindow(_env, native_window);
+ window = android_Surface_getNativeWindow(_env, native_window);
if (window == NULL)
goto not_valid_surface;
jint* base = beginNativeAttribList(_env, attrib_list);
- EGLSurface sur = eglCreateWindowSurface(dpy, cnf, window, base);
+ EGLSurface sur = eglCreateWindowSurface(dpy, cnf, window.get(), base);
endNativeAttributeList(_env, attrib_list, base);
return (jint)sur;
}
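
The same pattern applies outside of JNI: any native component holding an ANativeWindow, for example one obtained through android_Surface_getNativeWindow(), can hand it straight to EGL, keeping a strong reference for the lifetime of the EGL surface. A brief sketch, assuming the display and config were already set up through the usual eglInitialize/eglChooseConfig sequence:

    // Sketch only: create an EGL window surface from an ANativeWindow and
    // match the window's buffer format to the chosen config first, so no
    // format conversion is needed at composition time.
    #include <EGL/egl.h>
    #include <android/native_window.h>

    static EGLSurface createWindowSurface(EGLDisplay display,
                                          EGLConfig config,
                                          ANativeWindow* window) {
        EGLint format = 0;
        eglGetConfigAttrib(display, config, EGL_NATIVE_VISUAL_ID, &format);

        // Width/height of 0 keep the window's base dimensions.
        ANativeWindow_setBuffersGeometry(window, 0, 0, format);

        return eglCreateWindowSurface(display, config, window, NULL);
    }
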
diff --git a/core/res/res/xml/power_profile.xml b/core/res/res/xml/power_profile.xml
index ce623e8d4d35..30312b369499 100644
--- a/core/res/res/xml/power_profile.xml
+++ b/core/res/res/xml/power_profile.xml
@@ -18,6 +18,7 @@
-->
<device name="Android">
+ <!-- All values are in mAh except as noted -->
<item name="none">0</item>
<item name="screen.on">0.1</item>
<item name="bluetooth.active">0.1</item>
@@ -48,4 +49,6 @@
<array name="cpu.active">
<value>0.2</value>
</array>
+ <!-- This is the battery capacity in mAh -->
+ <item name="battery.capacity">1000</item>
</device>
diff --git a/docs/html/resources/dashboard/platform-versions.jd b/docs/html/resources/dashboard/platform-versions.jd
index 5e751057e17d..6cb7228016b5 100644
--- a/docs/html/resources/dashboard/platform-versions.jd
+++ b/docs/html/resources/dashboard/platform-versions.jd
@@ -43,29 +43,73 @@ the development of your application features for the devices currently in
the hands of users. For information about how to target your application to devices based on
platform version, see <a href="{@docRoot}guide/appendix/api-levels.html">API Levels</a>.</p>
-<p class="note"><strong>Note:</strong> This data is based on the number
-of Android devices that have accessed Android Market within a 14-day period
-ending on the data collection date noted below.</p>
+
+<h3 id="Current">Current Distribution</h3>
+
+<p>The following pie chart and table are based on the number of Android devices that have accessed
+Android Market within a 14-day period ending on the data collection date noted below.</p>
<div class="dashboard-panel">
-<img alt="" width="460" height="250"
-src="http://chart.apis.google.com/chart?&cht=p&chs=460x250&chd=t:0.1,24.6,25.0,0.1,0.3,50.0&chl=
-Android%201.1|Android%201.5|Android%201.6|Android%202.0|Android%202.0.1|Android%202.1&chco=c4df9b,
-6fad0c" />
+<img alt="" height="250" width="460"
+src="http://chart.apis.google.com/chart?&cht=p&chs=460x250&chd=t:0.3,21.3,23.5,53.1,1.8&chl=Other*|
+Android%201.5|Android%201.6|Android%202.1|Android%202.2&chco=c4df9b,6fad0c" />
<table>
<tr>
- <th>Android Platform</th>
- <th>Percent of Devices</th>
+ <th>Platform</th>
+ <th>API Level</th>
+ <th>Distribution</th>
</tr>
-<tr><td>Android 1.1</td><td>0.1%</td></tr>
-<tr><td>Android 1.5</td><td>24.6%</td></tr>
-<tr><td>Android 1.6</td><td>25.0%</td></tr>
-<tr><td>Android 2.0</td><td>0.1%</td></tr>
-<tr><td>Android 2.0.1</td><td>0.3%</td></tr>
-<tr><td>Android 2.1</td><td>50.0%</td></tr>
+<tr><td>Android 1.5</td><td>3</td><td>21.3%</td></tr>
+<tr><td>Android 1.6</td><td>4</td><td>23.5%</td></tr>
+<tr><td>Android 2.1</td><td>7</td><td>53.1%</td></tr>
+<tr><td>Android 2.2</td><td>8</td><td>1.8%</td></tr>
</table>
-<p><em>Data collected during two weeks ending on June 16, 2010</em></p>
-</div>
+
+<p><em>Data collected during two weeks ending on July 1, 2010</em></p>
+<p style="font-size:.9em">* <em>Other: 0.3% of devices running obsolete versions</em></p>
+
+</div><!-- end dashboard-panel -->
+
+
+<h3 id="Historical">Historical Distribution</h3>
+
+<p>The following stacked line graph provides a history of the relative number of
+active Android devices running different versions of the Android platform. It also provides a
+valuable perspective of how many devices your application is compatible with, based on the
+platform version.</p>
+
+<p>Notice that the platform versions are stacked on top of each other with the oldest active
+version at the top. This format indicates the total percent of active devices that are compatible
+with a given version of Android. For example, if you develop your application for
+the version that is at the very top of the chart, then your application is
+compatible with 100% of active devices (and all future versions), because all Android APIs are
+forward compatible. Or, if you develop your application for a version lower on the chart,
+then it is currently compatible with the percentage of devices indicated on the y-axis, where the
+line for that version meets the y-axis on the right.</p>
+
+<p>Each dataset in the timeline is based on the number of Android devices that accessed
+Android Market within a 14-day period ending on the date indicated on the x-axis.</p>
+
+<div class="dashboard-panel">
+
+<img alt="" height="265" width="700" style="padding:5px;background:#fff"
+src="http://chart.apis.google.com/chart?&cht=lc&chs=700x265&chxt=x,y,r&chxr=0,0,10%7C1,0,100%7C2,0,
+100&chxl=0%3A%7C2010/02/01%7C02/15%7C03/01%7C03/15%7C04/01%7C04/15%7C05/01%7C05/15%7C06/01%7C06/15%
+7C2010/07/01%7C1%3A%7C0%25%7C25%25%7C50%25%7C75%25%7C100%25%7C2%3A%7C0%25%7C25%25%7C50%25%7C75%25%
+7C100%25&chxp=0,0,1,2,3,4,5,6,7,8,9,10&chxtc=0,5&chd=t:99.0,99.2,99.4,99.5,99.6,99.6,99.6,99.7,100.6
+,101.1,99.9%7C63.4,62.5,61.6,60.6,61.5,61.7,62.3,63.5,73.0,76.4,78.6%7C22.6,23.2,24.3,25.4,29.4,30.2
+,32.7,35.3,46.2,51.3,55.1%7C0.0,0.0,0.0,0.0,4.0,28.3,32.0,34.9,45.9,51.0,54.9%7C0.0,0.0,0.0,0.0,0.0,
+0.0,0.0,0.0,0.8,1.2,1.8&chm=tAndroid%201.5,7caa36,0,0,15,,t::-5%7Cb,c3df9b,0,1,0%7CtAndroid%201.6,
+638d23,1,0,15,,t::-5%7Cb,b0db6e,1,2,0%7CtAndroid%202.0.1,496c13,2,0,15,,t::-5%7Cb,9ddb3d,2,3,0%
+7CtAndroid%202.1,2f4708,3,5,15,,t::-5%7Cb,89cf19,3,4,0%7CB,6fad0c,4,5,0&chg=9,25&chdl=Android%201.5%
+20(API%20Level%203)%7CAndroid%201.6%20(API%20Level%204)%7CAndroid%202.0.1%20(API%20Level%206)%
+7CAndroid%202.1%20(API%20Level%207)%7CAndroid%202.2%20(API%20Level %208)&chco=add274,
+9ad145,84c323,6ba213,507d08" />
+
+<p><em>Last historical dataset collected during two weeks ending on July 1, 2010</em></p>
+
+
+</div><!-- end dashboard-panel -->
diff --git a/docs/html/resources/dashboard/screens.jd b/docs/html/resources/dashboard/screens.jd
index f8130ea50e9d..89fdd2d30352 100644
--- a/docs/html/resources/dashboard/screens.jd
+++ b/docs/html/resources/dashboard/screens.jd
@@ -49,7 +49,7 @@ ending on the data collection date noted below.</p>
<div class="dashboard-panel">
<img alt="" width="460" height="250"
-src="http://chart.apis.google.com/chart?&cht=p&chs=460x250&chd=t:1.1,57.8,41.0&chl=Small%20/%20ldpi|
+src="http://chart.apis.google.com/chart?&cht=p&chs=460x250&chd=t:1.4,54.5,44.1&chl=Small%20/%20ldpi|
Normal%20/%20mdpi|Normal%20/%20hdpi&chco=c4df9b,6fad0c" />
<table>
@@ -60,14 +60,14 @@ Normal%20/%20mdpi|Normal%20/%20hdpi&chco=c4df9b,6fad0c" />
<th scope="col">High Density</th>
</tr>
<tr><th scope="row">Small</th>
-<td class='cent hi'>1.1%</td>
+<td class='cent hi'>1.4%</td>
<td></td>
<td></td>
</tr>
<tr><th scope="row">Normal</th>
<td></td>
-<td class='cent hi'>57.8%</td>
-<td class='cent hi'>41.0%</td>
+<td class='cent hi'>54.5%</td>
+<td class='cent hi'>44.1%</td>
</tr>
<tr><th scope="row">Large</th>
<td></td>
@@ -76,6 +76,6 @@ Normal%20/%20mdpi|Normal%20/%20hdpi&chco=c4df9b,6fad0c" />
</tr>
</table>
-<p><em>Data collected during two weeks ending on June 16, 2010</em></p>
+<p><em>Data collected during two weeks ending on July 1, 2010</em></p>
</div>
diff --git a/include/media/stagefright/MediaWriter.h b/include/media/stagefright/MediaWriter.h
index e91d066c04f6..8d3a9df39302 100644
--- a/include/media/stagefright/MediaWriter.h
+++ b/include/media/stagefright/MediaWriter.h
@@ -27,7 +27,10 @@ struct MediaSource;
struct MetaData;
struct MediaWriter : public RefBase {
- MediaWriter() {}
+ MediaWriter()
+ : mMaxFileSizeLimitBytes(0),
+ mMaxFileDurationLimitUs(0) {
+ }
virtual status_t addSource(const sp<MediaSource> &source) = 0;
virtual bool reachedEOS() = 0;
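
Zero-initializing the two limit fields lets writers treat 0 as "no limit", which matches StagefrightRecorder below, where setMaxFileDuration()/setMaxFileSize() are only called for non-zero values. A hypothetical helper (not part of the MediaWriter API) showing how a concrete writer might consult such limits:

    // Sketch only, not part of MediaWriter: interpreting zero-initialized
    // limits, where 0 means "unlimited".
    #include <stdint.h>

    struct WriterLimits {
        int64_t maxFileSizeBytes;    // 0 = no size limit
        int64_t maxFileDurationUs;   // 0 = no duration limit

        bool exceeded(int64_t bytesWritten, int64_t durationUs) const {
            if (maxFileSizeBytes > 0 && bytesWritten >= maxFileSizeBytes) {
                return true;
            }
            return maxFileDurationUs > 0 && durationUs >= maxFileDurationUs;
        }
    };
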
diff --git a/libs/surfaceflinger/TextureManager.cpp b/libs/surfaceflinger/TextureManager.cpp
index fa192563cb4b..65260325b21f 100644
--- a/libs/surfaceflinger/TextureManager.cpp
+++ b/libs/surfaceflinger/TextureManager.cpp
@@ -87,7 +87,7 @@ status_t TextureManager::initTexture(Image* pImage, int32_t format)
GLenum target = GL_TEXTURE_2D;
#if defined(GL_OES_texture_external)
if (GLExtensions::getInstance().haveTextureExternal()) {
- if (format && isSupportedYuvFormat(format)) {
+ if (format && isYuvFormat(format)) {
target = GL_TEXTURE_EXTERNAL_OES;
pImage->target = Texture::TEXTURE_EXTERNAL;
}
@@ -105,23 +105,32 @@ status_t TextureManager::initTexture(Image* pImage, int32_t format)
bool TextureManager::isSupportedYuvFormat(int format)
{
- return isYuvFormat(format);
+ switch (format) {
+ case HAL_PIXEL_FORMAT_YV12:
+ case HAL_PIXEL_FORMAT_YV16:
+ return true;
+ }
+ return false;
}
bool TextureManager::isYuvFormat(int format)
{
switch (format) {
- case HAL_PIXEL_FORMAT_NV16:
- case HAL_PIXEL_FORMAT_NV21:
- case HAL_PIXEL_FORMAT_IYUV:
- case HAL_PIXEL_FORMAT_YUV9:
- case HAL_PIXEL_FORMAT_YUY2:
- case HAL_PIXEL_FORMAT_UYVY:
- case HAL_PIXEL_FORMAT_NV12:
- case HAL_PIXEL_FORMAT_NV61:
- case HAL_PIXEL_FORMAT_NV12_ADRENO_TILED:
+ // supported YUV formats
+ case HAL_PIXEL_FORMAT_YV12:
+ case HAL_PIXEL_FORMAT_YV16:
+ // Legacy/deprecated YUV formats
+ case HAL_PIXEL_FORMAT_YCbCr_422_SP:
+ case HAL_PIXEL_FORMAT_YCrCb_420_SP:
+ case HAL_PIXEL_FORMAT_YCbCr_422_I:
+ case HAL_PIXEL_FORMAT_YCbCr_420_SP_TILED:
return true;
}
+
+ // Any OEM format needs to be considered
+ if (format>=0x100 && format<=0x1FF)
+ return true;
+
return false;
}
@@ -255,7 +264,7 @@ status_t TextureManager::loadTexture(Texture* texture,
glTexImage2D(GL_TEXTURE_2D, 0,
GL_RGBA, texture->potWidth, texture->potHeight, 0,
GL_RGBA, GL_UNSIGNED_BYTE, data);
- } else if (isYuvFormat(t.format)) {
+ } else if (isSupportedYuvFormat(t.format)) {
// just show the Y plane of YUV buffers
glTexImage2D(GL_TEXTURE_2D, 0,
GL_LUMINANCE, texture->potWidth, texture->potHeight, 0,
@@ -283,7 +292,7 @@ status_t TextureManager::loadTexture(Texture* texture,
0, bounds.top, t.width, bounds.height(),
GL_RGBA, GL_UNSIGNED_BYTE,
t.data + bounds.top*t.stride*4);
- } else if (isYuvFormat(t.format)) {
+ } else if (isSupportedYuvFormat(t.format)) {
// just show the Y plane of YUV buffers
glTexSubImage2D(GL_TEXTURE_2D, 0,
0, bounds.top, t.width, bounds.height(),
diff --git a/libs/ui/PixelFormat.cpp b/libs/ui/PixelFormat.cpp
index c9a5950d1532..b205418e0f70 100644
--- a/libs/ui/PixelFormat.cpp
+++ b/libs/ui/PixelFormat.cpp
@@ -59,16 +59,13 @@ status_t getPixelFormatInfo(PixelFormat format, PixelFormatInfo* info)
// YUV format from the HAL are handled here
switch (format) {
case HAL_PIXEL_FORMAT_YCbCr_422_SP:
- case HAL_PIXEL_FORMAT_YCrCb_422_SP:
- case HAL_PIXEL_FORMAT_YCbCr_422_P:
case HAL_PIXEL_FORMAT_YCbCr_422_I:
- case HAL_PIXEL_FORMAT_CbYCrY_422_I:
+ case HAL_PIXEL_FORMAT_YV16:
info->bitsPerPixel = 16;
goto done;
- case HAL_PIXEL_FORMAT_YCbCr_420_SP:
case HAL_PIXEL_FORMAT_YCrCb_420_SP:
case HAL_PIXEL_FORMAT_YCbCr_420_SP_TILED:
- case HAL_PIXEL_FORMAT_YCbCr_420_P:
+ case HAL_PIXEL_FORMAT_YV12:
info->bitsPerPixel = 12;
done:
info->format = format;
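
The 12 and 16 bits-per-pixel values follow from the chroma subsampling of these formats: YV12 is planar 4:2:0 and YV16 is planar 4:2:2. A small sketch of the buffer-size arithmetic this enables, ignoring the stride and alignment constraints a real allocator adds:

    // Sketch only: minimum payload implied by bitsPerPixel, with no
    // stride/alignment padding.
    #include <stddef.h>
    #include <stdint.h>

    static size_t minBufferSize(uint32_t width, uint32_t height,
                                uint32_t bitsPerPixel) {
        return (size_t)width * height * bitsPerPixel / 8;
    }

    // Example: a 640x480 YV12 buffer needs at least 640*480*12/8 = 460800
    // bytes: a full-resolution Y plane plus two quarter-resolution
    // chroma planes.
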
diff --git a/media/java/android/media/AudioService.java b/media/java/android/media/AudioService.java
index 50f0674c28ab..9212708e6c13 100644
--- a/media/java/android/media/AudioService.java
+++ b/media/java/android/media/AudioService.java
@@ -16,6 +16,7 @@
package android.media;
+import java.util.NoSuchElementException;
import android.app.ActivityManagerNative;
import android.content.BroadcastReceiver;
import android.content.ComponentName;
@@ -1016,7 +1017,11 @@ public class AudioService extends IAudioService.Stub {
} else {
mStartcount--;
if (mStartcount == 0) {
- mCb.unlinkToDeath(this, 0);
+ try {
+ mCb.unlinkToDeath(this, 0);
+ } catch (NoSuchElementException e) {
+ Log.w(TAG, "decCount() going to 0 but not registered to binder");
+ }
}
requestScoState(BluetoothHeadset.AUDIO_STATE_DISCONNECTED);
}
@@ -1025,8 +1030,14 @@ public class AudioService extends IAudioService.Stub {
public void clearCount(boolean stopSco) {
synchronized(mScoClients) {
+ if (mStartcount != 0) {
+ try {
+ mCb.unlinkToDeath(this, 0);
+ } catch (NoSuchElementException e) {
+ Log.w(TAG, "clearCount() mStartcount: "+mStartcount+" != 0 but not registered to binder");
+ }
+ }
mStartcount = 0;
- mCb.unlinkToDeath(this, 0);
if (stopSco) {
requestScoState(BluetoothHeadset.AUDIO_STATE_DISCONNECTED);
}
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 8bc6e9ac0e50..91c5b92e528c 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -69,6 +69,7 @@ status_t StagefrightRecorder::setAudioSource(audio_source as) {
LOGV("setAudioSource: %d", as);
if (as < AUDIO_SOURCE_DEFAULT ||
as >= AUDIO_SOURCE_LIST_END) {
+ LOGE("Invalid audio source: %d", as);
return BAD_VALUE;
}
@@ -85,6 +86,7 @@ status_t StagefrightRecorder::setVideoSource(video_source vs) {
LOGV("setVideoSource: %d", vs);
if (vs < VIDEO_SOURCE_DEFAULT ||
vs >= VIDEO_SOURCE_LIST_END) {
+ LOGE("Invalid video source: %d", vs);
return BAD_VALUE;
}
@@ -101,6 +103,7 @@ status_t StagefrightRecorder::setOutputFormat(output_format of) {
LOGV("setOutputFormat: %d", of);
if (of < OUTPUT_FORMAT_DEFAULT ||
of >= OUTPUT_FORMAT_LIST_END) {
+ LOGE("Invalid output format: %d", of);
return BAD_VALUE;
}
@@ -117,6 +120,7 @@ status_t StagefrightRecorder::setAudioEncoder(audio_encoder ae) {
LOGV("setAudioEncoder: %d", ae);
if (ae < AUDIO_ENCODER_DEFAULT ||
ae >= AUDIO_ENCODER_LIST_END) {
+ LOGE("Invalid audio encoder: %d", ae);
return BAD_VALUE;
}
@@ -133,6 +137,7 @@ status_t StagefrightRecorder::setVideoEncoder(video_encoder ve) {
LOGV("setVideoEncoder: %d", ve);
if (ve < VIDEO_ENCODER_DEFAULT ||
ve >= VIDEO_ENCODER_LIST_END) {
+ LOGE("Invalid video encoder: %d", ve);
return BAD_VALUE;
}
@@ -176,7 +181,7 @@ status_t StagefrightRecorder::setCamera(const sp<ICamera> &camera) {
LOGV("setCamera");
if (camera == 0) {
LOGE("camera is NULL");
- return UNKNOWN_ERROR;
+ return BAD_VALUE;
}
int64_t token = IPCThreadState::self()->clearCallingIdentity();
@@ -185,7 +190,7 @@ status_t StagefrightRecorder::setCamera(const sp<ICamera> &camera) {
if (mCamera == 0) {
LOGE("Unable to connect to camera");
IPCThreadState::self()->restoreCallingIdentity(token);
- return UNKNOWN_ERROR;
+ return -EBUSY;
}
LOGV("Connected to camera");
@@ -206,11 +211,11 @@ status_t StagefrightRecorder::setPreviewSurface(const sp<ISurface> &surface) {
}
status_t StagefrightRecorder::setOutputFile(const char *path) {
- LOGE("setOutputFile(const char*) should not be called");
+ LOGE("setOutputFile(const char*) must not be called");
// We don't actually support this at all, as the media_server process
// no longer has permissions to create files.
- return UNKNOWN_ERROR;
+ return -EPERM;
}
status_t StagefrightRecorder::setOutputFile(int fd, int64_t offset, int64_t length) {
@@ -219,6 +224,11 @@ status_t StagefrightRecorder::setOutputFile(int fd, int64_t offset, int64_t leng
CHECK_EQ(offset, 0);
CHECK_EQ(length, 0);
+ if (fd < 0) {
+ LOGE("Invalid file descriptor: %d", fd);
+ return -EBADF;
+ }
+
if (mOutputFd >= 0) {
::close(mOutputFd);
}
@@ -294,6 +304,7 @@ status_t StagefrightRecorder::setParamAudioNumberOfChannels(int32_t channels) {
LOGV("setParamAudioNumberOfChannels: %d", channels);
if (channels <= 0 || channels >= 3) {
LOGE("Invalid number of audio channels: %d", channels);
+ return BAD_VALUE;
}
// Additional check on the number of channels will be performed later.
@@ -331,21 +342,23 @@ status_t StagefrightRecorder::setParamVideoEncodingBitRate(int32_t bitRate) {
return OK;
}
-status_t StagefrightRecorder::setParamMaxDurationOrFileSize(int64_t limit,
- bool limit_is_duration) {
- LOGV("setParamMaxDurationOrFileSize: limit (%lld) for %s",
- limit, limit_is_duration?"duration":"size");
- if (limit_is_duration) { // limit is in ms
- if (limit <= 1000) { // XXX: 1 second
- LOGE("Max file duration is too short: %lld us", limit);
- }
- mMaxFileDurationUs = limit * 1000LL;
- } else {
- if (limit <= 1024) { // XXX: 1 kB
- LOGE("Max file size is too small: %lld bytes", limit);
- }
- mMaxFileSizeBytes = limit;
+status_t StagefrightRecorder::setParamMaxFileDurationUs(int64_t timeUs) {
+ LOGV("setParamMaxFileDurationUs: %lld us", timeUs);
+ if (timeUs <= 1000000LL) { // XXX: 1 second
+ LOGE("Max file duration is too short: %lld us", timeUs);
+ return BAD_VALUE;
}
+ mMaxFileDurationUs = timeUs;
+ return OK;
+}
+
+status_t StagefrightRecorder::setParamMaxFileSizeBytes(int64_t bytes) {
+ LOGV("setParamMaxFileSizeBytes: %lld bytes", bytes);
+ if (bytes <= 1024) { // XXX: 1 kB
+ LOGE("Max file size is too small: %lld bytes", bytes);
+ return BAD_VALUE;
+ }
+ mMaxFileSizeBytes = bytes;
return OK;
}
@@ -370,7 +383,7 @@ status_t StagefrightRecorder::setParamInterleaveDuration(int32_t durationUs) {
// If interval < 0, only the first frame is I frame, and rest are all P frames
// If interval == 0, all frames are encoded as I frames. No P frames
-// If interval > 0, it is the time spacing between 2 neighboring I frames
+// If interval > 0, it is the time spacing (seconds) between 2 neighboring I frames
status_t StagefrightRecorder::setParamVideoIFramesInterval(int32_t interval) {
LOGV("setParamVideoIFramesInterval: %d seconds", interval);
mIFramesInterval = interval;
@@ -396,6 +409,7 @@ status_t StagefrightRecorder::setParamVideoCameraId(int32_t cameraId) {
status_t StagefrightRecorder::setParamTrackFrameStatus(int32_t nFrames) {
LOGV("setParamTrackFrameStatus: %d", nFrames);
if (nFrames <= 0) {
+ LOGE("Invalid number of frames to track: %d", nFrames);
return BAD_VALUE;
}
mTrackEveryNumberOfFrames = nFrames;
@@ -405,6 +419,7 @@ status_t StagefrightRecorder::setParamTrackFrameStatus(int32_t nFrames) {
status_t StagefrightRecorder::setParamTrackTimeStatus(int64_t timeDurationUs) {
LOGV("setParamTrackTimeStatus: %lld", timeDurationUs);
if (timeDurationUs < 20000) { // Infeasible if shorter than 20 ms?
+ LOGE("Tracking time duration too short: %lld us", timeDurationUs);
return BAD_VALUE;
}
mTrackEveryTimeDurationUs = timeDurationUs;
@@ -417,14 +432,12 @@ status_t StagefrightRecorder::setParameter(
if (key == "max-duration") {
int64_t max_duration_ms;
if (safe_strtoi64(value.string(), &max_duration_ms)) {
- return setParamMaxDurationOrFileSize(
- max_duration_ms, true /* limit_is_duration */);
+ return setParamMaxFileDurationUs(1000LL * max_duration_ms);
}
} else if (key == "max-filesize") {
int64_t max_filesize_bytes;
if (safe_strtoi64(value.string(), &max_filesize_bytes)) {
- return setParamMaxDurationOrFileSize(
- max_filesize_bytes, false /* limit is filesize */);
+ return setParamMaxFileSizeBytes(max_filesize_bytes);
}
} else if (key == "interleave-duration-us") {
int32_t durationUs;
@@ -528,7 +541,10 @@ status_t StagefrightRecorder::prepare() {
}
status_t StagefrightRecorder::start() {
+ CHECK(mOutputFd >= 0);
+
if (mWriter != NULL) {
+ LOGE("File writer is not avaialble");
return UNKNOWN_ERROR;
}
@@ -547,6 +563,7 @@ status_t StagefrightRecorder::start() {
return startAACRecording();
default:
+ LOGE("Unsupported output file format: %d", mOutputFormat);
return UNKNOWN_ERROR;
}
}
@@ -610,7 +627,6 @@ status_t StagefrightRecorder::startAACRecording() {
CHECK(mAudioEncoder == AUDIO_ENCODER_AAC);
CHECK(mAudioSource != AUDIO_SOURCE_LIST_END);
- CHECK(mOutputFd >= 0);
CHECK(0 == "AACWriter is not implemented yet");
@@ -626,34 +642,34 @@ status_t StagefrightRecorder::startAMRRecording() {
mAudioEncoder != AUDIO_ENCODER_AMR_NB) {
LOGE("Invalid encoder %d used for AMRNB recording",
mAudioEncoder);
- return UNKNOWN_ERROR;
+ return BAD_VALUE;
}
if (mSampleRate != 8000) {
LOGE("Invalid sampling rate %d used for AMRNB recording",
mSampleRate);
- return UNKNOWN_ERROR;
+ return BAD_VALUE;
}
} else { // mOutputFormat must be OUTPUT_FORMAT_AMR_WB
if (mAudioEncoder != AUDIO_ENCODER_AMR_WB) {
LOGE("Invlaid encoder %d used for AMRWB recording",
mAudioEncoder);
- return UNKNOWN_ERROR;
+ return BAD_VALUE;
}
if (mSampleRate != 16000) {
LOGE("Invalid sample rate %d used for AMRWB recording",
mSampleRate);
- return UNKNOWN_ERROR;
+ return BAD_VALUE;
}
}
if (mAudioChannels != 1) {
LOGE("Invalid number of audio channels %d used for amr recording",
mAudioChannels);
- return UNKNOWN_ERROR;
+ return BAD_VALUE;
}
if (mAudioSource >= AUDIO_SOURCE_LIST_END) {
LOGE("Invalid audio source: %d", mAudioSource);
- return UNKNOWN_ERROR;
+ return BAD_VALUE;
}
sp<MediaSource> audioEncoder = createAudioSource();
@@ -662,7 +678,6 @@ status_t StagefrightRecorder::startAMRRecording() {
return UNKNOWN_ERROR;
}
- CHECK(mOutputFd >= 0);
mWriter = new AMRWriter(dup(mOutputFd));
mWriter->addSource(audioEncoder);
@@ -729,6 +744,54 @@ void StagefrightRecorder::clipVideoFrameWidth() {
}
}
+status_t StagefrightRecorder::setupCameraSource() {
+ clipVideoBitRate();
+ clipVideoFrameRate();
+ clipVideoFrameWidth();
+ clipVideoFrameHeight();
+
+ int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ if (mCamera == 0) {
+ mCamera = Camera::connect(mCameraId);
+ if (mCamera == 0) {
+ LOGE("Camera connection could not be established.");
+ return -EBUSY;
+ }
+ mFlags &= ~FLAGS_HOT_CAMERA;
+ mCamera->lock();
+ }
+
+ // Set the actual video recording frame size
+ CameraParameters params(mCamera->getParameters());
+ params.setPreviewSize(mVideoWidth, mVideoHeight);
+ params.setPreviewFrameRate(mFrameRate);
+ String8 s = params.flatten();
+ CHECK_EQ(OK, mCamera->setParameters(s));
+ CameraParameters newCameraParams(mCamera->getParameters());
+
+ // Check on video frame size
+ int frameWidth = 0, frameHeight = 0;
+ newCameraParams.getPreviewSize(&frameWidth, &frameHeight);
+ if (frameWidth < 0 || frameWidth != mVideoWidth ||
+ frameHeight < 0 || frameHeight != mVideoHeight) {
+ LOGE("Failed to set the video frame size to %dx%d",
+ mVideoWidth, mVideoHeight);
+ IPCThreadState::self()->restoreCallingIdentity(token);
+ return UNKNOWN_ERROR;
+ }
+
+ // Check on video frame rate
+ int frameRate = newCameraParams.getPreviewFrameRate();
+ if (frameRate < 0 || (frameRate - mFrameRate) != 0) {
+ LOGE("Failed to set frame rate to %d fps. The actual "
+ "frame rate is %d", mFrameRate, frameRate);
+ }
+
+ CHECK_EQ(OK, mCamera->setPreviewDisplay(mPreviewSurface));
+ IPCThreadState::self()->restoreCallingIdentity(token);
+ return OK;
+}
+
void StagefrightRecorder::clipVideoFrameHeight() {
LOGV("clipVideoFrameHeight: encoder %d", mVideoEncoder);
int minFrameHeight = mEncoderProfiles->getVideoEncoderParamByName(
@@ -746,146 +809,110 @@ void StagefrightRecorder::clipVideoFrameHeight() {
}
}
-status_t StagefrightRecorder::startMPEG4Recording() {
- mWriter = new MPEG4Writer(dup(mOutputFd));
- int32_t totalBitRate = 0;
+status_t StagefrightRecorder::setupVideoEncoder(const sp<MediaWriter>& writer) {
+ status_t err = setupCameraSource();
+ if (err != OK) return err;
- // Add audio source first if it exists
- if (mAudioSource != AUDIO_SOURCE_LIST_END) {
- sp<MediaSource> audioEncoder;
- switch(mAudioEncoder) {
- case AUDIO_ENCODER_AMR_NB:
- case AUDIO_ENCODER_AMR_WB:
- case AUDIO_ENCODER_AAC:
- audioEncoder = createAudioSource();
- break;
- default:
- LOGE("Unsupported audio encoder: %d", mAudioEncoder);
- return UNKNOWN_ERROR;
- }
+ sp<CameraSource> cameraSource = CameraSource::CreateFromCamera(mCamera);
+ CHECK(cameraSource != NULL);
- if (audioEncoder == NULL) {
- return UNKNOWN_ERROR;
- }
- totalBitRate += mAudioBitRate;
- mWriter->addSource(audioEncoder);
- }
- if (mVideoSource == VIDEO_SOURCE_DEFAULT
- || mVideoSource == VIDEO_SOURCE_CAMERA) {
-
- clipVideoBitRate();
- clipVideoFrameRate();
- clipVideoFrameWidth();
- clipVideoFrameHeight();
-
- int64_t token = IPCThreadState::self()->clearCallingIdentity();
- if (mCamera == 0) {
- mCamera = Camera::connect(mCameraId);
- if (mCamera == 0) {
- LOGE("Camera connection could not be established.");
- return -EBUSY;
- }
- mFlags &= ~FLAGS_HOT_CAMERA;
- mCamera->lock();
- }
-
-
- // Set the actual video recording frame size
- CameraParameters params(mCamera->getParameters());
- params.setPreviewSize(mVideoWidth, mVideoHeight);
- params.setPreviewFrameRate(mFrameRate);
- String8 s = params.flatten();
- CHECK_EQ(OK, mCamera->setParameters(s));
- CameraParameters newCameraParams(mCamera->getParameters());
-
- // Check on video frame size
- int frameWidth = 0, frameHeight = 0;
- newCameraParams.getPreviewSize(&frameWidth, &frameHeight);
- if (frameWidth < 0 || frameWidth != mVideoWidth ||
- frameHeight < 0 || frameHeight != mVideoHeight) {
- LOGE("Failed to set the video frame size to %dx%d",
- mVideoWidth, mVideoHeight);
- IPCThreadState::self()->restoreCallingIdentity(token);
- return UNKNOWN_ERROR;
- }
-
- // Check on video frame rate
- int frameRate = newCameraParams.getPreviewFrameRate();
- if (frameRate < 0 || (frameRate - mFrameRate) != 0) {
- LOGE("Failed to set frame rate to %d fps. The actual "
- "frame rate is %d", mFrameRate, frameRate);
- }
+ sp<MetaData> enc_meta = new MetaData;
+ enc_meta->setInt32(kKeyBitRate, mVideoBitRate);
+ enc_meta->setInt32(kKeySampleRate, mFrameRate);
- CHECK_EQ(OK, mCamera->setPreviewDisplay(mPreviewSurface));
- IPCThreadState::self()->restoreCallingIdentity(token);
+ switch (mVideoEncoder) {
+ case VIDEO_ENCODER_H263:
+ enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263);
+ break;
- sp<CameraSource> cameraSource =
- CameraSource::CreateFromCamera(mCamera);
+ case VIDEO_ENCODER_MPEG_4_SP:
+ enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG4);
+ break;
- CHECK(cameraSource != NULL);
+ case VIDEO_ENCODER_H264:
+ enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
+ break;
- sp<MetaData> enc_meta = new MetaData;
- enc_meta->setInt32(kKeyBitRate, mVideoBitRate);
- enc_meta->setInt32(kKeySampleRate, mFrameRate);
+ default:
+ CHECK(!"Should not be here, unsupported video encoding.");
+ break;
+ }
- switch (mVideoEncoder) {
- case VIDEO_ENCODER_H263:
- enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263);
- break;
+ sp<MetaData> meta = cameraSource->getFormat();
- case VIDEO_ENCODER_MPEG_4_SP:
- enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG4);
- break;
+ int32_t width, height, stride, sliceHeight;
+ CHECK(meta->findInt32(kKeyWidth, &width));
+ CHECK(meta->findInt32(kKeyHeight, &height));
+ CHECK(meta->findInt32(kKeyStride, &stride));
+ CHECK(meta->findInt32(kKeySliceHeight, &sliceHeight));
- case VIDEO_ENCODER_H264:
- enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
- break;
+ enc_meta->setInt32(kKeyWidth, width);
+ enc_meta->setInt32(kKeyHeight, height);
+ enc_meta->setInt32(kKeyIFramesInterval, mIFramesInterval);
+ enc_meta->setInt32(kKeyStride, stride);
+ enc_meta->setInt32(kKeySliceHeight, sliceHeight);
- default:
- CHECK(!"Should not be here, unsupported video encoding.");
- break;
- }
+ OMXClient client;
+ CHECK_EQ(client.connect(), OK);
- sp<MetaData> meta = cameraSource->getFormat();
+ sp<MediaSource> encoder = OMXCodec::Create(
+ client.interface(), enc_meta,
+ true /* createEncoder */, cameraSource);
+ if (encoder == NULL) {
+ return UNKNOWN_ERROR;
+ }
- int32_t width, height, stride, sliceHeight;
- CHECK(meta->findInt32(kKeyWidth, &width));
- CHECK(meta->findInt32(kKeyHeight, &height));
- CHECK(meta->findInt32(kKeyStride, &stride));
- CHECK(meta->findInt32(kKeySliceHeight, &sliceHeight));
+ writer->addSource(encoder);
+ return OK;
+}
- enc_meta->setInt32(kKeyWidth, width);
- enc_meta->setInt32(kKeyHeight, height);
- enc_meta->setInt32(kKeyIFramesInterval, mIFramesInterval);
- enc_meta->setInt32(kKeyStride, stride);
- enc_meta->setInt32(kKeySliceHeight, sliceHeight);
+status_t StagefrightRecorder::setupAudioEncoder(const sp<MediaWriter>& writer) {
+ sp<MediaSource> audioEncoder;
+ switch(mAudioEncoder) {
+ case AUDIO_ENCODER_AMR_NB:
+ case AUDIO_ENCODER_AMR_WB:
+ case AUDIO_ENCODER_AAC:
+ audioEncoder = createAudioSource();
+ break;
+ default:
+ LOGE("Unsupported audio encoder: %d", mAudioEncoder);
+ return UNKNOWN_ERROR;
+ }
- OMXClient client;
- CHECK_EQ(client.connect(), OK);
+ if (audioEncoder == NULL) {
+ return UNKNOWN_ERROR;
+ }
+ writer->addSource(audioEncoder);
+ return OK;
+}
- sp<MediaSource> encoder =
- OMXCodec::Create(
- client.interface(), enc_meta,
- true /* createEncoder */, cameraSource);
+status_t StagefrightRecorder::startMPEG4Recording() {
+ int32_t totalBitRate = 0;
+ status_t err = OK;
+ sp<MediaWriter> writer = new MPEG4Writer(dup(mOutputFd));
- CHECK(mOutputFd >= 0);
+ // Add audio source first if it exists
+ if (mAudioSource != AUDIO_SOURCE_LIST_END) {
+ err = setupAudioEncoder(writer);
+ if (err != OK) return err;
+ totalBitRate += mAudioBitRate;
+ }
+ if (mVideoSource == VIDEO_SOURCE_DEFAULT
+ || mVideoSource == VIDEO_SOURCE_CAMERA) {
+ err = setupVideoEncoder(writer);
+ if (err != OK) return err;
totalBitRate += mVideoBitRate;
- mWriter->addSource(encoder);
}
- {
- // MPEGWriter specific handling
- MPEG4Writer *writer = ((MPEG4Writer *) mWriter.get());
- writer->setInterleaveDuration(mInterleaveDurationUs);
- }
+ reinterpret_cast<MPEG4Writer *>(writer.get())->
+ setInterleaveDuration(mInterleaveDurationUs);
if (mMaxFileDurationUs != 0) {
- mWriter->setMaxFileDuration(mMaxFileDurationUs);
+ writer->setMaxFileDuration(mMaxFileDurationUs);
}
if (mMaxFileSizeBytes != 0) {
- mWriter->setMaxFileSize(mMaxFileSizeBytes);
+ writer->setMaxFileSize(mMaxFileSizeBytes);
}
- mWriter->setListener(mListener);
sp<MetaData> meta = new MetaData;
meta->setInt64(kKeyTime, systemTime() / 1000);
meta->setInt32(kKeyFileType, mOutputFormat);
@@ -897,8 +924,9 @@ status_t StagefrightRecorder::startMPEG4Recording() {
if (mTrackEveryTimeDurationUs > 0) {
meta->setInt64(kKeyTrackTimeStatus, mTrackEveryTimeDurationUs);
}
- mWriter->start(meta.get());
- return OK;
+ writer->setListener(mListener);
+ mWriter = writer;
+ return mWriter->start(meta.get());
}
status_t StagefrightRecorder::pause() {
@@ -914,7 +942,7 @@ status_t StagefrightRecorder::stop() {
LOGV("stop");
if (mWriter != NULL) {
mWriter->stop();
- mWriter = NULL;
+ mWriter.clear();
}
if (mCamera != 0) {
@@ -925,7 +953,7 @@ status_t StagefrightRecorder::stop() {
mCamera->stopPreview();
}
mCamera->unlock();
- mCamera = NULL;
+ mCamera.clear();
IPCThreadState::self()->restoreCallingIdentity(token);
mFlags = 0;
}
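
Splitting setParamMaxDurationOrFileSize() into setParamMaxFileDurationUs() and setParamMaxFileSizeBytes() leaves the externally visible keys unchanged: "max-duration" is still supplied in milliseconds (converted to microseconds above) and "max-filesize" in bytes. A sketch of composing that parameter string on the client side; the semicolon-separated key=value form is the convention the recorder's parameter parsing already uses, and the helper name here is illustrative:

    // Sketch only: building the "max-duration"/"max-filesize" parameter
    // string that the code above parses. max-duration is in milliseconds,
    // max-filesize in bytes; both must exceed the minimums checked above
    // (1 s and 1 kB) or setParameter() returns BAD_VALUE.
    #include <stdio.h>
    #include <utils/String8.h>

    static android::String8 makeRecordingLimits(long long maxDurationMs,
                                                long long maxFileSizeBytes) {
        char buf[128];
        snprintf(buf, sizeof(buf),
                 "max-duration=%lld;max-filesize=%lld",
                 maxDurationMs, maxFileSizeBytes);
        return android::String8(buf);
    }

    // Example: makeRecordingLimits(30000, 8 * 1024 * 1024) caps a recording
    // at 30 seconds or 8 MiB, whichever is reached first.
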
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index f4488b641c3f..cb055718908d 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -97,6 +97,11 @@ private:
status_t startAMRRecording();
status_t startAACRecording();
sp<MediaSource> createAudioSource();
+ status_t setupCameraSource();
+ status_t setupAudioEncoder(const sp<MediaWriter>& writer);
+ status_t setupVideoEncoder(const sp<MediaWriter>& writer);
+
+ // Encoding parameter handling utilities
status_t setParameter(const String8 &key, const String8 &value);
status_t setParamAudioEncodingBitRate(int32_t bitRate);
status_t setParamAudioNumberOfChannels(int32_t channels);
@@ -108,7 +113,8 @@ private:
status_t setParamTrackFrameStatus(int32_t nFrames);
status_t setParamInterleaveDuration(int32_t durationUs);
status_t setParam64BitFileOffset(bool use64BitFileOffset);
- status_t setParamMaxDurationOrFileSize(int64_t limit, bool limit_is_duration);
+ status_t setParamMaxFileDurationUs(int64_t timeUs);
+ status_t setParamMaxFileSizeBytes(int64_t bytes);
void clipVideoBitRate();
void clipVideoFrameRate();
void clipVideoFrameWidth();
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index a52c88850320..6a4a13118bc2 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -34,6 +34,8 @@
#include <media/mediarecorder.h>
#include <cutils/properties.h>
+#include "include/ESDS.h"
+
namespace android {
class MPEG4Writer::Track {
@@ -126,6 +128,8 @@ private:
int32_t *min, int32_t *avg, int32_t *max);
void findMinMaxChunkDurations(int64_t *min, int64_t *max);
+ void getCodecSpecificDataFromInputFormatIfPossible();
+
Track(const Track &);
Track &operator=(const Track &);
};
@@ -678,6 +682,38 @@ MPEG4Writer::Track::Track(
mCodecSpecificDataSize(0),
mGotAllCodecSpecificData(false),
mReachedEOS(false) {
+ getCodecSpecificDataFromInputFormatIfPossible();
+}
+
+void MPEG4Writer::Track::getCodecSpecificDataFromInputFormatIfPossible() {
+ const char *mime;
+ CHECK(mMeta->findCString(kKeyMIMEType, &mime));
+
+ if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) {
+ uint32_t type;
+ const void *data;
+ size_t size;
+ if (mMeta->findData(kKeyAVCC, &type, &data, &size)) {
+ mCodecSpecificData = malloc(size);
+ mCodecSpecificDataSize = size;
+ memcpy(mCodecSpecificData, data, size);
+ mGotAllCodecSpecificData = true;
+ }
+ } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG4)
+ || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
+ uint32_t type;
+ const void *data;
+ size_t size;
+ if (mMeta->findData(kKeyESDS, &type, &data, &size)) {
+ ESDS esds(data, size);
+ if (esds.getCodecSpecificInfo(&data, &size) == OK) {
+ mCodecSpecificData = malloc(size);
+ mCodecSpecificDataSize = size;
+ memcpy(mCodecSpecificData, data, size);
+ mGotAllCodecSpecificData = true;
+ }
+ }
+ }
}
MPEG4Writer::Track::~Track() {
@@ -721,7 +757,10 @@ status_t MPEG4Writer::Track::start(MetaData *params) {
}
int64_t startTimeUs;
- CHECK(params && params->findInt64(kKeyTime, &startTimeUs));
+ if (params == NULL || !params->findInt64(kKeyTime, &startTimeUs)) {
+ startTimeUs = 0;
+ }
+
initTrackingProgressStatus(params);
sp<MetaData> meta = new MetaData;
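
getCodecSpecificDataFromInputFormatIfPossible() means a track whose input MetaData already carries an avcC or ESDS blob does not have to wait for the first encoded buffers to learn its codec configuration. A sketch of attaching such data before addSource(); the byte payloads are placeholders, since in practice OMXCodec publishes kKeyAVCC/kKeyESDS itself, and kTypeAVCC/kTypeESDS are the type tags defined next to those keys in MetaData.h:

    // Sketch only: supplying codec-specific data on a track's input format
    // so the Track constructor above can copy it up front. The avcc/esds
    // buffers are placeholders for real codec configuration blobs.
    #include <media/stagefright/MediaDefs.h>
    #include <media/stagefright/MetaData.h>

    using namespace android;

    static void attachAvcConfig(const sp<MetaData> &format,
                                const void *avcc, size_t size) {
        format->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
        format->setData(kKeyAVCC, kTypeAVCC, avcc, size);
    }

    static void attachEsdsConfig(const sp<MetaData> &format,
                                 const void *esds, size_t size) {
        // Used for MPEG-4 video or AAC audio tracks.
        format->setData(kKeyESDS, kTypeESDS, esds, size);
    }
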
diff --git a/native/android/Android.mk b/native/android/Android.mk
index 8c621b601736..fe8ed0003a0a 100644
--- a/native/android/Android.mk
+++ b/native/android/Android.mk
@@ -7,7 +7,8 @@ include $(CLEAR_VARS)
#
LOCAL_SRC_FILES:= \
activity.cpp \
- input.cpp
+ input.cpp \
+ native_window.cpp
LOCAL_SHARED_LIBRARIES := \
libandroid_runtime \
diff --git a/native/android/native_window.cpp b/native/android/native_window.cpp
new file mode 100644
index 000000000000..448cbfccb666
--- /dev/null
+++ b/native/android/native_window.cpp
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Surface"
+#include <utils/Log.h>
+
+#include <android/native_window.h>
+#include <surfaceflinger/Surface.h>
+
+using android::Surface;
+
+static int32_t getWindowProp(ANativeWindow* window, int what) {
+ int value;
+ int res = window->query(window, what, &value);
+ return res < 0 ? res : value;
+}
+
+int32_t ANativeWindow_getWidth(ANativeWindow* window) {
+ return getWindowProp(window, NATIVE_WINDOW_WIDTH);
+}
+
+int32_t ANativeWindow_getHeight(ANativeWindow* window) {
+ return getWindowProp(window, NATIVE_WINDOW_HEIGHT);
+}
+
+int32_t ANativeWindow_getFormat(ANativeWindow* window) {
+ return getWindowProp(window, NATIVE_WINDOW_FORMAT);
+}
+
+int32_t ANativeWindow_setBuffersGeometry(ANativeWindow* window, int32_t width,
+ int32_t height, int32_t format) {
+ native_window_set_buffers_geometry(window, width, height, format);
+ return 0;
+}
diff --git a/native/include/android/native_activity.h b/native/include/android/native_activity.h
index c5c8f9defda3..bf5c641fc162 100644
--- a/native/include/android/native_activity.h
+++ b/native/include/android/native_activity.h
@@ -24,15 +24,12 @@
#include <jni.h>
#include <android/input.h>
+#include <android/native_window.h>
#ifdef __cplusplus
extern "C" {
#endif
-// Temporary until native surface API is defined.
-struct ASurfaceHolder;
-typedef struct ASurfaceHolder ASurfaceHolder;
-
struct ANativeActivityCallbacks;
/**
@@ -129,30 +126,20 @@ typedef struct ANativeActivityCallbacks {
void (*onWindowFocusChanged)(ANativeActivity* activity, int hasFocus);
/**
- * The drawing surface for this native activity has been created. You
- * can use the given surface object to start drawing. NOTE: surface
- * drawing API is not yet defined.
- */
- void (*onSurfaceCreated)(ANativeActivity* activity, ASurfaceHolder* surface);
-
- /**
- * The drawing surface for this native activity has changed. The surface
- * given here is guaranteed to be the same as the one last given to
- * onSurfaceCreated. This is simply to inform you about interesting
- * changed to that surface.
+ * The drawing window for this native activity has been created. You
+ * can use the given native window object to start drawing.
*/
- void (*onSurfaceChanged)(ANativeActivity* activity, ASurfaceHolder* surface,
- int format, int width, int height);
+ void (*onNativeWindowCreated)(ANativeActivity* activity, ANativeWindow* window);
/**
- * The drawing surface for this native activity is going to be destroyed.
- * You MUST ensure that you do not touch the surface object after returning
- * from this function: in the common case of drawing to the surface from
+ * The drawing window for this native activity is going to be destroyed.
+ * You MUST ensure that you do not touch the window object after returning
+ * from this function: in the common case of drawing to the window from
* another thread, that means the implementation of this callback must
* properly synchronize with the other thread to stop its drawing before
* returning from here.
*/
- void (*onSurfaceDestroyed)(ANativeActivity* activity, ASurfaceHolder* surface);
+ void (*onNativeWindowDestroyed)(ANativeActivity* activity, ANativeWindow* window);
/**
* The input queue for this native activity's window has been created.
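
Native code adopting the renamed callbacks simply assigns the new function pointers on the ANativeActivityCallbacks structure it receives. A minimal sketch; the registration point and handler bodies are application-specific, and ANativeActivity::callbacks is assumed to match the released NDK header:

    // Sketch only: wiring the renamed window callbacks. The handlers do
    // nothing here; a real application would start and stop its rendering.
    #include <android/native_activity.h>

    static void handleNativeWindowCreated(ANativeActivity* activity,
                                          ANativeWindow* window) {
        // Safe to begin drawing into 'window' from here on.
    }

    static void handleNativeWindowDestroyed(ANativeActivity* activity,
                                            ANativeWindow* window) {
        // Per the contract above, stop touching 'window' before returning.
    }

    static void registerWindowCallbacks(ANativeActivity* activity) {
        activity->callbacks->onNativeWindowCreated = handleNativeWindowCreated;
        activity->callbacks->onNativeWindowDestroyed = handleNativeWindowDestroyed;
    }
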
diff --git a/native/include/android/native_window.h b/native/include/android/native_window.h
index e6d5fea0a2b1..678ba3d7c7e2 100644
--- a/native/include/android/native_window.h
+++ b/native/include/android/native_window.h
@@ -22,9 +22,51 @@
extern "C" {
#endif
+/*
+ * Pixel formats that a window can use.
+ */
+enum {
+ WINDOW_FORMAT_RGBA_8888 = 1,
+ WINDOW_FORMAT_RGBX_8888 = 2,
+ WINDOW_FORMAT_RGB_565 = 4,
+};
+
struct ANativeWindow;
typedef struct ANativeWindow ANativeWindow;
+/*
+ * Return the current width in pixels of the window surface. Returns a
+ * negative value on error.
+ */
+int32_t ANativeWindow_getWidth(ANativeWindow* window);
+
+/*
+ * Return the current height in pixels of the window surface. Returns a
+ * negative value on error.
+ */
+int32_t ANativeWindow_getHeight(ANativeWindow* window);
+
+/*
+ * Return the current pixel format of the window surface. Returns a
+ * negative value on error.
+ */
+int32_t ANativeWindow_getFormat(ANativeWindow* window);
+
+/*
+ * Change the format and size of the window buffers.
+ *
+ * The width and height control the number of pixels in the buffers, not the
+ * dimensions of the window on screen. If these are different than the
+ * window's physical size, then the buffer will be scaled to match that size
+ * when compositing it to the screen.
+ *
+ * The format may be one of the window format constants above.
+ *
+ * For all of these parameters, if 0 is supplied then the window's base
+ * value will come back in force.
+ */
+int32_t ANativeWindow_setBuffersGeometry(ANativeWindow* window, int32_t width,
+ int32_t height, int32_t format);
#ifdef __cplusplus
};
diff --git a/packages/SystemUI/src/com/android/systemui/statusbar/StatusBarService.java b/packages/SystemUI/src/com/android/systemui/statusbar/StatusBarService.java
index 4ddd45ca97e1..07bcce7056be 100644
--- a/packages/SystemUI/src/com/android/systemui/statusbar/StatusBarService.java
+++ b/packages/SystemUI/src/com/android/systemui/statusbar/StatusBarService.java
@@ -584,7 +584,7 @@ public class StatusBarService extends Service implements CommandQueue.Callbacks
}
if (expanded == null) {
String ident = notification.pkg + "/0x" + Integer.toHexString(notification.id);
- Slog.e(TAG, "couldn't inflate view for notification " + ident);
+ Slog.e(TAG, "couldn't inflate view for notification " + ident, exception);
return null;
} else {
content.addView(expanded);