diff options
Diffstat (limited to 'media')
135 files changed, 19739 insertions, 1994 deletions
diff --git a/media/java/android/media/AsyncPlayer.java b/media/java/android/media/AsyncPlayer.java index 35f0409d09a8..e1e09b96acc0 100644 --- a/media/java/android/media/AsyncPlayer.java +++ b/media/java/android/media/AsyncPlayer.java @@ -19,10 +19,12 @@ package android.media; import android.content.Context; import android.net.Uri; import android.os.PowerManager; +import android.os.SystemClock; import android.util.Log; import java.io.IOException; import java.lang.IllegalStateException; +import java.util.LinkedList; /** * Plays a series of audio URIs, but does all the hard work on another thread @@ -31,14 +33,15 @@ import java.lang.IllegalStateException; public class AsyncPlayer { private static final int PLAY = 1; private static final int STOP = 2; + private static final boolean mDebug = false; private static final class Command { - Command next; int code; Context context; Uri uri; boolean looping; int stream; + long requestTime; public String toString() { return "{ code=" + code + " looping=" + looping + " stream=" + stream @@ -46,6 +49,36 @@ public class AsyncPlayer { } } + private LinkedList<Command> mCmdQueue = new LinkedList(); + + private void startSound(Command cmd) { + // Preparing can be slow, so if there is something else + // is playing, let it continue until we're done, so there + // is less of a glitch. 
+ try { + if (mDebug) Log.d(mTag, "Starting playback"); + MediaPlayer player = new MediaPlayer(); + player.setAudioStreamType(cmd.stream); + player.setDataSource(cmd.context, cmd.uri); + player.setLooping(cmd.looping); + player.prepare(); + player.start(); + if (mPlayer != null) { + mPlayer.release(); + } + mPlayer = player; + long delay = SystemClock.uptimeMillis() - cmd.requestTime; + if (delay > 1000) { + Log.w(mTag, "Notification sound delayed by " + delay + "msecs"); + } + } + catch (IOException e) { + Log.w(mTag, "error loading sound for " + cmd.uri, e); + } catch (IllegalStateException e) { + Log.w(mTag, "IllegalStateException (content provider died?) " + cmd.uri, e); + } + } + private final class Thread extends java.lang.Thread { Thread() { super("AsyncPlayer-" + mTag); @@ -55,41 +88,23 @@ public class AsyncPlayer { while (true) { Command cmd = null; - synchronized (mLock) { - if (mHead != null) { - cmd = mHead; - mHead = cmd.next; - if (mTail == cmd) { - mTail = null; - } - } + synchronized (mCmdQueue) { + if (mDebug) Log.d(mTag, "RemoveFirst"); + cmd = mCmdQueue.removeFirst(); } switch (cmd.code) { case PLAY: - try { - // Preparing can be slow, so if there is something else - // is playing, let it continue until we're done, so there - // is less of a glitch. - MediaPlayer player = new MediaPlayer(); - player.setAudioStreamType(cmd.stream); - player.setDataSource(cmd.context, cmd.uri); - player.setLooping(cmd.looping); - player.prepare(); - player.start(); - if (mPlayer != null) { - mPlayer.release(); - } - mPlayer = player; - } - catch (IOException e) { - Log.w(mTag, "error loading sound for " + cmd.uri, e); - } catch (IllegalStateException e) { - Log.w(mTag, "IllegalStateException (content provider died?) 
" + cmd.uri, e); - } + if (mDebug) Log.d(mTag, "PLAY"); + startSound(cmd); break; case STOP: + if (mDebug) Log.d(mTag, "STOP"); if (mPlayer != null) { + long delay = SystemClock.uptimeMillis() - cmd.requestTime; + if (delay > 1000) { + Log.w(mTag, "Notification stop delayed by " + delay + "msecs"); + } mPlayer.stop(); mPlayer.release(); mPlayer = null; @@ -99,8 +114,8 @@ public class AsyncPlayer { break; } - synchronized (mLock) { - if (mHead == null) { + synchronized (mCmdQueue) { + if (mCmdQueue.size() == 0) { // nothing left to do, quit // doing this check after we're done prevents the case where they // added it during the operation from spawning two threads and @@ -115,11 +130,8 @@ public class AsyncPlayer { } private String mTag; - private Command mHead; - private Command mTail; private Thread mThread; private MediaPlayer mPlayer; - private Object mLock = new Object(); private PowerManager.WakeLock mWakeLock; // The current state according to the caller. Reality lags behind @@ -154,12 +166,13 @@ public class AsyncPlayer { */ public void play(Context context, Uri uri, boolean looping, int stream) { Command cmd = new Command(); + cmd.requestTime = SystemClock.uptimeMillis(); cmd.code = PLAY; cmd.context = context; cmd.uri = uri; cmd.looping = looping; cmd.stream = stream; - synchronized (mLock) { + synchronized (mCmdQueue) { enqueueLocked(cmd); mState = PLAY; } @@ -170,11 +183,12 @@ public class AsyncPlayer { * at this point. Calling this multiple times has no ill effects. */ public void stop() { - synchronized (mLock) { + synchronized (mCmdQueue) { // This check allows stop to be called multiple times without starting // a thread that ends up doing nothing. 
if (mState != STOP) { Command cmd = new Command(); + cmd.requestTime = SystemClock.uptimeMillis(); cmd.code = STOP; enqueueLocked(cmd); mState = STOP; @@ -183,12 +197,7 @@ public class AsyncPlayer { } private void enqueueLocked(Command cmd) { - if (mTail == null) { - mHead = cmd; - } else { - mTail.next = cmd; - } - mTail = cmd; + mCmdQueue.add(cmd); if (mThread == null) { acquireWakeLock(); mThread = new Thread(); diff --git a/media/java/android/media/AudioFormat.java b/media/java/android/media/AudioFormat.java index 0732b615e7e5..b3aae728f222 100644 --- a/media/java/android/media/AudioFormat.java +++ b/media/java/android/media/AudioFormat.java @@ -37,15 +37,61 @@ public class AudioFormat { public static final int ENCODING_PCM_8BIT = 3; // accessed by native code /** Invalid audio channel configuration */ - public static final int CHANNEL_CONFIGURATION_INVALID = 0; + /** @deprecated use CHANNEL_INVALID instead */ + @Deprecated public static final int CHANNEL_CONFIGURATION_INVALID = 0; /** Default audio channel configuration */ - public static final int CHANNEL_CONFIGURATION_DEFAULT = 1; + /** @deprecated use CHANNEL_OUT_DEFAULT or CHANNEL_IN_DEFAULT instead */ + @Deprecated public static final int CHANNEL_CONFIGURATION_DEFAULT = 1; /** Mono audio configuration */ - public static final int CHANNEL_CONFIGURATION_MONO = 2; + /** @deprecated use CHANNEL_OUT_MONO or CHANNEL_IN_MONO instead */ + @Deprecated public static final int CHANNEL_CONFIGURATION_MONO = 2; /** Stereo (2 channel) audio configuration */ - public static final int CHANNEL_CONFIGURATION_STEREO = 3; + /** @deprecated use CHANNEL_OUT_STEREO or CHANNEL_IN_STEREO instead */ + @Deprecated public static final int CHANNEL_CONFIGURATION_STEREO = 3; -} + /** Invalid audio channel mask */ + public static final int CHANNEL_INVALID = 0; + /** Default audio channel mask */ + public static final int CHANNEL_OUT_DEFAULT = 1; + // Channel mask definitions must be kept in sync with native values in 
include/media/AudioSystem.h + public static final int CHANNEL_OUT_FRONT_LEFT = 0x4; + public static final int CHANNEL_OUT_FRONT_RIGHT = 0x8; + public static final int CHANNEL_OUT_FRONT_CENTER = 0x10; + public static final int CHANNEL_OUT_LOW_FREQUENCY = 0x20; + public static final int CHANNEL_OUT_BACK_LEFT = 0x40; + public static final int CHANNEL_OUT_BACK_RIGHT = 0x80; + public static final int CHANNEL_OUT_FRONT_LEFT_OF_CENTER = 0x100; + public static final int CHANNEL_OUT_FRONT_RIGHT_OF_CENTER = 0x200; + public static final int CHANNEL_OUT_BACK_CENTER = 0x400; + public static final int CHANNEL_OUT_MONO = CHANNEL_OUT_FRONT_LEFT; + public static final int CHANNEL_OUT_STEREO = (CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT); + public static final int CHANNEL_OUT_QUAD = (CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT | + CHANNEL_OUT_BACK_LEFT | CHANNEL_OUT_BACK_RIGHT); + public static final int CHANNEL_OUT_SURROUND = (CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT | + CHANNEL_OUT_FRONT_CENTER | CHANNEL_OUT_BACK_CENTER); + public static final int CHANNEL_OUT_5POINT1 = (CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT | + CHANNEL_OUT_FRONT_CENTER | CHANNEL_OUT_LOW_FREQUENCY | CHANNEL_OUT_BACK_LEFT | CHANNEL_OUT_BACK_RIGHT); + public static final int CHANNEL_OUT_7POINT1 = (CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT | + CHANNEL_OUT_FRONT_CENTER | CHANNEL_OUT_LOW_FREQUENCY | CHANNEL_OUT_BACK_LEFT | CHANNEL_OUT_BACK_RIGHT | + CHANNEL_OUT_FRONT_LEFT_OF_CENTER | CHANNEL_OUT_FRONT_RIGHT_OF_CENTER); + public static final int CHANNEL_IN_DEFAULT = 1; + public static final int CHANNEL_IN_LEFT = 0x4; + public static final int CHANNEL_IN_RIGHT = 0x8; + public static final int CHANNEL_IN_FRONT = 0x10; + public static final int CHANNEL_IN_BACK = 0x20; + public static final int CHANNEL_IN_LEFT_PROCESSED = 0x40; + public static final int CHANNEL_IN_RIGHT_PROCESSED = 0x80; + public static final int CHANNEL_IN_FRONT_PROCESSED = 0x100; + public static final int 
CHANNEL_IN_BACK_PROCESSED = 0x200; + public static final int CHANNEL_IN_PRESSURE = 0x400; + public static final int CHANNEL_IN_X_AXIS = 0x800; + public static final int CHANNEL_IN_Y_AXIS = 0x1000; + public static final int CHANNEL_IN_Z_AXIS = 0x2000; + public static final int CHANNEL_IN_VOICE_UPLINK = 0x4000; + public static final int CHANNEL_IN_VOICE_DNLINK = 0x8000; + public static final int CHANNEL_IN_MONO = CHANNEL_IN_FRONT; + public static final int CHANNEL_IN_STEREO = (CHANNEL_IN_LEFT | CHANNEL_IN_RIGHT); +} diff --git a/media/java/android/media/AudioManager.java b/media/java/android/media/AudioManager.java index a65a4172fb9c..bb16215a51de 100644 --- a/media/java/android/media/AudioManager.java +++ b/media/java/android/media/AudioManager.java @@ -140,33 +140,31 @@ public class AudioManager { public static final int STREAM_NOTIFICATION = AudioSystem.STREAM_NOTIFICATION; /** @hide The audio stream for phone calls when connected to bluetooth */ public static final int STREAM_BLUETOOTH_SCO = AudioSystem.STREAM_BLUETOOTH_SCO; + /** @hide The audio stream for enforced system sounds in certain countries (e.g camera in Japan) */ + public static final int STREAM_SYSTEM_ENFORCED = AudioSystem.STREAM_SYSTEM_ENFORCED; + /** The audio stream for DTMF Tones */ + public static final int STREAM_DTMF = AudioSystem.STREAM_DTMF; + /** @hide The audio stream for text to speech (TTS) */ + public static final int STREAM_TTS = AudioSystem.STREAM_TTS; /** Number of audio streams */ /** * @deprecated Use AudioSystem.getNumStreamTypes() instead */ - public static final int NUM_STREAMS = AudioSystem.NUM_STREAMS; + @Deprecated public static final int NUM_STREAMS = AudioSystem.NUM_STREAMS; - /** @hide Maximum volume index values for audio streams */ - public static final int[] MAX_STREAM_VOLUME = new int[] { - 6, // STREAM_VOICE_CALL - 8, // STREAM_SYSTEM - 8, // STREAM_RING - 16, // STREAM_MUSIC - 8, // STREAM_ALARM - 8, // STREAM_NOTIFICATION - 16, // STREAM_BLUETOOTH_SCO - }; - /** 
@hide Default volume index values for audio streams */ public static final int[] DEFAULT_STREAM_VOLUME = new int[] { 4, // STREAM_VOICE_CALL - 5, // STREAM_SYSTEM + 7, // STREAM_SYSTEM 5, // STREAM_RING 11, // STREAM_MUSIC 6, // STREAM_ALARM 5, // STREAM_NOTIFICATION - 7 // STREAM_BLUETOOTH_SCO + 7, // STREAM_BLUETOOTH_SCO + 7, // STREAM_SYSTEM_ENFORCED + 11, // STREAM_DTMF + 11 // STREAM_TTS }; /** @@ -347,6 +345,9 @@ public class AudioManager { /** * Adjusts the volume of a particular stream by one step in a direction. + * <p> + * This method should only be used by applications that replace the platform-wide + * management of audio settings or the main telephony application. * * @param streamType The stream type to adjust. One of {@link #STREAM_VOICE_CALL}, * {@link #STREAM_SYSTEM}, {@link #STREAM_RING}, {@link #STREAM_MUSIC} or @@ -372,6 +373,9 @@ public class AudioManager { * active, it will have the highest priority regardless of if the in-call * screen is showing. Another example, if music is playing in the background * and a call is not active, the music stream will be adjusted. + * <p> + * This method should only be used by applications that replace the platform-wide + * management of audio settings or the main telephony application. * * @param direction The direction to adjust the volume. One of * {@link #ADJUST_LOWER}, {@link #ADJUST_RAISE}, or @@ -393,6 +397,9 @@ public class AudioManager { /** * Adjusts the volume of the most relevant stream, or the given fallback * stream. + * <p> + * This method should only be used by applications that replace the platform-wide + * management of audio settings or the main telephony application. * * @param direction The direction to adjust the volume. One of * {@link #ADJUST_LOWER}, {@link #ADJUST_RAISE}, or @@ -543,6 +550,9 @@ public class AudioManager { * <p> * For a better user experience, applications MUST unmute a muted stream * in onPause() and mute is again in onResume() if appropriate. 
+ * <p> + * This method should only be used by applications that replace the platform-wide + * management of audio settings or the main telephony application. * * @param streamType The stream to be muted/unmuted. * @param state The required mute state: true for mute ON, false for mute OFF @@ -610,6 +620,9 @@ public class AudioManager { /** * Sets the setting for when the vibrate type should vibrate. + * <p> + * This method should only be used by applications that replace the platform-wide + * management of audio settings or the main telephony application. * * @param vibrateType The type of vibrate. One of * {@link #VIBRATE_TYPE_NOTIFICATION} or @@ -632,14 +645,20 @@ public class AudioManager { /** * Sets the speakerphone on or off. + * <p> + * This method should only be used by applications that replace the platform-wide + * management of audio settings or the main telephony application. * * @param on set <var>true</var> to turn on speakerphone; * <var>false</var> to turn it off */ public void setSpeakerphoneOn(boolean on){ - // Temporary fix for issue #1713090 until audio routing is refactored in eclair release. - // MODE_INVALID indicates to AudioService that setRouting() was initiated by AudioManager - setRoutingP(MODE_INVALID, on ? ROUTE_SPEAKER: 0, ROUTE_SPEAKER); + IAudioService service = getService(); + try { + service.setSpeakerphoneOn(on); + } catch (RemoteException e) { + Log.e(TAG, "Dead object in setSpeakerphoneOn", e); + } } /** @@ -648,41 +667,55 @@ public class AudioManager { * @return true if speakerphone is on, false if it's off */ public boolean isSpeakerphoneOn() { - return (getRoutingP(MODE_IN_CALL) & ROUTE_SPEAKER) == 0 ? false : true; + IAudioService service = getService(); + try { + return service.isSpeakerphoneOn(); + } catch (RemoteException e) { + Log.e(TAG, "Dead object in isSpeakerphoneOn", e); + return false; + } } /** - * Sets audio routing to the Bluetooth headset on or off. + * Request use of Bluetooth SCO headset for communications. 
+ * <p> + * This method should only be used by applications that replace the platform-wide + * management of audio settings or the main telephony application. * - * @param on set <var>true</var> to route SCO (voice) audio to/from Bluetooth - * headset; <var>false</var> to route audio to/from phone earpiece + * @param on set <var>true</var> to use bluetooth SCO for communications; + * <var>false</var> to not use bluetooth SCO for communications */ public void setBluetoothScoOn(boolean on){ - // Temporary fix for issue #1713090 until audio routing is refactored in eclair release. - // MODE_INVALID indicates to AudioService that setRouting() was initiated by AudioManager - setRoutingP(MODE_INVALID, on ? ROUTE_BLUETOOTH_SCO: 0, ROUTE_BLUETOOTH_SCO); + IAudioService service = getService(); + try { + service.setBluetoothScoOn(on); + } catch (RemoteException e) { + Log.e(TAG, "Dead object in setBluetoothScoOn", e); + } } /** - * Checks whether audio routing to the Bluetooth headset is on or off. + * Checks whether communications use Bluetooth SCO. * - * @return true if SCO audio is being routed to/from Bluetooth headset; + * @return true if SCO is used for communications; * false if otherwise */ public boolean isBluetoothScoOn() { - return (getRoutingP(MODE_IN_CALL) & ROUTE_BLUETOOTH_SCO) == 0 ? false : true; + IAudioService service = getService(); + try { + return service.isBluetoothScoOn(); + } catch (RemoteException e) { + Log.e(TAG, "Dead object in isBluetoothScoOn", e); + return false; + } } /** - * Sets A2DP audio routing to the Bluetooth headset on or off. - * * @param on set <var>true</var> to route A2DP audio to/from Bluetooth * headset; <var>false</var> disable A2DP audio + * @deprecated Do not use. */ - public void setBluetoothA2dpOn(boolean on){ - // Temporary fix for issue #1713090 until audio routing is refactored in eclair release. - // MODE_INVALID indicates to AudioService that setRouting() was initiated by AudioManager - setRoutingP(MODE_INVALID, on ? 
ROUTE_BLUETOOTH_A2DP: 0, ROUTE_BLUETOOTH_A2DP); + @Deprecated public void setBluetoothA2dpOn(boolean on){ } /** @@ -692,7 +725,12 @@ public class AudioManager { * false if otherwise */ public boolean isBluetoothA2dpOn() { - return (getRoutingP(MODE_NORMAL) & ROUTE_BLUETOOTH_A2DP) == 0 ? false : true; + if (AudioSystem.getDeviceConnectionState(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP,"") + == AudioSystem.DEVICE_STATE_UNAVAILABLE) { + return false; + } else { + return true; + } } /** @@ -700,12 +738,9 @@ public class AudioManager { * * @param on set <var>true</var> to route audio to/from wired * headset; <var>false</var> disable wired headset audio - * @hide + * @deprecated Do not use. */ - public void setWiredHeadsetOn(boolean on){ - // Temporary fix for issue #1713090 until audio routing is refactored in eclair release. - // MODE_INVALID indicates to AudioService that setRouting() was initiated by AudioManager - setRoutingP(MODE_INVALID, on ? ROUTE_HEADSET: 0, ROUTE_HEADSET); + @Deprecated public void setWiredHeadsetOn(boolean on){ } /** @@ -713,25 +748,27 @@ public class AudioManager { * * @return true if audio is being routed to/from wired headset; * false if otherwise - * @hide */ public boolean isWiredHeadsetOn() { - return (getRoutingP(MODE_NORMAL) & ROUTE_HEADSET) == 0 ? false : true; + if (AudioSystem.getDeviceConnectionState(AudioSystem.DEVICE_OUT_WIRED_HEADSET,"") + == AudioSystem.DEVICE_STATE_UNAVAILABLE) { + return false; + } else { + return true; + } } /** * Sets the microphone mute on or off. + * <p> + * This method should only be used by applications that replace the platform-wide + * management of audio settings or the main telephony application. 
* * @param on set <var>true</var> to mute the microphone; * <var>false</var> to turn mute off */ public void setMicrophoneMute(boolean on){ - IAudioService service = getService(); - try { - service.setMicrophoneMute(on); - } catch (RemoteException e) { - Log.e(TAG, "Dead object in setMicrophoneMute", e); - } + AudioSystem.muteMicrophone(on); } /** @@ -740,17 +777,18 @@ public class AudioManager { * @return true if microphone is muted, false if it's not */ public boolean isMicrophoneMute() { - IAudioService service = getService(); - try { - return service.isMicrophoneMute(); - } catch (RemoteException e) { - Log.e(TAG, "Dead object in isMicrophoneMute", e); - return false; - } + return AudioSystem.isMicrophoneMuted(); } /** * Sets the audio mode. + * <p> + * The audio mode encompasses audio routing AND the behavior of + * the telephony layer. Therefore this method should only be used by applications that + * replace the platform-wide management of audio settings or the main telephony application. + * In particular, the {@link #MODE_IN_CALL} mode should only be used by the telephony + * application when it places a phone call, as it will cause signals from the radio layer + * to feed the platform mixer. * * @param mode the requested audio mode (NORMAL, RINGTONE, or IN_CALL). * Informs the HAL about the current audio state so that @@ -809,32 +847,46 @@ public class AudioManager { /* Routing bits for setRouting/getRouting API */ /** * Routing audio output to earpiece + * @deprecated Do not set audio routing directly, use setSpeakerphoneOn(), + * setBluetoothScoOn() methods instead. */ - public static final int ROUTE_EARPIECE = AudioSystem.ROUTE_EARPIECE; + @Deprecated public static final int ROUTE_EARPIECE = AudioSystem.ROUTE_EARPIECE; /** - * Routing audio output to spaker + * Routing audio output to speaker + * @deprecated Do not set audio routing directly, use setSpeakerphoneOn(), + * setBluetoothScoOn() methods instead. 
*/ - public static final int ROUTE_SPEAKER = AudioSystem.ROUTE_SPEAKER; + @Deprecated public static final int ROUTE_SPEAKER = AudioSystem.ROUTE_SPEAKER; /** * @deprecated use {@link #ROUTE_BLUETOOTH_SCO} + * @deprecated Do not set audio routing directly, use setSpeakerphoneOn(), + * setBluetoothScoOn() methods instead. */ @Deprecated public static final int ROUTE_BLUETOOTH = AudioSystem.ROUTE_BLUETOOTH_SCO; /** * Routing audio output to bluetooth SCO + * @deprecated Do not set audio routing directly, use setSpeakerphoneOn(), + * setBluetoothScoOn() methods instead. */ - public static final int ROUTE_BLUETOOTH_SCO = AudioSystem.ROUTE_BLUETOOTH_SCO; + @Deprecated public static final int ROUTE_BLUETOOTH_SCO = AudioSystem.ROUTE_BLUETOOTH_SCO; /** * Routing audio output to headset + * @deprecated Do not set audio routing directly, use setSpeakerphoneOn(), + * setBluetoothScoOn() methods instead. */ - public static final int ROUTE_HEADSET = AudioSystem.ROUTE_HEADSET; + @Deprecated public static final int ROUTE_HEADSET = AudioSystem.ROUTE_HEADSET; /** * Routing audio output to bluetooth A2DP + * @deprecated Do not set audio routing directly, use setSpeakerphoneOn(), + * setBluetoothScoOn() methods instead. */ - public static final int ROUTE_BLUETOOTH_A2DP = AudioSystem.ROUTE_BLUETOOTH_A2DP; + @Deprecated public static final int ROUTE_BLUETOOTH_A2DP = AudioSystem.ROUTE_BLUETOOTH_A2DP; /** * Used for mask parameter of {@link #setRouting(int,int,int)}. + * @deprecated Do not set audio routing directly, use setSpeakerphoneOn(), + * setBluetoothScoOn() methods instead. */ - public static final int ROUTE_ALL = AudioSystem.ROUTE_ALL; + @Deprecated public static final int ROUTE_ALL = AudioSystem.ROUTE_ALL; /** * Sets the audio routing for a specified mode @@ -846,16 +898,10 @@ public class AudioManager { * ROUTE_xxx types. 
Unset bits indicate the route should be left unchanged * * @deprecated Do not set audio routing directly, use setSpeakerphoneOn(), - * setBluetoothScoOn(), setBluetoothA2dpOn() and setWiredHeadsetOn() methods instead. + * setBluetoothScoOn() methods instead. */ - + @Deprecated public void setRouting(int mode, int routes, int mask) { - IAudioService service = getService(); - try { - service.setRouting(mode, routes, mask); - } catch (RemoteException e) { - Log.e(TAG, "Dead object in setRouting", e); - } } /** @@ -869,13 +915,7 @@ public class AudioManager { */ @Deprecated public int getRouting(int mode) { - IAudioService service = getService(); - try { - return service.getRouting(mode); - } catch (RemoteException e) { - Log.e(TAG, "Dead object in getRouting", e); - return -1; - } + return -1; } /** @@ -884,13 +924,7 @@ public class AudioManager { * @return true if any music tracks are active. */ public boolean isMusicActive() { - IAudioService service = getService(); - try { - return service.isMusicActive(); - } catch (RemoteException e) { - Log.e(TAG, "Dead object in isMusicActive", e); - return false; - } + return AudioSystem.isMusicActive(); } /* @@ -906,14 +940,32 @@ public class AudioManager { */ /** * @hide + * @deprecated Use {@link #setPrameters(String)} instead */ - public void setParameter(String key, String value) { - IAudioService service = getService(); - try { - service.setParameter(key, value); - } catch (RemoteException e) { - Log.e(TAG, "Dead object in setParameter", e); - } + @Deprecated public void setParameter(String key, String value) { + setParameters(key+"="+value); + } + + /** + * Sets a variable number of parameter values to audio hardware. + * + * @param keyValuePairs list of parameters key value pairs in the form: + * key1=value1;key2=value2;... + * + */ + public void setParameters(String keyValuePairs) { + AudioSystem.setParameters(keyValuePairs); + } + + /** + * Sets a varaible number of parameter values to audio hardware. 
+ * + * @param keys list of parameters + * @return list of parameters key value pairs in the form: + * key1=value1;key2=value2;... + */ + public String getParameters(String keys) { + return AudioSystem.getParameters(keys); } /* Sound effect identifiers */ @@ -1011,7 +1063,9 @@ public class AudioManager { * {@link #FX_KEYPRESS_SPACEBAR}, * {@link #FX_KEYPRESS_DELETE}, * {@link #FX_KEYPRESS_RETURN}, - * @param volume Sound effect volume + * @param volume Sound effect volume. + * The volume value is a raw scalar so UI controls should be scaled logarithmically. + * If a volume of -1 is specified, the AudioManager.STREAM_MUSIC stream volume minus 3dB will be used. * NOTE: This version is for applications that have their own * settings panel for enabling and controlling volume. */ @@ -1082,31 +1136,4 @@ public class AudioManager { * {@hide} */ private IBinder mICallBack = new Binder(); - - /** - * {@hide} - */ - private void setRoutingP(int mode, int routes, int mask) { - IAudioService service = getService(); - try { - service.setRouting(mode, routes, mask); - } catch (RemoteException e) { - Log.e(TAG, "Dead object in setRouting", e); - } - } - - - /** - * {@hide} - */ - private int getRoutingP(int mode) { - IAudioService service = getService(); - try { - return service.getRouting(mode); - } catch (RemoteException e) { - Log.e(TAG, "Dead object in getRouting", e); - return -1; - } - } - } diff --git a/media/java/android/media/AudioRecord.java b/media/java/android/media/AudioRecord.java index 4d1535f9342b..7a47157f9bc8 100644 --- a/media/java/android/media/AudioRecord.java +++ b/media/java/android/media/AudioRecord.java @@ -86,7 +86,7 @@ public class AudioRecord public static final int ERROR_INVALID_OPERATION = -3; private static final int AUDIORECORD_ERROR_SETUP_ZEROFRAMECOUNT = -16; - private static final int AUDIORECORD_ERROR_SETUP_INVALIDCHANNELCOUNT = -17; + private static final int AUDIORECORD_ERROR_SETUP_INVALIDCHANNELMASK = -17; private static final int 
AUDIORECORD_ERROR_SETUP_INVALIDFORMAT = -18; private static final int AUDIORECORD_ERROR_SETUP_INVALIDSOURCE = -19; private static final int AUDIORECORD_ERROR_SETUP_NATIVEINITFAILED = -20; @@ -133,9 +133,13 @@ public class AudioRecord */ private int mChannelCount = 1; /** + * The audio channel mask + */ + private int mChannels = AudioFormat.CHANNEL_IN_MONO; + /** * The current audio channel configuration */ - private int mChannelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO; + private int mChannelConfiguration = AudioFormat.CHANNEL_IN_MONO; /** * The encoding of the audio samples. * @see AudioFormat#ENCODING_PCM_8BIT @@ -193,8 +197,8 @@ public class AudioRecord * @param sampleRateInHz the sample rate expressed in Hertz. Examples of rates are (but * not limited to) 44100, 22050 and 11025. * @param channelConfig describes the configuration of the audio channels. - * See {@link AudioFormat#CHANNEL_CONFIGURATION_MONO} and - * {@link AudioFormat#CHANNEL_CONFIGURATION_STEREO} + * See {@link AudioFormat#CHANNEL_IN_MONO} and + * {@link AudioFormat#CHANNEL_IN_STEREO} * @param audioFormat the format in which the audio data is represented. * See {@link AudioFormat#ENCODING_PCM_16BIT} and * {@link AudioFormat#ENCODING_PCM_8BIT} @@ -224,7 +228,7 @@ public class AudioRecord //TODO: update native initialization when information about hardware init failure // due to capture device already open is available. 
int initResult = native_setup( new WeakReference<AudioRecord>(this), - mRecordSource, mSampleRate, mChannelCount, mAudioFormat, mNativeBufferSizeInBytes); + mRecordSource, mSampleRate, mChannels, mAudioFormat, mNativeBufferSizeInBytes); if (initResult != SUCCESS) { loge("Error code "+initResult+" when initializing native AudioRecord object."); return; // with mState == STATE_UNINITIALIZED @@ -239,6 +243,7 @@ public class AudioRecord // postconditions: // mRecordSource is valid // mChannelCount is valid + // mChannels is valid // mAudioFormat is valid // mSampleRate is valid private void audioParamCheck(int audioSource, int sampleRateInHz, @@ -264,20 +269,25 @@ public class AudioRecord //-------------- // channel config + mChannelConfiguration = channelConfig; + switch (channelConfig) { - case AudioFormat.CHANNEL_CONFIGURATION_DEFAULT: + case AudioFormat.CHANNEL_IN_DEFAULT: // AudioFormat.CHANNEL_CONFIGURATION_DEFAULT + case AudioFormat.CHANNEL_IN_MONO: case AudioFormat.CHANNEL_CONFIGURATION_MONO: mChannelCount = 1; - mChannelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO; + mChannels = AudioFormat.CHANNEL_IN_MONO; break; + case AudioFormat.CHANNEL_IN_STEREO: case AudioFormat.CHANNEL_CONFIGURATION_STEREO: mChannelCount = 2; - mChannelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_STEREO; + mChannels = AudioFormat.CHANNEL_IN_STEREO; break; default: mChannelCount = 0; - mChannelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_INVALID; - throw (new IllegalArgumentException("Unsupported channel configuration.")); + mChannels = AudioFormat.CHANNEL_INVALID; + mChannelConfiguration = AudioFormat.CHANNEL_INVALID; + throw (new IllegalArgumentException("Unsupported channel configuration.")); } //-------------- @@ -368,8 +378,8 @@ public class AudioRecord /** * Returns the configured channel configuration. - * See {@link AudioFormat#CHANNEL_CONFIGURATION_MONO} - * and {@link AudioFormat#CHANNEL_CONFIGURATION_STEREO}. 
+ * See {@link AudioFormat#CHANNEL_IN_MONO} + * and {@link AudioFormat#CHANNEL_IN_STEREO}. */ public int getChannelConfiguration() { return mChannelConfiguration; @@ -425,8 +435,8 @@ public class AudioRecord * will be polled for new data. * @param sampleRateInHz the sample rate expressed in Hertz. * @param channelConfig describes the configuration of the audio channels. - * See {@link AudioFormat#CHANNEL_CONFIGURATION_MONO} and - * {@link AudioFormat#CHANNEL_CONFIGURATION_STEREO} + * See {@link AudioFormat#CHANNEL_IN_MONO} and + * {@link AudioFormat#CHANNEL_IN_STEREO} * @param audioFormat the format in which the audio data is represented. * See {@link AudioFormat#ENCODING_PCM_16BIT}. * @return {@link #ERROR_BAD_VALUE} if the recording parameters are not supported by the @@ -438,14 +448,16 @@ public class AudioRecord static public int getMinBufferSize(int sampleRateInHz, int channelConfig, int audioFormat) { int channelCount = 0; switch(channelConfig) { - case AudioFormat.CHANNEL_CONFIGURATION_DEFAULT: + case AudioFormat.CHANNEL_IN_DEFAULT: // AudioFormat.CHANNEL_CONFIGURATION_DEFAULT + case AudioFormat.CHANNEL_IN_MONO: case AudioFormat.CHANNEL_CONFIGURATION_MONO: channelCount = 1; break; + case AudioFormat.CHANNEL_IN_STEREO: case AudioFormat.CHANNEL_CONFIGURATION_STEREO: channelCount = 2; break; - case AudioFormat.CHANNEL_CONFIGURATION_INVALID: + case AudioFormat.CHANNEL_INVALID: default: loge("getMinBufferSize(): Invalid channel configuration."); return AudioRecord.ERROR_BAD_VALUE; diff --git a/media/java/android/media/AudioService.java b/media/java/android/media/AudioService.java index 58c04f3a1f74..58a0bba163f4 100644 --- a/media/java/android/media/AudioService.java +++ b/media/java/android/media/AudioService.java @@ -17,9 +17,16 @@ package android.media; import android.app.ActivityManagerNative; +import android.content.BroadcastReceiver; import android.content.ContentResolver; import android.content.Context; import android.content.Intent; +import 
android.content.IntentFilter; +import android.bluetooth.BluetoothA2dp; +import android.bluetooth.BluetoothClass; +import android.bluetooth.BluetoothDevice; +import android.bluetooth.BluetoothHeadset; + import android.content.pm.PackageManager; import android.database.ContentObserver; import android.media.MediaPlayer.OnCompletionListener; @@ -36,11 +43,16 @@ import android.provider.Settings; import android.provider.Settings.System; import android.util.Log; import android.view.VolumePanel; +import android.os.SystemProperties; import com.android.internal.telephony.ITelephony; import java.io.IOException; import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; +import java.util.Set; /** @@ -94,16 +106,10 @@ public class AudioService extends IAudioService.Stub { /** @see VolumeStreamState */ private VolumeStreamState[] mStreamStates; private SettingsObserver mSettingsObserver; - - private boolean mMicMute; + private int mMode; - private int[] mRoutes = new int[AudioSystem.NUM_MODES]; private Object mSettingsLock = new Object(); private boolean mMediaServerOk; - private boolean mSpeakerIsOn; - private boolean mBluetoothScoIsConnected; - private boolean mHeadsetIsConnected; - private boolean mBluetoothA2dpIsConnected; private SoundPool mSoundPool; private Object mSoundEffectsLock = new Object(); @@ -135,6 +141,36 @@ public class AudioService extends IAudioService.Stub { {4, -1} // FX_FOCUS_RETURN }; + /** @hide Maximum volume index values for audio streams */ + private int[] MAX_STREAM_VOLUME = new int[] { + 5, // STREAM_VOICE_CALL + 7, // STREAM_SYSTEM + 7, // STREAM_RING + 15, // STREAM_MUSIC + 7, // STREAM_ALARM + 7, // STREAM_NOTIFICATION + 15, // STREAM_BLUETOOTH_SCO + 7, // STREAM_SYSTEM_ENFORCED + 15, // STREAM_DTMF + 15 // STREAM_TTS + }; + /* STREAM_VOLUME_ALIAS[] indicates for each stream if it uses the volume settings + * of another stream: This avoids multiplying the volume settings for hidden + * stream types 
that follow other stream behavior for volume settings + * NOTE: do not create loops in aliases! */ + private int[] STREAM_VOLUME_ALIAS = new int[] { + AudioSystem.STREAM_VOICE_CALL, // STREAM_VOICE_CALL + AudioSystem.STREAM_SYSTEM, // STREAM_SYSTEM + AudioSystem.STREAM_RING, // STREAM_RING + AudioSystem.STREAM_MUSIC, // STREAM_MUSIC + AudioSystem.STREAM_ALARM, // STREAM_ALARM + AudioSystem.STREAM_NOTIFICATION, // STREAM_NOTIFICATION + AudioSystem.STREAM_VOICE_CALL, // STREAM_BLUETOOTH_SCO + AudioSystem.STREAM_SYSTEM, // STREAM_SYSTEM_ENFORCED + AudioSystem.STREAM_VOICE_CALL, // STREAM_DTMF + AudioSystem.STREAM_MUSIC // STREAM_TTS + }; + private AudioSystem.ErrorCallback mAudioSystemCallback = new AudioSystem.ErrorCallback() { public void onError(int error) { switch (error) { @@ -142,12 +178,14 @@ public class AudioService extends IAudioService.Stub { if (mMediaServerOk) { sendMsg(mAudioHandler, MSG_MEDIA_SERVER_DIED, SHARED_MSG, SENDMSG_NOOP, 0, 0, null, 1500); + mMediaServerOk = false; } break; case AudioSystem.AUDIO_STATUS_OK: if (!mMediaServerOk) { sendMsg(mAudioHandler, MSG_MEDIA_SERVER_STARTED, SHARED_MSG, SENDMSG_NOOP, 0, 0, null, 0); + mMediaServerOk = true; } break; default: @@ -178,6 +216,27 @@ public class AudioService extends IAudioService.Stub { */ private int mVibrateSetting; + /** @see System#NOTIFICATIONS_USE_RING_VOLUME */ + private int mNotificationsUseRingVolume; + + // Broadcast receiver for device connections intent broadcasts + private final BroadcastReceiver mReceiver = new AudioServiceBroadcastReceiver(); + + //TODO: use common definitions with HeadsetObserver + private static final int BIT_HEADSET = (1 << 0); + private static final int BIT_HEADSET_NO_MIC = (1 << 1); + private static final int BIT_TTY = (1 << 2); + private static final int BIT_FM_HEADSET = (1 << 3); + private static final int BIT_FM_SPEAKER = (1 << 4); + + private int mHeadsetState; + + // Devices currently connected + private HashMap <Integer, String> mConnectedDevices = new 
HashMap <Integer, String>(); + + // Forced device usage for communications + private int mForcedUseForComm; + /////////////////////////////////////////////////////////////////////////// // Construction /////////////////////////////////////////////////////////////////////////// @@ -186,20 +245,31 @@ public class AudioService extends IAudioService.Stub { public AudioService(Context context) { mContext = context; mContentResolver = context.getContentResolver(); + + // Intialized volume + MAX_STREAM_VOLUME[AudioSystem.STREAM_VOICE_CALL] = SystemProperties.getInt( + "ro.config.vc_call_vol_steps", + MAX_STREAM_VOLUME[AudioSystem.STREAM_VOICE_CALL]); + mVolumePanel = new VolumePanel(context, this); mSettingsObserver = new SettingsObserver(); - + mMode = AudioSystem.MODE_NORMAL; + mHeadsetState = 0; + mForcedUseForComm = AudioSystem.FORCE_NONE; createAudioSystemThread(); - createStreamStates(); readPersistedSettings(); - readAudioSettings(); + createStreamStates(); mMediaServerOk = true; AudioSystem.setErrorCallback(mAudioSystemCallback); loadSoundEffects(); - mSpeakerIsOn = false; - mBluetoothScoIsConnected = false; - mHeadsetIsConnected = false; - mBluetoothA2dpIsConnected = false; + + // Register for device connection intent broadcasts. 
+ IntentFilter intentFilter = + new IntentFilter(Intent.ACTION_HEADSET_PLUG); + intentFilter.addAction(BluetoothA2dp.ACTION_SINK_STATE_CHANGED); + intentFilter.addAction(BluetoothHeadset.ACTION_STATE_CHANGED); + context.registerReceiver(mReceiver, intentFilter); + } private void createAudioSystemThread() { @@ -223,63 +293,23 @@ public class AudioService extends IAudioService.Stub { } private void createStreamStates() { - final int[] volumeLevelsPhone = - createVolumeLevels(0, AudioManager.MAX_STREAM_VOLUME[AudioManager.STREAM_VOICE_CALL]); - final int[] volumeLevelsCoarse = - createVolumeLevels(0, AudioManager.MAX_STREAM_VOLUME[AudioManager.STREAM_SYSTEM]); - final int[] volumeLevelsFine = - createVolumeLevels(0, AudioManager.MAX_STREAM_VOLUME[AudioManager.STREAM_MUSIC]); - final int[] volumeLevelsBtPhone = - createVolumeLevels(0, - AudioManager.MAX_STREAM_VOLUME[AudioManager.STREAM_BLUETOOTH_SCO]); - int numStreamTypes = AudioSystem.getNumStreamTypes(); VolumeStreamState[] streams = mStreamStates = new VolumeStreamState[numStreamTypes]; for (int i = 0; i < numStreamTypes; i++) { - final int[] levels; - - switch (i) { - - case AudioSystem.STREAM_MUSIC: - levels = volumeLevelsFine; - break; - - case AudioSystem.STREAM_VOICE_CALL: - levels = volumeLevelsPhone; - break; - - case AudioSystem.STREAM_BLUETOOTH_SCO: - levels = volumeLevelsBtPhone; - break; - - default: - levels = volumeLevelsCoarse; - break; - } - - if (i == AudioSystem.STREAM_BLUETOOTH_SCO) { - streams[i] = new VolumeStreamState(AudioManager.DEFAULT_STREAM_VOLUME[i], i,levels); - } else { - streams[i] = new VolumeStreamState(System.VOLUME_SETTINGS[i], i, levels); - } - } - } - - private static int[] createVolumeLevels(int offset, int numlevels) { - double curve = 1.0f; // 1.4f - int [] volumes = new int[numlevels + offset]; - for (int i = 0; i < offset; i++) { - volumes[i] = 0; + streams[i] = new VolumeStreamState(System.VOLUME_SETTINGS[STREAM_VOLUME_ALIAS[i]], i); } - double val = 0; - double max = 
Math.pow(numlevels - 1, curve); - for (int i = 0; i < numlevels; i++) { - val = Math.pow(i, curve) / max; - volumes[offset + i] = (int) (val * 100.0f); + // Correct stream index values for streams with aliases + for (int i = 0; i < numStreamTypes; i++) { + if (STREAM_VOLUME_ALIAS[i] != i) { + int index = rescaleIndex(streams[i].mIndex, STREAM_VOLUME_ALIAS[i], i); + streams[i].mIndex = streams[i].getValidIndex(index); + setStreamVolumeIndex(i, index); + index = rescaleIndex(streams[i].mLastAudibleIndex, STREAM_VOLUME_ALIAS[i], i); + streams[i].mLastAudibleIndex = streams[i].getValidIndex(index); + } } - return volumes; } private void readPersistedSettings() { @@ -291,12 +321,19 @@ public class AudioService extends IAudioService.Stub { mRingerModeAffectedStreams = Settings.System.getInt(cr, Settings.System.MODE_RINGER_STREAMS_AFFECTED, - ((1 << AudioManager.STREAM_RING)|(1 << AudioManager.STREAM_NOTIFICATION)|(1 << AudioManager.STREAM_SYSTEM))); + ((1 << AudioSystem.STREAM_RING)|(1 << AudioSystem.STREAM_NOTIFICATION)| + (1 << AudioSystem.STREAM_SYSTEM)|(1 << AudioSystem.STREAM_SYSTEM_ENFORCED))); mMuteAffectedStreams = System.getInt(cr, System.MUTE_STREAMS_AFFECTED, ((1 << AudioSystem.STREAM_MUSIC)|(1 << AudioSystem.STREAM_RING)|(1 << AudioSystem.STREAM_SYSTEM))); + mNotificationsUseRingVolume = System.getInt(cr, + Settings.System.NOTIFICATIONS_USE_RING_VOLUME, 1); + + if (mNotificationsUseRingVolume == 1) { + STREAM_VOLUME_ALIAS[AudioSystem.STREAM_NOTIFICATION] = AudioSystem.STREAM_RING; + } // Each stream will read its own persisted settings // Broadcast the sticky intent @@ -307,25 +344,13 @@ public class AudioService extends IAudioService.Stub { broadcastVibrateSetting(AudioManager.VIBRATE_TYPE_NOTIFICATION); } - private void readAudioSettings() { - synchronized (mSettingsLock) { - mMicMute = AudioSystem.isMicrophoneMuted(); - mMode = AudioSystem.getMode(); - for (int mode = 0; mode < AudioSystem.NUM_MODES; mode++) { - mRoutes[mode] = 
AudioSystem.getRouting(mode); - } - } + private void setStreamVolumeIndex(int stream, int index) { + AudioSystem.setStreamVolumeIndex(stream, (index + 5)/10); } - private void applyAudioSettings() { - synchronized (mSettingsLock) { - AudioSystem.muteMicrophone(mMicMute); - AudioSystem.setMode(mMode); - for (int mode = 0; mode < AudioSystem.NUM_MODES; mode++) { - AudioSystem.setRouting(mode, mRoutes[mode], AudioSystem.ROUTE_ALL); - } - } - } + private int rescaleIndex(int index, int srcStream, int dstStream) { + return (index * mStreamStates[dstStream].getMaxIndex() + mStreamStates[srcStream].getMaxIndex() / 2) / mStreamStates[srcStream].getMaxIndex(); + } /////////////////////////////////////////////////////////////////////////// // IPC methods @@ -354,44 +379,26 @@ public class AudioService extends IAudioService.Stub { ensureValidDirection(direction); ensureValidStreamType(streamType); - boolean notificationsUseRingVolume = Settings.System.getInt(mContentResolver, - Settings.System.NOTIFICATIONS_USE_RING_VOLUME, 1) == 1; - if (notificationsUseRingVolume && streamType == AudioManager.STREAM_NOTIFICATION) { - // Redirect the volume change to the ring stream - streamType = AudioManager.STREAM_RING; - } - VolumeStreamState streamState = mStreamStates[streamType]; + VolumeStreamState streamState = mStreamStates[STREAM_VOLUME_ALIAS[streamType]]; final int oldIndex = streamState.mIndex; boolean adjustVolume = true; // If either the client forces allowing ringer modes for this adjustment, // or the stream type is one that is affected by ringer modes if ((flags & AudioManager.FLAG_ALLOW_RINGER_MODES) != 0 - || streamType == AudioManager.STREAM_RING) { + || streamType == AudioSystem.STREAM_RING) { // Check if the ringer mode changes with this volume adjustment. 
If // it does, it will handle adjusting the volume, so we won't below adjustVolume = checkForRingerModeChange(oldIndex, direction); } if (adjustVolume && streamState.adjustIndex(direction)) { - - boolean alsoUpdateNotificationVolume = notificationsUseRingVolume && - streamType == AudioManager.STREAM_RING; - if (alsoUpdateNotificationVolume) { - mStreamStates[AudioManager.STREAM_NOTIFICATION].adjustIndex(direction); - } - // Post message to set system volume (it in turn will post a message // to persist). Do not change volume if stream is muted. if (streamState.muteCount() == 0) { - sendMsg(mAudioHandler, MSG_SET_SYSTEM_VOLUME, streamType, SENDMSG_NOOP, 0, 0, + sendMsg(mAudioHandler, MSG_SET_SYSTEM_VOLUME, STREAM_VOLUME_ALIAS[streamType], SENDMSG_NOOP, 0, 0, streamState, 0); - - if (alsoUpdateNotificationVolume) { - sendMsg(mAudioHandler, MSG_SET_SYSTEM_VOLUME, AudioManager.STREAM_NOTIFICATION, - SENDMSG_NOOP, 0, 0, mStreamStates[AudioManager.STREAM_NOTIFICATION], 0); - } } } @@ -404,9 +411,8 @@ public class AudioService extends IAudioService.Stub { /** @see AudioManager#setStreamVolume(int, int, int) */ public void setStreamVolume(int streamType, int index, int flags) { ensureValidStreamType(streamType); - syncRingerAndNotificationStreamVolume(streamType, index, false); - - setStreamVolumeInt(streamType, index, false, true); + index = rescaleIndex(index * 10, streamType, STREAM_VOLUME_ALIAS[streamType]); + setStreamVolumeInt(STREAM_VOLUME_ALIAS[streamType], index, false, true); // UI, etc. 
mVolumePanel.postVolumeChanged(streamType, flags); @@ -420,37 +426,12 @@ public class AudioService extends IAudioService.Stub { intent.putExtra(AudioManager.EXTRA_VOLUME_STREAM_VALUE, getStreamVolume(streamType)); // Currently, sending the intent only when the stream is BLUETOOTH_SCO - if (streamType == AudioManager.STREAM_BLUETOOTH_SCO) { + if (streamType == AudioSystem.STREAM_BLUETOOTH_SCO) { mContext.sendBroadcast(intent); } } /** - * Sync the STREAM_RING and STREAM_NOTIFICATION volumes if mandated by the - * value in Settings. - * - * @param streamType Type of the stream - * @param index Volume index for the stream - * @param force If true, set the volume even if the current and desired - * volume as same - */ - private void syncRingerAndNotificationStreamVolume(int streamType, int index, boolean force) { - boolean notificationsUseRingVolume = Settings.System.getInt(mContentResolver, - Settings.System.NOTIFICATIONS_USE_RING_VOLUME, 1) == 1; - if (notificationsUseRingVolume) { - if (streamType == AudioManager.STREAM_NOTIFICATION) { - // Redirect the volume change to the ring stream - streamType = AudioManager.STREAM_RING; - } - if (streamType == AudioManager.STREAM_RING) { - // One-off to sync notification volume to ringer volume - setStreamVolumeInt(AudioManager.STREAM_NOTIFICATION, index, force, true); - } - } - } - - - /** * Sets the stream state's index, and posts a message to set system volume. * This will not call out to the UI. Assumes a valid stream type. 
* @@ -491,13 +472,13 @@ public class AudioService extends IAudioService.Stub { /** @see AudioManager#getStreamVolume(int) */ public int getStreamVolume(int streamType) { ensureValidStreamType(streamType); - return mStreamStates[streamType].mIndex; + return (mStreamStates[streamType].mIndex + 5) / 10; } /** @see AudioManager#getStreamMaxVolume(int) */ public int getStreamMaxVolume(int streamType) { ensureValidStreamType(streamType); - return mStreamStates[streamType].getMaxIndex(); + return (mStreamStates[streamType].getMaxIndex() + 5) / 10; } /** @see AudioManager#getRingerMode() */ @@ -507,11 +488,12 @@ public class AudioService extends IAudioService.Stub { /** @see AudioManager#setRingerMode(int) */ public void setRingerMode(int ringerMode) { - if (ringerMode != mRingerMode) { - setRingerModeInt(ringerMode, true); - - // Send sticky broadcast - broadcastRingerMode(); + synchronized (mSettingsLock) { + if (ringerMode != mRingerMode) { + setRingerModeInt(ringerMode, true); + // Send sticky broadcast + broadcastRingerMode(); + } } } @@ -541,7 +523,7 @@ public class AudioService extends IAudioService.Stub { } } } - + // Post a persist ringer mode msg if (persist) { sendMsg(mAudioHandler, MSG_PERSIST_RINGER_MODE, SHARED_MSG, @@ -606,39 +588,28 @@ public class AudioService extends IAudioService.Stub { return existingValue; } - /** @see AudioManager#setMicrophoneMute(boolean) */ - public void setMicrophoneMute(boolean on) { - if (!checkAudioSettingsPermission("setMicrophoneMute()")) { - return; - } - synchronized (mSettingsLock) { - if (on != mMicMute) { - AudioSystem.muteMicrophone(on); - mMicMute = on; - } - } - } - - /** @see AudioManager#isMicrophoneMute() */ - public boolean isMicrophoneMute() { - return mMicMute; - } - /** @see AudioManager#setMode(int) */ public void setMode(int mode) { if (!checkAudioSettingsPermission("setMode()")) { return; } + + if (mode < AudioSystem.MODE_CURRENT || mode > AudioSystem.MODE_IN_CALL) { + return; + } + synchronized 
(mSettingsLock) { + if (mode == AudioSystem.MODE_CURRENT) { + mode = mMode; + } if (mode != mMode) { - if (AudioSystem.setMode(mode) == AudioSystem.AUDIO_STATUS_OK) { + if (AudioSystem.setPhoneState(mode) == AudioSystem.AUDIO_STATUS_OK) { mMode = mode; } } int streamType = getActiveStreamType(AudioManager.USE_DEFAULT_STREAM_TYPE); - int index = mStreamStates[streamType].mIndex; - syncRingerAndNotificationStreamVolume(streamType, index, true); - setStreamVolumeInt(streamType, index, true, true); + int index = mStreamStates[STREAM_VOLUME_ALIAS[streamType]].mIndex; + setStreamVolumeInt(STREAM_VOLUME_ALIAS[streamType], index, true, true); } } @@ -647,195 +618,15 @@ public class AudioService extends IAudioService.Stub { return mMode; } - /** @see AudioManager#setRouting(int, int, int) */ - public void setRouting(int mode, int routes, int mask) { - int incallMask = 0; - int ringtoneMask = 0; - int normalMask = 0; - - if (!checkAudioSettingsPermission("setRouting()")) { - return; - } - synchronized (mSettingsLock) { - // Temporary fix for issue #1713090 until audio routing is refactored in eclair release. - // mode AudioSystem.MODE_INVALID is used only by the following AudioManager methods: - // setWiredHeadsetOn(), setBluetoothA2dpOn(), setBluetoothScoOn() and setSpeakerphoneOn(). - // If applications are using AudioManager.setRouting() that is now deprecated, the routing - // command will be ignored. 
- if (mode == AudioSystem.MODE_INVALID) { - switch (mask) { - case AudioSystem.ROUTE_SPEAKER: - // handle setSpeakerphoneOn() - if (routes != 0 && !mSpeakerIsOn) { - mSpeakerIsOn = true; - mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_SPEAKER; - incallMask = AudioSystem.ROUTE_ALL; - } else if (routes == 0 && mSpeakerIsOn) { - mSpeakerIsOn = false; - if (mBluetoothScoIsConnected) { - mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_BLUETOOTH_SCO; - } else if (mHeadsetIsConnected) { - mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_HEADSET; - } else { - mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_EARPIECE; - } - incallMask = AudioSystem.ROUTE_ALL; - } - break; - - case AudioSystem.ROUTE_BLUETOOTH_SCO: - // handle setBluetoothScoOn() - if (routes != 0 && !mBluetoothScoIsConnected) { - mBluetoothScoIsConnected = true; - mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_BLUETOOTH_SCO; - mRoutes[AudioSystem.MODE_RINGTONE] = (mRoutes[AudioSystem.MODE_RINGTONE] & AudioSystem.ROUTE_BLUETOOTH_A2DP) | - AudioSystem.ROUTE_BLUETOOTH_SCO; - mRoutes[AudioSystem.MODE_NORMAL] = (mRoutes[AudioSystem.MODE_NORMAL] & AudioSystem.ROUTE_BLUETOOTH_A2DP) | - AudioSystem.ROUTE_BLUETOOTH_SCO; - incallMask = AudioSystem.ROUTE_ALL; - // A2DP has higher priority than SCO headset, so headset connect/disconnect events - // should not affect A2DP routing - ringtoneMask = AudioSystem.ROUTE_ALL & ~AudioSystem.ROUTE_BLUETOOTH_A2DP; - normalMask = AudioSystem.ROUTE_ALL & ~AudioSystem.ROUTE_BLUETOOTH_A2DP; - } else if (routes == 0 && mBluetoothScoIsConnected) { - mBluetoothScoIsConnected = false; - if (mHeadsetIsConnected) { - mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_HEADSET; - mRoutes[AudioSystem.MODE_RINGTONE] = (mRoutes[AudioSystem.MODE_RINGTONE] & AudioSystem.ROUTE_BLUETOOTH_A2DP) | - (AudioSystem.ROUTE_HEADSET|AudioSystem.ROUTE_SPEAKER); - mRoutes[AudioSystem.MODE_NORMAL] = (mRoutes[AudioSystem.MODE_NORMAL] & AudioSystem.ROUTE_BLUETOOTH_A2DP) | - 
AudioSystem.ROUTE_HEADSET; - } else { - if (mSpeakerIsOn) { - mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_SPEAKER; - } else { - mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_EARPIECE; - } - mRoutes[AudioSystem.MODE_RINGTONE] = (mRoutes[AudioSystem.MODE_RINGTONE] & AudioSystem.ROUTE_BLUETOOTH_A2DP) | - AudioSystem.ROUTE_SPEAKER; - mRoutes[AudioSystem.MODE_NORMAL] = (mRoutes[AudioSystem.MODE_NORMAL] & AudioSystem.ROUTE_BLUETOOTH_A2DP) | - AudioSystem.ROUTE_SPEAKER; - } - incallMask = AudioSystem.ROUTE_ALL; - // A2DP has higher priority than SCO headset, so headset connect/disconnect events - // should not affect A2DP routing - ringtoneMask = AudioSystem.ROUTE_ALL & ~AudioSystem.ROUTE_BLUETOOTH_A2DP; - normalMask = AudioSystem.ROUTE_ALL & ~AudioSystem.ROUTE_BLUETOOTH_A2DP; - } - break; - - case AudioSystem.ROUTE_HEADSET: - // handle setWiredHeadsetOn() - if (routes != 0 && !mHeadsetIsConnected) { - mHeadsetIsConnected = true; - // do not act upon headset connection if bluetooth SCO is connected to match phone app behavior - if (!mBluetoothScoIsConnected) { - mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_HEADSET; - mRoutes[AudioSystem.MODE_RINGTONE] = (mRoutes[AudioSystem.MODE_RINGTONE] & AudioSystem.ROUTE_BLUETOOTH_A2DP) | - (AudioSystem.ROUTE_HEADSET|AudioSystem.ROUTE_SPEAKER); - mRoutes[AudioSystem.MODE_NORMAL] = (mRoutes[AudioSystem.MODE_NORMAL] & AudioSystem.ROUTE_BLUETOOTH_A2DP) | - AudioSystem.ROUTE_HEADSET; - incallMask = AudioSystem.ROUTE_ALL; - // A2DP has higher priority than wired headset, so headset connect/disconnect events - // should not affect A2DP routing - ringtoneMask = AudioSystem.ROUTE_ALL & ~AudioSystem.ROUTE_BLUETOOTH_A2DP; - normalMask = AudioSystem.ROUTE_ALL & ~AudioSystem.ROUTE_BLUETOOTH_A2DP; - } - } else if (routes == 0 && mHeadsetIsConnected) { - mHeadsetIsConnected = false; - // do not act upon headset disconnection if bluetooth SCO is connected to match phone app behavior - if (!mBluetoothScoIsConnected) { - if 
(mSpeakerIsOn) { - mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_SPEAKER; - } else { - mRoutes[AudioSystem.MODE_IN_CALL] = AudioSystem.ROUTE_EARPIECE; - } - mRoutes[AudioSystem.MODE_RINGTONE] = (mRoutes[AudioSystem.MODE_RINGTONE] & AudioSystem.ROUTE_BLUETOOTH_A2DP) | - AudioSystem.ROUTE_SPEAKER; - mRoutes[AudioSystem.MODE_NORMAL] = (mRoutes[AudioSystem.MODE_NORMAL] & AudioSystem.ROUTE_BLUETOOTH_A2DP) | - AudioSystem.ROUTE_SPEAKER; - - incallMask = AudioSystem.ROUTE_ALL; - // A2DP has higher priority than wired headset, so headset connect/disconnect events - // should not affect A2DP routing - ringtoneMask = AudioSystem.ROUTE_ALL & ~AudioSystem.ROUTE_BLUETOOTH_A2DP; - normalMask = AudioSystem.ROUTE_ALL & ~AudioSystem.ROUTE_BLUETOOTH_A2DP; - } - } - break; - - case AudioSystem.ROUTE_BLUETOOTH_A2DP: - // handle setBluetoothA2dpOn() - if (routes != 0 && !mBluetoothA2dpIsConnected) { - mBluetoothA2dpIsConnected = true; - mRoutes[AudioSystem.MODE_RINGTONE] |= AudioSystem.ROUTE_BLUETOOTH_A2DP; - mRoutes[AudioSystem.MODE_NORMAL] |= AudioSystem.ROUTE_BLUETOOTH_A2DP; - // the audio flinger chooses A2DP as a higher priority, - // so there is no need to disable other routes. - ringtoneMask = AudioSystem.ROUTE_BLUETOOTH_A2DP; - normalMask = AudioSystem.ROUTE_BLUETOOTH_A2DP; - } else if (routes == 0 && mBluetoothA2dpIsConnected) { - mBluetoothA2dpIsConnected = false; - mRoutes[AudioSystem.MODE_RINGTONE] &= ~AudioSystem.ROUTE_BLUETOOTH_A2DP; - mRoutes[AudioSystem.MODE_NORMAL] &= ~AudioSystem.ROUTE_BLUETOOTH_A2DP; - // the audio flinger chooses A2DP as a higher priority, - // so there is no need to disable other routes. 
- ringtoneMask = AudioSystem.ROUTE_BLUETOOTH_A2DP; - normalMask = AudioSystem.ROUTE_BLUETOOTH_A2DP; - } - break; - } - - // incallMask is != 0 means we must apply ne routing to MODE_IN_CALL mode - if (incallMask != 0) { - AudioSystem.setRouting(AudioSystem.MODE_IN_CALL, - mRoutes[AudioSystem.MODE_IN_CALL], - incallMask); - } - // ringtoneMask is != 0 means we must apply ne routing to MODE_RINGTONE mode - if (ringtoneMask != 0) { - AudioSystem.setRouting(AudioSystem.MODE_RINGTONE, - mRoutes[AudioSystem.MODE_RINGTONE], - ringtoneMask); - } - // normalMask is != 0 means we must apply ne routing to MODE_NORMAL mode - if (normalMask != 0) { - AudioSystem.setRouting(AudioSystem.MODE_NORMAL, - mRoutes[AudioSystem.MODE_NORMAL], - normalMask); - } - - int streamType = getActiveStreamType(AudioManager.USE_DEFAULT_STREAM_TYPE); - int index = mStreamStates[streamType].mIndex; - syncRingerAndNotificationStreamVolume(streamType, index, true); - setStreamVolumeInt(streamType, index, true, true); - } - } - } - - /** @see AudioManager#getRouting(int) */ - public int getRouting(int mode) { - return mRoutes[mode]; - } - - /** @see AudioManager#isMusicActive() */ - public boolean isMusicActive() { - return AudioSystem.isMusicActive(); - } - - /** @see AudioManager#setParameter(String, String) */ - public void setParameter(String key, String value) { - AudioSystem.setParameter(key, value); - } - /** @see AudioManager#playSoundEffect(int) */ public void playSoundEffect(int effectType) { sendMsg(mAudioHandler, MSG_PLAY_SOUND_EFFECT, SHARED_MSG, SENDMSG_NOOP, - effectType, SOUND_EFFECT_VOLUME, null, 0); + effectType, -1, null, 0); } /** @see AudioManager#playSoundEffect(int, float) */ public void playSoundEffectVolume(int effectType, float volume) { + loadSoundEffects(); sendMsg(mAudioHandler, MSG_PLAY_SOUND_EFFECT, SHARED_MSG, SENDMSG_NOOP, effectType, (int) (volume * 1000), null, 0); } @@ -846,6 +637,9 @@ public class AudioService extends IAudioService.Stub { */ public boolean 
loadSoundEffects() { synchronized (mSoundEffectsLock) { + if (mSoundPool != null) { + return true; + } mSoundPool = new SoundPool(NUM_SOUNDPOOL_CHANNELS, AudioSystem.STREAM_SYSTEM, 0); if (mSoundPool == null) { return false; @@ -926,18 +720,29 @@ public class AudioService extends IAudioService.Stub { for (int streamType = 0; streamType < numStreamTypes; streamType++) { VolumeStreamState streamState = mStreamStates[streamType]; - // there is no volume setting for STREAM_BLUETOOTH_SCO - if (streamType != AudioSystem.STREAM_BLUETOOTH_SCO) { - String settingName = System.VOLUME_SETTINGS[streamType]; - String lastAudibleSettingName = settingName + System.APPEND_FOR_LAST_AUDIBLE; - - streamState.mIndex = streamState.getValidIndex(Settings.System.getInt(mContentResolver, - settingName, - AudioManager.DEFAULT_STREAM_VOLUME[streamType])); - streamState.mLastAudibleIndex = streamState.getValidIndex(Settings.System.getInt(mContentResolver, - lastAudibleSettingName, - streamState.mIndex > 0 ? streamState.mIndex : AudioManager.DEFAULT_STREAM_VOLUME[streamType])); + String settingName = System.VOLUME_SETTINGS[STREAM_VOLUME_ALIAS[streamType]]; + String lastAudibleSettingName = settingName + System.APPEND_FOR_LAST_AUDIBLE; + int index = Settings.System.getInt(mContentResolver, + settingName, + AudioManager.DEFAULT_STREAM_VOLUME[streamType]); + if (STREAM_VOLUME_ALIAS[streamType] != streamType) { + index = rescaleIndex(index * 10, STREAM_VOLUME_ALIAS[streamType], streamType); + } else { + index *= 10; } + streamState.mIndex = streamState.getValidIndex(index); + + index = (index + 5) / 10; + index = Settings.System.getInt(mContentResolver, + lastAudibleSettingName, + (index > 0) ? 
index : AudioManager.DEFAULT_STREAM_VOLUME[streamType]); + if (STREAM_VOLUME_ALIAS[streamType] != streamType) { + index = rescaleIndex(index * 10, STREAM_VOLUME_ALIAS[streamType], streamType); + } else { + index *= 10; + } + streamState.mLastAudibleIndex = streamState.getValidIndex(index); + // unmute stream that whas muted but is not affect by mute anymore if (streamState.muteCount() != 0 && !isStreamAffectedByMute(streamType)) { int size = streamState.mDeathHandlers.size(); @@ -948,7 +753,7 @@ public class AudioService extends IAudioService.Stub { } // apply stream volume if (streamState.muteCount() == 0) { - AudioSystem.setVolume(streamType, streamState.mVolumes[streamState.mIndex]); + setStreamVolumeIndex(streamType, streamState.mIndex); } } @@ -956,6 +761,54 @@ public class AudioService extends IAudioService.Stub { setRingerModeInt(getRingerMode(), false); } + /** @see AudioManager#setSpeakerphoneOn() */ + public void setSpeakerphoneOn(boolean on){ + if (!checkAudioSettingsPermission("setSpeakerphoneOn()")) { + return; + } + if (on) { + AudioSystem.setForceUse(AudioSystem.FOR_COMMUNICATION, AudioSystem.FORCE_SPEAKER); + mForcedUseForComm = AudioSystem.FORCE_SPEAKER; + } else { + AudioSystem.setForceUse(AudioSystem.FOR_COMMUNICATION, AudioSystem.FORCE_NONE); + mForcedUseForComm = AudioSystem.FORCE_NONE; + } + } + + /** @see AudioManager#isSpeakerphoneOn() */ + public boolean isSpeakerphoneOn() { + if (mForcedUseForComm == AudioSystem.FORCE_SPEAKER) { + return true; + } else { + return false; + } + } + + /** @see AudioManager#setBluetoothScoOn() */ + public void setBluetoothScoOn(boolean on){ + if (!checkAudioSettingsPermission("setBluetoothScoOn()")) { + return; + } + if (on) { + AudioSystem.setForceUse(AudioSystem.FOR_COMMUNICATION, AudioSystem.FORCE_BT_SCO); + AudioSystem.setForceUse(AudioSystem.FOR_RECORD, AudioSystem.FORCE_BT_SCO); + mForcedUseForComm = AudioSystem.FORCE_BT_SCO; + } else { + AudioSystem.setForceUse(AudioSystem.FOR_COMMUNICATION, 
AudioSystem.FORCE_NONE); + AudioSystem.setForceUse(AudioSystem.FOR_RECORD, AudioSystem.FORCE_NONE); + mForcedUseForComm = AudioSystem.FORCE_NONE; + } + } + + /** @see AudioManager#isBluetoothScoOn() */ + public boolean isBluetoothScoOn() { + if (mForcedUseForComm == AudioSystem.FORCE_BT_SCO) { + return true; + } else { + return false; + } + } + /////////////////////////////////////////////////////////////////////////// // Internal methods /////////////////////////////////////////////////////////////////////////// @@ -969,7 +822,7 @@ public class AudioService extends IAudioService.Stub { boolean adjustVolumeIndex = true; int newRingerMode = mRingerMode; - if (mRingerMode == AudioManager.RINGER_MODE_NORMAL && oldIndex == 1 + if (mRingerMode == AudioManager.RINGER_MODE_NORMAL && (oldIndex + 5) / 10 == 1 && direction == AudioManager.ADJUST_LOWER) { newRingerMode = AudioManager.RINGER_MODE_VIBRATE; } else if (mRingerMode == AudioManager.RINGER_MODE_VIBRATE) { @@ -1026,7 +879,7 @@ public class AudioService extends IAudioService.Stub { Log.w(TAG, "Couldn't connect to phone service", e); } - if ((getRouting(AudioSystem.MODE_IN_CALL) & AudioSystem.ROUTE_BLUETOOTH_SCO) != 0) { + if (AudioSystem.getForceUse(AudioSystem.FOR_COMMUNICATION) == AudioSystem.FORCE_BT_SCO) { // Log.v(TAG, "getActiveStreamType: Forcing STREAM_BLUETOOTH_SCO..."); return AudioSystem.STREAM_BLUETOOTH_SCO; } else if (isOffhook) { @@ -1106,51 +959,44 @@ public class AudioService extends IAudioService.Stub { /////////////////////////////////////////////////////////////////////////// public class VolumeStreamState { - private final String mVolumeIndexSettingName; - private final String mLastAudibleVolumeIndexSettingName; private final int mStreamType; - private final int[] mVolumes; + private String mVolumeIndexSettingName; + private String mLastAudibleVolumeIndexSettingName; + private int mIndexMax; private int mIndex; private int mLastAudibleIndex; private ArrayList<VolumeDeathHandler> mDeathHandlers; 
//handles mute/solo requests client death - private VolumeStreamState(String settingName, int streamType, int[] volumes) { + private VolumeStreamState(String settingName, int streamType) { - mVolumeIndexSettingName = settingName; - mLastAudibleVolumeIndexSettingName = settingName + System.APPEND_FOR_LAST_AUDIBLE; + setVolumeIndexSettingName(settingName); mStreamType = streamType; - mVolumes = volumes; final ContentResolver cr = mContentResolver; - mIndex = getValidIndex(Settings.System.getInt(cr, mVolumeIndexSettingName, AudioManager.DEFAULT_STREAM_VOLUME[streamType])); - mLastAudibleIndex = getValidIndex(Settings.System.getInt(cr, - mLastAudibleVolumeIndexSettingName, mIndex > 0 ? mIndex : AudioManager.DEFAULT_STREAM_VOLUME[streamType])); - - AudioSystem.setVolume(streamType, volumes[mIndex]); + mIndexMax = MAX_STREAM_VOLUME[streamType]; + mIndex = Settings.System.getInt(cr, + mVolumeIndexSettingName, + AudioManager.DEFAULT_STREAM_VOLUME[streamType]); + mLastAudibleIndex = Settings.System.getInt(cr, + mLastAudibleVolumeIndexSettingName, + (mIndex > 0) ? mIndex : AudioManager.DEFAULT_STREAM_VOLUME[streamType]); + AudioSystem.initStreamVolume(streamType, 0, mIndexMax); + mIndexMax *= 10; + mIndex = getValidIndex(10 * mIndex); + mLastAudibleIndex = getValidIndex(10 * mLastAudibleIndex); + setStreamVolumeIndex(streamType, mIndex); mDeathHandlers = new ArrayList<VolumeDeathHandler>(); } - /** - * Constructor to be used when there is no setting associated with the VolumeStreamState. - * - * @param defaultVolume Default volume of the stream to use. - * @param streamType Type of the stream. - * @param volumes Volumes levels associated with this stream. 
- */ - private VolumeStreamState(int defaultVolume, int streamType, int[] volumes) { - mVolumeIndexSettingName = null; - mLastAudibleVolumeIndexSettingName = null; - mIndex = mLastAudibleIndex = defaultVolume; - mStreamType = streamType; - mVolumes = volumes; - AudioSystem.setVolume(mStreamType, defaultVolume); - mDeathHandlers = new ArrayList<VolumeDeathHandler>(); + public void setVolumeIndexSettingName(String settingName) { + mVolumeIndexSettingName = settingName; + mLastAudibleVolumeIndexSettingName = settingName + System.APPEND_FOR_LAST_AUDIBLE; } public boolean adjustIndex(int deltaIndex) { - return setIndex(mIndex + deltaIndex, true); + return setIndex(mIndex + deltaIndex * 10, true); } public boolean setIndex(int index, boolean lastAudible) { @@ -1161,6 +1007,13 @@ public class AudioService extends IAudioService.Stub { if (lastAudible) { mLastAudibleIndex = mIndex; } + // Apply change to all streams using this one as alias + int numStreamTypes = AudioSystem.getNumStreamTypes(); + for (int streamType = numStreamTypes - 1; streamType >= 0; streamType--) { + if (streamType != mStreamType && STREAM_VOLUME_ALIAS[streamType] == mStreamType) { + mStreamStates[streamType].setIndex(rescaleIndex(mIndex, mStreamType, streamType), lastAudible); + } + } return true; } else { return false; @@ -1168,7 +1021,7 @@ public class AudioService extends IAudioService.Stub { } public int getMaxIndex() { - return mVolumes.length - 1; + return mIndexMax; } public void mute(IBinder cb, boolean state) { @@ -1183,8 +1036,8 @@ public class AudioService extends IAudioService.Stub { private int getValidIndex(int index) { if (index < 0) { return 0; - } else if (index >= mVolumes.length) { - return mVolumes.length - 1; + } else if (index > mIndexMax) { + return mIndexMax; } return index; @@ -1318,8 +1171,16 @@ public class AudioService extends IAudioService.Stub { private void setSystemVolume(VolumeStreamState streamState) { // Adjust volume - AudioSystem - 
.setVolume(streamState.mStreamType, streamState.mVolumes[streamState.mIndex]); + setStreamVolumeIndex(streamState.mStreamType, streamState.mIndex); + + // Apply change to all streams using this one as alias + int numStreamTypes = AudioSystem.getNumStreamTypes(); + for (int streamType = numStreamTypes - 1; streamType >= 0; streamType--) { + if (streamType != streamState.mStreamType && + STREAM_VOLUME_ALIAS[streamType] == streamState.mStreamType) { + setStreamVolumeIndex(streamType, mStreamStates[streamType].mIndex); + } + } // Post a persist volume msg sendMsg(mAudioHandler, MSG_PERSIST_VOLUME, streamState.mStreamType, @@ -1327,12 +1188,10 @@ public class AudioService extends IAudioService.Stub { } private void persistVolume(VolumeStreamState streamState) { - if (streamState.mStreamType != AudioManager.STREAM_BLUETOOTH_SCO) { - System.putInt(mContentResolver, streamState.mVolumeIndexSettingName, - streamState.mIndex); - System.putInt(mContentResolver, streamState.mLastAudibleVolumeIndexSettingName, - streamState.mLastAudibleIndex); - } + System.putInt(mContentResolver, streamState.mVolumeIndexSettingName, + (streamState.mIndex + 5)/ 10); + System.putInt(mContentResolver, streamState.mLastAudibleVolumeIndexSettingName, + (streamState.mLastAudibleIndex + 5) / 10); } private void persistRingerMode() { @@ -1348,10 +1207,20 @@ public class AudioService extends IAudioService.Stub { if (mSoundPool == null) { return; } + float volFloat; + // use STREAM_MUSIC volume attenuated by 3 dB if volume is not specified by caller + if (volume < 0) { + // Same linear to log conversion as in native AudioSystem::linearToLog() (AudioSystem.cpp) + float dBPerStep = (float)((0.5 * 100) / MAX_STREAM_VOLUME[AudioSystem.STREAM_MUSIC]); + int musicVolIndex = (mStreamStates[AudioSystem.STREAM_MUSIC].mIndex + 5) / 10; + float musicVoldB = dBPerStep * (musicVolIndex - MAX_STREAM_VOLUME[AudioSystem.STREAM_MUSIC]); + volFloat = (float)Math.pow(10, (musicVoldB - 3)/20); + } else { + volFloat = 
(float) volume / 1000.0f; + } if (SOUND_EFFECT_FILES_MAP[effectType][1] > 0) { - float v = (float) volume / 1000.0f; - mSoundPool.play(SOUND_EFFECT_FILES_MAP[effectType][1], v, v, 0, 0, 1.0f); + mSoundPool.play(SOUND_EFFECT_FILES_MAP[effectType][1], volFloat, volFloat, 0, 0, 1.0f); } else { MediaPlayer mediaPlayer = new MediaPlayer(); if (mediaPlayer != null) { @@ -1360,6 +1229,7 @@ public class AudioService extends IAudioService.Stub { mediaPlayer.setDataSource(filePath); mediaPlayer.setAudioStreamType(AudioSystem.STREAM_SYSTEM); mediaPlayer.prepare(); + mediaPlayer.setVolume(volFloat, volFloat); mediaPlayer.setOnCompletionListener(new OnCompletionListener() { public void onCompletion(MediaPlayer mp) { cleanupPlayer(mp); @@ -1418,29 +1288,50 @@ public class AudioService extends IAudioService.Stub { break; case MSG_MEDIA_SERVER_DIED: - Log.e(TAG, "Media server died."); // Force creation of new IAudioflinger interface - mMediaServerOk = false; - AudioSystem.getMode(); + if (!mMediaServerOk) { + Log.e(TAG, "Media server died."); + AudioSystem.isMusicActive(); + sendMsg(mAudioHandler, MSG_MEDIA_SERVER_DIED, SHARED_MSG, SENDMSG_NOOP, 0, 0, + null, 500); + } break; case MSG_MEDIA_SERVER_STARTED: Log.e(TAG, "Media server started."); - // Restore audio routing and stream volumes - applyAudioSettings(); + // Restore device connection states + Set set = mConnectedDevices.entrySet(); + Iterator i = set.iterator(); + while(i.hasNext()){ + Map.Entry device = (Map.Entry)i.next(); + AudioSystem.setDeviceConnectionState(((Integer)device.getKey()).intValue(), + AudioSystem.DEVICE_STATE_AVAILABLE, + (String)device.getValue()); + } + + // Restore call state + AudioSystem.setPhoneState(mMode); + + // Restore forced usage for communcations and record + AudioSystem.setForceUse(AudioSystem.FOR_COMMUNICATION, mForcedUseForComm); + AudioSystem.setForceUse(AudioSystem.FOR_RECORD, mForcedUseForComm); + + // Restore stream volumes int numStreamTypes = AudioSystem.getNumStreamTypes(); for 
(int streamType = numStreamTypes - 1; streamType >= 0; streamType--) { - int volume; + int index; VolumeStreamState streamState = mStreamStates[streamType]; + AudioSystem.initStreamVolume(streamType, 0, (streamState.mIndexMax + 5) / 10); if (streamState.muteCount() == 0) { - volume = streamState.mVolumes[streamState.mIndex]; + index = streamState.mIndex; } else { - volume = streamState.mVolumes[0]; + index = 0; } - AudioSystem.setVolume(streamType, volume); + setStreamVolumeIndex(streamType, index); } - setRingerMode(mRingerMode); - mMediaServerOk = true; + + // Restore ringer mode + setRingerModeInt(getRingerMode(), false); break; case MSG_PLAY_SOUND_EFFECT: @@ -1451,28 +1342,191 @@ public class AudioService extends IAudioService.Stub { } private class SettingsObserver extends ContentObserver { - + SettingsObserver() { super(new Handler()); mContentResolver.registerContentObserver(Settings.System.getUriFor( Settings.System.MODE_RINGER_STREAMS_AFFECTED), false, this); + mContentResolver.registerContentObserver(Settings.System.getUriFor( + Settings.System.NOTIFICATIONS_USE_RING_VOLUME), false, this); } @Override public void onChange(boolean selfChange) { super.onChange(selfChange); - - mRingerModeAffectedStreams = Settings.System.getInt(mContentResolver, - Settings.System.MODE_RINGER_STREAMS_AFFECTED, - 0); + synchronized (mSettingsLock) { + int ringerModeAffectedStreams = Settings.System.getInt(mContentResolver, + Settings.System.MODE_RINGER_STREAMS_AFFECTED, + 0); + if (ringerModeAffectedStreams != mRingerModeAffectedStreams) { + /* + * Ensure all stream types that should be affected by ringer mode + * are in the proper state. + */ + mRingerModeAffectedStreams = ringerModeAffectedStreams; + setRingerModeInt(getRingerMode(), false); + } - /* - * Ensure all stream types that should be affected by ringer mode - * are in the proper state. 
- */ - setRingerModeInt(getRingerMode(), false); + int notificationsUseRingVolume = Settings.System.getInt(mContentResolver, + Settings.System.NOTIFICATIONS_USE_RING_VOLUME, + 1); + if (notificationsUseRingVolume != mNotificationsUseRingVolume) { + mNotificationsUseRingVolume = notificationsUseRingVolume; + if (mNotificationsUseRingVolume == 1) { + STREAM_VOLUME_ALIAS[AudioSystem.STREAM_NOTIFICATION] = AudioSystem.STREAM_RING; + mStreamStates[AudioSystem.STREAM_NOTIFICATION].setVolumeIndexSettingName( + System.VOLUME_SETTINGS[AudioSystem.STREAM_RING]); + } else { + STREAM_VOLUME_ALIAS[AudioSystem.STREAM_NOTIFICATION] = AudioSystem.STREAM_NOTIFICATION; + mStreamStates[AudioSystem.STREAM_NOTIFICATION].setVolumeIndexSettingName( + System.VOLUME_SETTINGS[AudioSystem.STREAM_NOTIFICATION]); + // Persist notification volume volume as it was not persisted while aliased to ring volume + // and persist with no delay as there might be registered observers of the persisted + // notification volume. + sendMsg(mAudioHandler, MSG_PERSIST_VOLUME, AudioSystem.STREAM_NOTIFICATION, + SENDMSG_REPLACE, 0, 0, mStreamStates[AudioSystem.STREAM_NOTIFICATION], 0); + } + } + } + } + } + + /** + * Receiver for misc intent broadcasts the Phone app cares about. 
+ */ + private class AudioServiceBroadcastReceiver extends BroadcastReceiver { + @Override + public void onReceive(Context context, Intent intent) { + String action = intent.getAction(); + + if (action.equals(BluetoothA2dp.ACTION_SINK_STATE_CHANGED)) { + int state = intent.getIntExtra(BluetoothA2dp.EXTRA_SINK_STATE, + BluetoothA2dp.STATE_DISCONNECTED); + BluetoothDevice btDevice = intent.getParcelableExtra(BluetoothDevice.EXTRA_DEVICE); + String address = btDevice.getAddress(); + boolean isConnected = (mConnectedDevices.containsKey(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP) && + ((String)mConnectedDevices.get(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP)).equals(address)); + + if (isConnected && + state != BluetoothA2dp.STATE_CONNECTED && state != BluetoothA2dp.STATE_PLAYING) { + AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP, + AudioSystem.DEVICE_STATE_UNAVAILABLE, + address); + mConnectedDevices.remove(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP); + } else if (!isConnected && + (state == BluetoothA2dp.STATE_CONNECTED || + state == BluetoothA2dp.STATE_PLAYING)) { + AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP, + AudioSystem.DEVICE_STATE_AVAILABLE, + address); + mConnectedDevices.put( new Integer(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP), + address); + } + } else if (action.equals(BluetoothHeadset.ACTION_STATE_CHANGED)) { + int state = intent.getIntExtra(BluetoothHeadset.EXTRA_STATE, + BluetoothHeadset.STATE_ERROR); + int device = AudioSystem.DEVICE_OUT_BLUETOOTH_SCO; + BluetoothDevice btDevice = intent.getParcelableExtra(BluetoothDevice.EXTRA_DEVICE); + String address = null; + if (btDevice != null) { + address = btDevice.getAddress(); + BluetoothClass btClass = btDevice.getBluetoothClass(); + if (btClass != null) { + switch (btClass.getDeviceClass()) { + case BluetoothClass.Device.AUDIO_VIDEO_WEARABLE_HEADSET: + case BluetoothClass.Device.AUDIO_VIDEO_HANDSFREE: + device = AudioSystem.DEVICE_OUT_BLUETOOTH_SCO_HEADSET; + break; 
+ case BluetoothClass.Device.AUDIO_VIDEO_CAR_AUDIO: + device = AudioSystem.DEVICE_OUT_BLUETOOTH_SCO_CARKIT; + break; + } + } + } + + boolean isConnected = (mConnectedDevices.containsKey(device) && + ((String)mConnectedDevices.get(device)).equals(address)); + + if (isConnected && state != BluetoothHeadset.STATE_CONNECTED) { + AudioSystem.setDeviceConnectionState(device, + AudioSystem.DEVICE_STATE_UNAVAILABLE, + address); + mConnectedDevices.remove(device); + } else if (!isConnected && state == BluetoothHeadset.STATE_CONNECTED) { + AudioSystem.setDeviceConnectionState(device, + AudioSystem.DEVICE_STATE_AVAILABLE, + address); + mConnectedDevices.put(new Integer(device), address); + } + } else if (action.equals(Intent.ACTION_HEADSET_PLUG)) { + int state = intent.getIntExtra("state", 0); + if ((state & BIT_HEADSET) == 0 && + (mHeadsetState & BIT_HEADSET) != 0) { + AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_WIRED_HEADSET, + AudioSystem.DEVICE_STATE_UNAVAILABLE, + ""); + mConnectedDevices.remove(AudioSystem.DEVICE_OUT_WIRED_HEADSET); + } else if ((state & BIT_HEADSET) != 0 && + (mHeadsetState & BIT_HEADSET) == 0) { + AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_WIRED_HEADSET, + AudioSystem.DEVICE_STATE_AVAILABLE, + ""); + mConnectedDevices.put( new Integer(AudioSystem.DEVICE_OUT_WIRED_HEADSET), ""); + } + if ((state & BIT_HEADSET_NO_MIC) == 0 && + (mHeadsetState & BIT_HEADSET_NO_MIC) != 0) { + AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_WIRED_HEADPHONE, + AudioSystem.DEVICE_STATE_UNAVAILABLE, + ""); + mConnectedDevices.remove(AudioSystem.DEVICE_OUT_WIRED_HEADPHONE); + } else if ((state & BIT_HEADSET_NO_MIC) != 0 && + (mHeadsetState & BIT_HEADSET_NO_MIC) == 0) { + AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_WIRED_HEADPHONE, + AudioSystem.DEVICE_STATE_AVAILABLE, + ""); + mConnectedDevices.put( new Integer(AudioSystem.DEVICE_OUT_WIRED_HEADPHONE), ""); + } + if ((state & BIT_TTY) == 0 && + (mHeadsetState & 
BIT_TTY) != 0) { + AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_TTY, + AudioSystem.DEVICE_STATE_UNAVAILABLE, + ""); + mConnectedDevices.remove(AudioSystem.DEVICE_OUT_TTY); + } else if ((state & BIT_TTY) != 0 && + (mHeadsetState & BIT_TTY) == 0) { + AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_TTY, + AudioSystem.DEVICE_STATE_AVAILABLE, + ""); + mConnectedDevices.put( new Integer(AudioSystem.DEVICE_OUT_TTY), ""); + } + if ((state & BIT_FM_HEADSET) == 0 && + (mHeadsetState & BIT_FM_HEADSET) != 0) { + AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_FM_HEADPHONE, + AudioSystem.DEVICE_STATE_UNAVAILABLE, + ""); + mConnectedDevices.remove(AudioSystem.DEVICE_OUT_FM_HEADPHONE); + } else if ((state & BIT_FM_HEADSET) != 0 && + (mHeadsetState & BIT_FM_HEADSET) == 0) { + AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_FM_HEADPHONE, + AudioSystem.DEVICE_STATE_AVAILABLE, + ""); + mConnectedDevices.put( new Integer(AudioSystem.DEVICE_OUT_FM_HEADPHONE), ""); + } + if ((state & BIT_FM_SPEAKER) == 0 && + (mHeadsetState & BIT_FM_SPEAKER) != 0) { + AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_FM_SPEAKER, + AudioSystem.DEVICE_STATE_UNAVAILABLE, + ""); + mConnectedDevices.remove(AudioSystem.DEVICE_OUT_FM_SPEAKER); + } else if ((state & BIT_FM_SPEAKER) != 0 && + (mHeadsetState & BIT_FM_SPEAKER) == 0) { + AudioSystem.setDeviceConnectionState(AudioSystem.DEVICE_OUT_FM_SPEAKER, + AudioSystem.DEVICE_STATE_AVAILABLE, + ""); + mConnectedDevices.put( new Integer(AudioSystem.DEVICE_OUT_FM_SPEAKER), ""); + } + mHeadsetState = state; + } } - } - } diff --git a/media/java/android/media/AudioSystem.java b/media/java/android/media/AudioSystem.java index 5917ab90e6c7..dbf6d9dfe71d 100644 --- a/media/java/android/media/AudioSystem.java +++ b/media/java/android/media/AudioSystem.java @@ -45,38 +45,21 @@ public class AudioSystem public static final int STREAM_NOTIFICATION = 5; /* @hide The audio stream for phone calls when connected on 
bluetooth */ public static final int STREAM_BLUETOOTH_SCO = 6; + /* @hide The audio stream for enforced system sounds in certain countries (e.g camera in Japan) */ + public static final int STREAM_SYSTEM_ENFORCED = 7; + /* @hide The audio stream for DTMF tones */ + public static final int STREAM_DTMF = 8; + /* @hide The audio stream for text to speech (TTS) */ + public static final int STREAM_TTS = 9; /** * @deprecated Use {@link #numStreamTypes() instead} */ public static final int NUM_STREAMS = 5; // Expose only the getter method publicly so we can change it in the future - private static final int NUM_STREAM_TYPES = 7; + private static final int NUM_STREAM_TYPES = 10; public static final int getNumStreamTypes() { return NUM_STREAM_TYPES; } - /* max and min volume levels */ - /* Maximum volume setting, for use with setVolume(int,int) */ - public static final int MAX_VOLUME = 100; - /* Minimum volume setting, for use with setVolume(int,int) */ - public static final int MIN_VOLUME = 0; - - /* - * Sets the volume of a specified audio stream. - * - * param type the stream type to set the volume of (e.g. STREAM_MUSIC) - * param volume the volume level to set (0-100) - * return command completion status see AUDIO_STATUS_OK, see AUDIO_STATUS_ERROR - */ - public static native int setVolume(int type, int volume); - - /* - * Returns the volume of a specified audio stream. - * - * param type the stream type to get the volume of (e.g. STREAM_MUSIC) - * return the current volume (0-100) - */ - public static native int getVolume(int type); - /* * Sets the microphone mute on or off. * @@ -101,17 +84,23 @@ public class AudioSystem * it can route the audio appropriately. * return command completion status see AUDIO_STATUS_OK, see AUDIO_STATUS_ERROR */ - public static native int setMode(int mode); - + /** @deprecated use {@link #setPhoneState(int)} */ + public static int setMode(int mode) { + return AUDIO_STATUS_ERROR; + } /* * Returns the current audio mode. 
* * return the current audio mode (NORMAL, RINGTONE, or IN_CALL). * Returns the current current audio state from the HAL. + * */ - public static native int getMode(); + /** @deprecated Do not use. */ + public static int getMode() { + return MODE_INVALID; + } - /* modes for setMode/getMode/setRoute/getRoute */ + /* modes for setPhoneState */ public static final int MODE_INVALID = -2; public static final int MODE_CURRENT = -1; public static final int MODE_NORMAL = 0; @@ -121,15 +110,20 @@ public class AudioSystem /* Routing bits for setRouting/getRouting API */ - public static final int ROUTE_EARPIECE = (1 << 0); - public static final int ROUTE_SPEAKER = (1 << 1); - + /** @deprecated */ + @Deprecated public static final int ROUTE_EARPIECE = (1 << 0); + /** @deprecated */ + @Deprecated public static final int ROUTE_SPEAKER = (1 << 1); /** @deprecated use {@link #ROUTE_BLUETOOTH_SCO} */ @Deprecated public static final int ROUTE_BLUETOOTH = (1 << 2); - public static final int ROUTE_BLUETOOTH_SCO = (1 << 2); - public static final int ROUTE_HEADSET = (1 << 3); - public static final int ROUTE_BLUETOOTH_A2DP = (1 << 4); - public static final int ROUTE_ALL = 0xFFFFFFFF; + /** @deprecated */ + @Deprecated public static final int ROUTE_BLUETOOTH_SCO = (1 << 2); + /** @deprecated */ + @Deprecated public static final int ROUTE_HEADSET = (1 << 3); + /** @deprecated */ + @Deprecated public static final int ROUTE_BLUETOOTH_A2DP = (1 << 4); + /** @deprecated */ + @Deprecated public static final int ROUTE_ALL = 0xFFFFFFFF; /* * Sets the audio routing for a specified mode @@ -141,7 +135,10 @@ public class AudioSystem * ROUTE_xxx types. 
Unset bits indicate the route should be left unchanged * return command completion status see AUDIO_STATUS_OK, see AUDIO_STATUS_ERROR */ - public static native int setRouting(int mode, int routes, int mask); + /** @deprecated use {@link #setDeviceConnectionState(int,int,String)} */ + public static int setRouting(int mode, int routes, int mask) { + return AUDIO_STATUS_ERROR; + } /* * Returns the current audio routing bit vector for a specified mode. @@ -150,7 +147,10 @@ public class AudioSystem * return an audio route bit vector that can be compared with ROUTE_xxx * bits */ - public static native int getRouting(int mode); + /** @deprecated use {@link #getDeviceConnectionState(int,String)} */ + public static int getRouting(int mode) { + return 0; + } /* * Checks whether any music is active. @@ -160,17 +160,23 @@ public class AudioSystem public static native boolean isMusicActive(); /* - * Sets a generic audio configuration parameter. The use of these parameters + * Sets a group generic audio configuration parameters. The use of these parameters * are platform dependant, see libaudio * - * ** Temporary interface - DO NOT USE - * - * TODO: Replace with a more generic key:value get/set mechanism + * param keyValuePairs list of parameters key value pairs in the form: + * key1=value1;key2=value2;... + */ + public static native int setParameters(String keyValuePairs); + + /* + * Gets a group generic audio configuration parameters. The use of these parameters + * are platform dependant, see libaudio * - * param key name of parameter to set. Must not be null. - * param value value of parameter. Must not be null. + * param keys list of parameters + * return value: list of parameters key value pairs in the form: + * key1=value1;key2=value2;... 
*/ - public static native void setParameter(String key, String value); + public static native String getParameters(String keys); /* private final static String TAG = "audio"; @@ -220,4 +226,68 @@ public class AudioSystem mErrorCallback.onError(error); } } + + /* + * AudioPolicyService methods + */ + + // output devices + public static final int DEVICE_OUT_EARPIECE = 0x1; + public static final int DEVICE_OUT_SPEAKER = 0x2; + public static final int DEVICE_OUT_WIRED_HEADSET = 0x4; + public static final int DEVICE_OUT_WIRED_HEADPHONE = 0x8; + public static final int DEVICE_OUT_BLUETOOTH_SCO = 0x10; + public static final int DEVICE_OUT_BLUETOOTH_SCO_HEADSET = 0x20; + public static final int DEVICE_OUT_BLUETOOTH_SCO_CARKIT = 0x40; + public static final int DEVICE_OUT_BLUETOOTH_A2DP = 0x80; + public static final int DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES = 0x100; + public static final int DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER = 0x200; + public static final int DEVICE_OUT_AUX_DIGITAL = 0x400; + public static final int DEVICE_OUT_FM_HEADPHONE = 0x800; + public static final int DEVICE_OUT_FM_SPEAKER = 0x1000; + public static final int DEVICE_OUT_TTY = 0x2000; + public static final int DEVICE_OUT_DEFAULT = 0x8000; + // input devices + public static final int DEVICE_IN_COMMUNICATION = 0x10000; + public static final int DEVICE_IN_AMBIENT = 0x20000; + public static final int DEVICE_IN_BUILTIN_MIC1 = 0x40000; + public static final int DEVICE_IN_BUILTIN_MIC2 = 0x80000; + public static final int DEVICE_IN_MIC_ARRAY = 0x100000; + public static final int DEVICE_IN_BLUETOOTH_SCO_HEADSET = 0x200000; + public static final int DEVICE_IN_WIRED_HEADSET = 0x400000; + public static final int DEVICE_IN_AUX_DIGITAL = 0x800000; + public static final int DEVICE_IN_DEFAULT = 0x80000000; + + // device states + public static final int DEVICE_STATE_UNAVAILABLE = 0; + public static final int DEVICE_STATE_AVAILABLE = 1; + + // phone state + public static final int PHONE_STATE_OFFCALL = 0; + public static 
final int PHONE_STATE_RINGING = 1; + public static final int PHONE_STATE_INCALL = 2; + + // config for setForceUse + public static final int FORCE_NONE = 0; + public static final int FORCE_SPEAKER = 1; + public static final int FORCE_HEADPHONES = 2; + public static final int FORCE_BT_SCO = 3; + public static final int FORCE_BT_A2DP = 4; + public static final int FORCE_WIRED_ACCESSORY = 5; + public static final int FORCE_DEFAULT = FORCE_NONE; + + // usage for serForceUse + public static final int FOR_COMMUNICATION = 0; + public static final int FOR_MEDIA = 1; + public static final int FOR_RECORD = 2; + + public static native int setDeviceConnectionState(int device, int state, String device_address); + public static native int getDeviceConnectionState(int device, String device_address); + public static native int setPhoneState(int state); + public static native int setRingerMode(int mode, int mask); + public static native int setForceUse(int usage, int config); + public static native int getForceUse(int usage); + public static native int initStreamVolume(int stream, int indexMin, int indexMax); + public static native int setStreamVolumeIndex(int stream, int index); + public static native int getStreamVolumeIndex(int stream); } diff --git a/media/java/android/media/AudioTrack.java b/media/java/android/media/AudioTrack.java index 5f1be9d5e118..1e8d72f47a34 100644 --- a/media/java/android/media/AudioTrack.java +++ b/media/java/android/media/AudioTrack.java @@ -120,7 +120,7 @@ public class AudioTrack public static final int ERROR_INVALID_OPERATION = -3; private static final int ERROR_NATIVESETUP_AUDIOSYSTEM = -16; - private static final int ERROR_NATIVESETUP_INVALIDCHANNELCOUNT = -17; + private static final int ERROR_NATIVESETUP_INVALIDCHANNELMASK = -17; private static final int ERROR_NATIVESETUP_INVALIDFORMAT = -18; private static final int ERROR_NATIVESETUP_INVALIDSTREAMTYPE = -19; private static final int ERROR_NATIVESETUP_NATIVEINITFAILED = -20; @@ -181,10 +181,15 @@ 
public class AudioTrack */ private int mSampleRate = 22050; /** - * The number of input audio channels (1 is mono, 2 is stereo). + * The number of audio output channels (1 is mono, 2 is stereo). */ private int mChannelCount = 1; /** + * The audio channel mask. + */ + private int mChannels = AudioFormat.CHANNEL_OUT_MONO; + + /** * The type of the audio stream to play. See * {@link AudioManager#STREAM_VOICE_CALL}, {@link AudioManager#STREAM_SYSTEM}, * {@link AudioManager#STREAM_RING}, {@link AudioManager#STREAM_MUSIC} and @@ -198,7 +203,7 @@ public class AudioTrack /** * The current audio channel configuration. */ - private int mChannelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO; + private int mChannelConfiguration = AudioFormat.CHANNEL_OUT_MONO; /** * The encoding of the audio samples. * @see AudioFormat#ENCODING_PCM_8BIT @@ -235,8 +240,8 @@ public class AudioTrack * @param sampleRateInHz the sample rate expressed in Hertz. Examples of rates are (but * not limited to) 44100, 22050 and 11025. * @param channelConfig describes the configuration of the audio channels. - * See {@link AudioFormat#CHANNEL_CONFIGURATION_MONO} and - * {@link AudioFormat#CHANNEL_CONFIGURATION_STEREO} + * See {@link AudioFormat#CHANNEL_OUT_MONO} and + * {@link AudioFormat#CHANNEL_OUT_STEREO} * @param audioFormat the format in which the audio data is represented. 
* See {@link AudioFormat#ENCODING_PCM_16BIT} and * {@link AudioFormat#ENCODING_PCM_8BIT} @@ -266,7 +271,7 @@ public class AudioTrack // native initialization int initResult = native_setup(new WeakReference<AudioTrack>(this), - mStreamType, mSampleRate, mChannelCount, mAudioFormat, + mStreamType, mSampleRate, mChannels, mAudioFormat, mNativeBufferSizeInBytes, mDataLoadMode); if (initResult != SUCCESS) { loge("Error code "+initResult+" when initializing AudioTrack."); @@ -286,6 +291,7 @@ public class AudioTrack // postconditions: // mStreamType is valid // mChannelCount is valid + // mChannels is valid // mAudioFormat is valid // mSampleRate is valid // mDataLoadMode is valid @@ -298,7 +304,8 @@ public class AudioTrack && (streamType != AudioManager.STREAM_RING) && (streamType != AudioManager.STREAM_SYSTEM) && (streamType != AudioManager.STREAM_VOICE_CALL) && (streamType != AudioManager.STREAM_NOTIFICATION) - && (streamType != AudioManager.STREAM_BLUETOOTH_SCO)) { + && (streamType != AudioManager.STREAM_BLUETOOTH_SCO) + && (streamType != AudioManager.STREAM_DTMF)) { throw (new IllegalArgumentException("Invalid stream type.")); } else { mStreamType = streamType; @@ -315,18 +322,23 @@ public class AudioTrack //-------------- // channel config + mChannelConfiguration = channelConfig; + switch (channelConfig) { - case AudioFormat.CHANNEL_CONFIGURATION_DEFAULT: + case AudioFormat.CHANNEL_OUT_DEFAULT: //AudioFormat.CHANNEL_CONFIGURATION_DEFAULT + case AudioFormat.CHANNEL_OUT_MONO: case AudioFormat.CHANNEL_CONFIGURATION_MONO: mChannelCount = 1; - mChannelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO; + mChannels = AudioFormat.CHANNEL_OUT_MONO; break; + case AudioFormat.CHANNEL_OUT_STEREO: case AudioFormat.CHANNEL_CONFIGURATION_STEREO: mChannelCount = 2; - mChannelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_STEREO; + mChannels = AudioFormat.CHANNEL_OUT_STEREO; break; default: mChannelCount = 0; + mChannels = AudioFormat.CHANNEL_INVALID; mChannelConfiguration = 
AudioFormat.CHANNEL_CONFIGURATION_INVALID; throw(new IllegalArgumentException("Unsupported channel configuration.")); } @@ -452,8 +464,8 @@ public class AudioTrack /** * Returns the configured channel configuration. - * See {@link AudioFormat#CHANNEL_CONFIGURATION_MONO} - * and {@link AudioFormat#CHANNEL_CONFIGURATION_STEREO}. + * See {@link AudioFormat#CHANNEL_OUT_MONO} + * and {@link AudioFormat#CHANNEL_OUT_STEREO}. */ public int getChannelConfiguration() { return mChannelConfiguration; @@ -531,8 +543,8 @@ public class AudioTrack * the expected frequency at which the buffer will be refilled with additional data to play. * @param sampleRateInHz the sample rate expressed in Hertz. * @param channelConfig describes the configuration of the audio channels. - * See {@link AudioFormat#CHANNEL_CONFIGURATION_MONO} and - * {@link AudioFormat#CHANNEL_CONFIGURATION_STEREO} + * See {@link AudioFormat#CHANNEL_OUT_MONO} and + * {@link AudioFormat#CHANNEL_OUT_STEREO} * @param audioFormat the format in which the audio data is represented. 
* See {@link AudioFormat#ENCODING_PCM_16BIT} and * {@link AudioFormat#ENCODING_PCM_8BIT} @@ -544,9 +556,11 @@ public class AudioTrack static public int getMinBufferSize(int sampleRateInHz, int channelConfig, int audioFormat) { int channelCount = 0; switch(channelConfig) { + case AudioFormat.CHANNEL_OUT_MONO: case AudioFormat.CHANNEL_CONFIGURATION_MONO: channelCount = 1; break; + case AudioFormat.CHANNEL_OUT_STEREO: case AudioFormat.CHANNEL_CONFIGURATION_STEREO: channelCount = 2; break; diff --git a/media/java/android/media/ExifInterface.java b/media/java/android/media/ExifInterface.java index 6d7c0ae2facf..6cd4b9227647 100644 --- a/media/java/android/media/ExifInterface.java +++ b/media/java/android/media/ExifInterface.java @@ -16,177 +16,128 @@ package android.media; -import android.util.Log; - +import java.io.IOException; +import java.text.ParsePosition; +import java.text.SimpleDateFormat; +import java.util.Date; import java.util.HashMap; import java.util.Map; /** - * Wrapper for native Exif library - * {@hide} + * This is a class for reading and writing Exif tags in a JPEG file. */ public class ExifInterface { - private static final String TAG = "ExifInterface"; - private String mFilename; - - // Constants used for the Orientation Exif tag. 
- public static final int ORIENTATION_UNDEFINED = 0; - public static final int ORIENTATION_NORMAL = 1; - - // Constants used for white balance - public static final int WHITEBALANCE_AUTO = 0; - public static final int WHITEBALANCE_MANUAL = 1; - - // left right reversed mirror - public static final int ORIENTATION_FLIP_HORIZONTAL = 2; - public static final int ORIENTATION_ROTATE_180 = 3; - - // upside down mirror - public static final int ORIENTATION_FLIP_VERTICAL = 4; - - // flipped about top-left <--> bottom-right axis - public static final int ORIENTATION_TRANSPOSE = 5; - - // rotate 90 cw to right it - public static final int ORIENTATION_ROTATE_90 = 6; - - // flipped about top-right <--> bottom-left axis - public static final int ORIENTATION_TRANSVERSE = 7; - - // rotate 270 to right it - public static final int ORIENTATION_ROTATE_270 = 8; // The Exif tag names public static final String TAG_ORIENTATION = "Orientation"; - - public static final String TAG_DATE_TIME_ORIGINAL = "DateTimeOriginal"; + public static final String TAG_DATETIME = "DateTime"; public static final String TAG_MAKE = "Make"; public static final String TAG_MODEL = "Model"; public static final String TAG_FLASH = "Flash"; public static final String TAG_IMAGE_WIDTH = "ImageWidth"; public static final String TAG_IMAGE_LENGTH = "ImageLength"; - public static final String TAG_GPS_LATITUDE = "GPSLatitude"; public static final String TAG_GPS_LONGITUDE = "GPSLongitude"; - public static final String TAG_GPS_LATITUDE_REF = "GPSLatitudeRef"; public static final String TAG_GPS_LONGITUDE_REF = "GPSLongitudeRef"; public static final String TAG_WHITE_BALANCE = "WhiteBalance"; - private boolean mSavedAttributes = false; - private boolean mHasThumbnail = false; - private HashMap<String, String> mCachedAttributes = null; + // Constants used for the Orientation Exif tag. 
+ public static final int ORIENTATION_UNDEFINED = 0; + public static final int ORIENTATION_NORMAL = 1; + public static final int ORIENTATION_FLIP_HORIZONTAL = 2; // left right reversed mirror + public static final int ORIENTATION_ROTATE_180 = 3; + public static final int ORIENTATION_FLIP_VERTICAL = 4; // upside down mirror + public static final int ORIENTATION_TRANSPOSE = 5; // flipped about top-left <--> bottom-right axis + public static final int ORIENTATION_ROTATE_90 = 6; // rotate 90 cw to right it + public static final int ORIENTATION_TRANSVERSE = 7; // flipped about top-right <--> bottom-left axis + public static final int ORIENTATION_ROTATE_270 = 8; // rotate 270 to right it + + // Constants used for white balance + public static final int WHITEBALANCE_AUTO = 0; + public static final int WHITEBALANCE_MANUAL = 1; static { System.loadLibrary("exif"); } - private static ExifInterface sExifObj = null; - /** - * Since the underlying jhead native code is not thread-safe, - * ExifInterface should use singleton interface instead of public - * constructor. - */ - private static synchronized ExifInterface instance() { - if (sExifObj == null) { - sExifObj = new ExifInterface(); - } + private String mFilename; + private HashMap<String, String> mAttributes; + private boolean mHasThumbnail; - return sExifObj; - } + // Because the underlying implementation (jhead) uses static variables, + // there can only be one user at a time for the native functions (and + // they cannot keep state in the native code across function calls). We + // use sLock to serialize the accesses. + private static Object sLock = new Object(); /** - * The following 3 static methods are handy routines for atomic operation - * of underlying jhead library. It retrieves EXIF data and then release - * ExifInterface immediately. + * Reads Exif tags from the specified JPEG file. 
*/ - public static synchronized HashMap<String, String> loadExifData(String filename) { - ExifInterface exif = instance(); - HashMap<String, String> exifData = null; - if (exif != null) { - exif.setFilename(filename); - exifData = exif.getAttributes(); - } - return exifData; - } - - public static synchronized void saveExifData(String filename, HashMap<String, String> exifData) { - ExifInterface exif = instance(); - if (exif != null) { - exif.setFilename(filename); - exif.saveAttributes(exifData); - } + public ExifInterface(String filename) throws IOException { + mFilename = filename; + loadAttributes(); } - public static synchronized byte[] getExifThumbnail(String filename) { - ExifInterface exif = instance(); - if (exif != null) { - exif.setFilename(filename); - return exif.getThumbnail(); - } - return null; + /** + * Returns the value of the specified tag or {@code null} if there + * is no such tag in the JPEG file. + * + * @param tag the name of the tag. + */ + public String getAttribute(String tag) { + return mAttributes.get(tag); } - public void setFilename(String filename) { - if (mFilename == null || !mFilename.equals(filename)) { - mFilename = filename; - mCachedAttributes = null; + /** + * Returns the integer value of the specified tag. If there is no such tag + * in the JPEG file or the value cannot be parsed as integer, return + * <var>defaultValue</var>. + * + * @param tag the name of the tag. + * @param defaultValue the value to return if the tag is not available. + */ + public int getAttributeInt(String tag, int defaultValue) { + String value = mAttributes.get(tag); + if (value == null) return defaultValue; + try { + return Integer.valueOf(value); + } catch (NumberFormatException ex) { + return defaultValue; } } /** - * Given a HashMap of Exif tags and associated values, an Exif section in - * the JPG file is created and loaded with the tag data. 
saveAttributes() - * is expensive because it involves copying all the JPG data from one file - * to another and deleting the old file and renaming the other. It's best - * to collect all the attributes to write and make a single call rather - * than multiple calls for each attribute. You must call "commitChanges()" - * at some point to commit the changes. + * Set the value of the specified tag. + * + * @param tag the name of the tag. + * @param value the value of the tag. */ - public void saveAttributes(HashMap<String, String> attributes) { - // format of string passed to native C code: - // "attrCnt attr1=valueLen value1attr2=value2Len value2..." - // example: - // "4 attrPtr ImageLength=4 1024Model=6 FooImageWidth=4 1280Make=3 FOO" - StringBuilder sb = new StringBuilder(); - int size = attributes.size(); - if (attributes.containsKey("hasThumbnail")) { - --size; - } - sb.append(size + " "); - for (Map.Entry<String, String> iter : attributes.entrySet()) { - String key = iter.getKey(); - if (key.equals("hasThumbnail")) { - // this is a fake attribute not saved as an exif tag - continue; - } - String val = iter.getValue(); - sb.append(key + "="); - sb.append(val.length() + " "); - sb.append(val); - } - String s = sb.toString(); - saveAttributesNative(mFilename, s); - commitChangesNative(mFilename); - mSavedAttributes = true; + public void setAttribute(String tag, String value) { + mAttributes.put(tag, value); } /** - * Returns a HashMap loaded with the Exif attributes of the file. The key - * is the standard tag name and the value is the tag's value: e.g. - * Model -> Nikon. Numeric values are returned as strings. + * Initialize mAttributes with the attributes from the file mFilename. + * + * mAttributes is a HashMap which stores the Exif attributes of the file. + * The key is the standard tag name and the value is the tag's value: e.g. + * Model -> Nikon. Numeric values are stored as strings. 
+ * + * This function also initialize mHasThumbnail to indicate whether the + * file has a thumbnail inside. */ - public HashMap<String, String> getAttributes() { - if (mCachedAttributes != null) { - return mCachedAttributes; - } + private void loadAttributes() throws IOException { // format of string passed from native C code: // "attrCnt attr1=valueLen value1attr2=value2Len value2..." // example: // "4 attrPtr ImageLength=4 1024Model=6 FooImageWidth=4 1280Make=3 FOO" - mCachedAttributes = new HashMap<String, String>(); + mAttributes = new HashMap<String, String>(); - String attrStr = getAttributesNative(mFilename); + String attrStr; + synchronized (sLock) { + attrStr = getAttributesNative(mFilename); + } // get count int ptr = attrStr.indexOf(' '); @@ -212,116 +163,108 @@ public class ExifInterface { if (attrName.equals("hasThumbnail")) { mHasThumbnail = attrValue.equalsIgnoreCase("true"); } else { - mCachedAttributes.put(attrName, attrValue); + mAttributes.put(attrName, attrValue); } } - return mCachedAttributes; } /** - * Given a numerical white balance value, return a - * human-readable string describing it. + * Save the tag data into the JPEG file. This is expensive because it involves + * copying all the JPG data from one file to another and deleting the old file + * and renaming the other. It's best to use {@link #setAttribute(String,String)} + * to set all attributes to write and make a single call rather than multiple + * calls for each attribute. */ - public static String whiteBalanceToString(int whitebalance) { - switch (whitebalance) { - case WHITEBALANCE_AUTO: - return "Auto"; - case WHITEBALANCE_MANUAL: - return "Manual"; - default: - return ""; + public void saveAttributes() throws IOException { + // format of string passed to native C code: + // "attrCnt attr1=valueLen value1attr2=value2Len value2..." 
+ // example: + // "4 attrPtr ImageLength=4 1024Model=6 FooImageWidth=4 1280Make=3 FOO" + StringBuilder sb = new StringBuilder(); + int size = mAttributes.size(); + if (mAttributes.containsKey("hasThumbnail")) { + --size; + } + sb.append(size + " "); + for (Map.Entry<String, String> iter : mAttributes.entrySet()) { + String key = iter.getKey(); + if (key.equals("hasThumbnail")) { + // this is a fake attribute not saved as an exif tag + continue; + } + String val = iter.getValue(); + sb.append(key + "="); + sb.append(val.length() + " "); + sb.append(val); + } + String s = sb.toString(); + synchronized (sLock) { + saveAttributesNative(mFilename, s); + commitChangesNative(mFilename); } } /** - * Given a numerical orientation, return a human-readable string describing - * the orientation. + * Returns true if the JPEG file has a thumbnail. */ - public static String orientationToString(int orientation) { - // TODO: this function needs to be localized and use string resource ids - // rather than strings - String orientationString; - switch (orientation) { - case ORIENTATION_NORMAL: - orientationString = "Normal"; - break; - case ORIENTATION_FLIP_HORIZONTAL: - orientationString = "Flipped horizontal"; - break; - case ORIENTATION_ROTATE_180: - orientationString = "Rotated 180 degrees"; - break; - case ORIENTATION_FLIP_VERTICAL: - orientationString = "Upside down mirror"; - break; - case ORIENTATION_TRANSPOSE: - orientationString = "Transposed"; - break; - case ORIENTATION_ROTATE_90: - orientationString = "Rotated 90 degrees"; - break; - case ORIENTATION_TRANSVERSE: - orientationString = "Transversed"; - break; - case ORIENTATION_ROTATE_270: - orientationString = "Rotated 270 degrees"; - break; - default: - orientationString = "Undefined"; - break; - } - return orientationString; + public boolean hasThumbnail() { + return mHasThumbnail; } /** - * Copies the thumbnail data out of the filename and puts it in the Exif - * data associated with the file used to create this 
object. You must call - * "commitChanges()" at some point to commit the changes. + * Returns the thumbnail inside the JPEG file, or {@code null} if there is no thumbnail. + * The returned data is in JPEG format and can be decoded using + * {@link android.graphics.BitmapFactory#decodeByteArray(byte[],int,int)} */ - public boolean appendThumbnail(String thumbnailFileName) { - if (!mSavedAttributes) { - throw new RuntimeException("Must call saveAttributes " - + "before calling appendThumbnail"); + public byte[] getThumbnail() { + synchronized (sLock) { + return getThumbnailNative(mFilename); } - mHasThumbnail = appendThumbnailNative(mFilename, thumbnailFileName); - return mHasThumbnail; } - public boolean hasThumbnail() { - if (!mSavedAttributes) { - getAttributes(); + /** + * Stores the latitude and longitude value in a float array. The first element is + * the latitude, and the second element is the longitude. Returns false if the + * Exif tags are not available. + */ + public boolean getLatLong(float output[]) { + String latValue = mAttributes.get(ExifInterface.TAG_GPS_LATITUDE); + String latRef = mAttributes.get(ExifInterface.TAG_GPS_LATITUDE_REF); + String lngValue = mAttributes.get(ExifInterface.TAG_GPS_LONGITUDE); + String lngRef = mAttributes.get(ExifInterface.TAG_GPS_LONGITUDE_REF); + + if (latValue != null && latRef != null && lngValue != null && lngRef != null) { + output[0] = convertRationalLatLonToFloat(latValue, latRef); + output[1] = convertRationalLatLonToFloat(lngValue, lngRef); + return true; + } else { + return false; } - return mHasThumbnail; } - public byte[] getThumbnail() { - return getThumbnailNative(mFilename); - } + private static SimpleDateFormat sFormatter = + new SimpleDateFormat("yyyy:MM:dd HH:mm:ss"); - public static float[] getLatLng(HashMap<String, String> exifData) { - if (exifData == null) { - return null; - } + /** + * Returns number of milliseconds since Jan. 1, 1970, midnight GMT. 
+ * Returns -1 if the date time information if not available. + * @hide + */ + public long getDateTime() { + String dateTimeString = mAttributes.get(TAG_DATETIME); + if (dateTimeString == null) return -1; - String latValue = exifData.get(ExifInterface.TAG_GPS_LATITUDE); - String latRef = exifData.get(ExifInterface.TAG_GPS_LATITUDE_REF); - String lngValue = exifData.get(ExifInterface.TAG_GPS_LONGITUDE); - String lngRef = exifData.get(ExifInterface.TAG_GPS_LONGITUDE_REF); - float[] latlng = null; - - if (latValue != null && latRef != null - && lngValue != null && lngRef != null) { - latlng = new float[2]; - latlng[0] = ExifInterface.convertRationalLatLonToFloat( - latValue, latRef); - latlng[1] = ExifInterface.convertRationalLatLonToFloat( - lngValue, lngRef); + ParsePosition pos = new ParsePosition(0); + try { + Date date = sFormatter.parse(dateTimeString, pos); + if (date == null) return -1; + return date.getTime(); + } catch (IllegalArgumentException ex) { + return -1; } - - return latlng; } - public static float convertRationalLatLonToFloat( + private static float convertRationalLatLonToFloat( String rationalString, String ref) { try { String [] parts = rationalString.split(","); @@ -351,42 +294,6 @@ public class ExifInterface { } } - public static String convertRationalLatLonToDecimalString( - String rationalString, String ref, boolean usePositiveNegative) { - float result = convertRationalLatLonToFloat(rationalString, ref); - - String preliminaryResult = String.valueOf(result); - if (usePositiveNegative) { - String neg = (ref.equals("S") || ref.equals("E")) ? 
"-" : ""; - return neg + preliminaryResult; - } else { - return preliminaryResult + String.valueOf((char) 186) + " " - + ref; - } - } - - public static String makeLatLongString(double d) { - d = Math.abs(d); - - int degrees = (int) d; - - double remainder = d - degrees; - int minutes = (int) (remainder * 60D); - // really seconds * 1000 - int seconds = (int) (((remainder * 60D) - minutes) * 60D * 1000D); - - String retVal = degrees + "/1," + minutes + "/1," + seconds + "/1000"; - return retVal; - } - - public static String makeLatStringRef(double lat) { - return lat >= 0D ? "N" : "S"; - } - - public static String makeLonStringRef(double lon) { - return lon >= 0D ? "W" : "E"; - } - private native boolean appendThumbnailNative(String fileName, String thumbnailFileName); diff --git a/media/java/android/media/IAudioService.aidl b/media/java/android/media/IAudioService.aidl index 9a8264f031fc..d3d2d29ece8d 100644 --- a/media/java/android/media/IAudioService.aidl +++ b/media/java/android/media/IAudioService.aidl @@ -29,9 +29,9 @@ interface IAudioService { void setStreamVolume(int streamType, int index, int flags); - void setStreamSolo(int streamType, boolean state, IBinder cb); + void setStreamSolo(int streamType, boolean state, IBinder cb); - void setStreamMute(int streamType, boolean state, IBinder cb); + void setStreamMute(int streamType, boolean state, IBinder cb); int getStreamVolume(int streamType); @@ -46,23 +46,11 @@ interface IAudioService { int getVibrateSetting(int vibrateType); boolean shouldVibrate(int vibrateType); - - void setMicrophoneMute(boolean on); - - boolean isMicrophoneMute(); void setMode(int mode); int getMode(); - void setRouting(int mode, int routes, int mask); - - int getRouting(int mode); - - boolean isMusicActive(); - - void setParameter(String key, String value); - oneway void playSoundEffect(int effectType); oneway void playSoundEffectVolume(int effectType, float volume); @@ -72,4 +60,12 @@ interface IAudioService { oneway void 
unloadSoundEffects(); oneway void reloadAudioSettings(); + + void setSpeakerphoneOn(boolean on); + + boolean isSpeakerphoneOn(); + + void setBluetoothScoOn(boolean on); + + boolean isBluetoothScoOn(); } diff --git a/media/java/android/media/JetPlayer.java b/media/java/android/media/JetPlayer.java index d75d81d92e53..1570db46f226 100644 --- a/media/java/android/media/JetPlayer.java +++ b/media/java/android/media/JetPlayer.java @@ -89,7 +89,7 @@ public class JetPlayer // Jet rendering audio parameters private static final int JET_OUTPUT_RATE = 22050; // _SAMPLE_RATE_22050 in Android.mk private static final int JET_OUTPUT_CHANNEL_CONFIG = - AudioFormat.CHANNEL_CONFIGURATION_STEREO; // NUM_OUTPUT_CHANNELS=2 in Android.mk + AudioFormat.CHANNEL_OUT_STEREO; // NUM_OUTPUT_CHANNELS=2 in Android.mk //-------------------------------------------- diff --git a/media/java/android/media/MediaFile.java b/media/java/android/media/MediaFile.java index 8be11df0fd27..8180a7cfbc30 100644 --- a/media/java/android/media/MediaFile.java +++ b/media/java/android/media/MediaFile.java @@ -17,6 +17,7 @@ package android.media; import android.content.ContentValues; +import android.os.SystemProperties; import android.provider.MediaStore.Audio; import android.provider.MediaStore.Images; import android.provider.MediaStore.Video; @@ -41,8 +42,9 @@ public class MediaFile { public static final int FILE_TYPE_AWB = 5; public static final int FILE_TYPE_WMA = 6; public static final int FILE_TYPE_OGG = 7; + public static final int FILE_TYPE_AAC = 8; private static final int FIRST_AUDIO_FILE_TYPE = FILE_TYPE_MP3; - private static final int LAST_AUDIO_FILE_TYPE = FILE_TYPE_OGG; + private static final int LAST_AUDIO_FILE_TYPE = FILE_TYPE_AAC; // MIDI file types public static final int FILE_TYPE_MID = 11; @@ -57,8 +59,9 @@ public class MediaFile { public static final int FILE_TYPE_3GPP = 23; public static final int FILE_TYPE_3GPP2 = 24; public static final int FILE_TYPE_WMV = 25; + public static final int 
FILE_TYPE_ASF = 26; private static final int FIRST_VIDEO_FILE_TYPE = FILE_TYPE_MP4; - private static final int LAST_VIDEO_FILE_TYPE = FILE_TYPE_WMV; + private static final int LAST_VIDEO_FILE_TYPE = FILE_TYPE_ASF; // Image file types public static final int FILE_TYPE_JPEG = 31; @@ -101,9 +104,12 @@ public class MediaFile { addFileType("WAV", FILE_TYPE_WAV, "audio/x-wav"); addFileType("AMR", FILE_TYPE_AMR, "audio/amr"); addFileType("AWB", FILE_TYPE_AWB, "audio/amr-wb"); - addFileType("WMA", FILE_TYPE_WMA, "audio/x-ms-wma"); + if (SystemProperties.getInt("ro.media.dec.aud.wma.enabled", 0) != 0) { + addFileType("WMA", FILE_TYPE_WMA, "audio/x-ms-wma"); + } addFileType("OGG", FILE_TYPE_OGG, "application/ogg"); addFileType("OGA", FILE_TYPE_OGG, "application/ogg"); + addFileType("AAC", FILE_TYPE_AAC, "audio/aac"); addFileType("MID", FILE_TYPE_MID, "audio/midi"); addFileType("MIDI", FILE_TYPE_MID, "audio/midi"); @@ -120,7 +126,10 @@ public class MediaFile { addFileType("3GPP", FILE_TYPE_3GPP, "video/3gpp"); addFileType("3G2", FILE_TYPE_3GPP2, "video/3gpp2"); addFileType("3GPP2", FILE_TYPE_3GPP2, "video/3gpp2"); - addFileType("WMV", FILE_TYPE_WMV, "video/x-ms-wmv"); + if (SystemProperties.getInt("ro.media.dec.vid.wmv.enabled", 0) != 0) { + addFileType("WMV", FILE_TYPE_WMV, "video/x-ms-wmv"); + addFileType("ASF", FILE_TYPE_ASF, "video/x-ms-asf"); + } addFileType("JPG", FILE_TYPE_JPEG, "image/jpeg"); addFileType("JPEG", FILE_TYPE_JPEG, "image/jpeg"); diff --git a/media/java/android/media/MediaMetadataRetriever.java b/media/java/android/media/MediaMetadataRetriever.java index 3a49a5f36e64..cecf4f818014 100644 --- a/media/java/android/media/MediaMetadataRetriever.java +++ b/media/java/android/media/MediaMetadataRetriever.java @@ -35,6 +35,7 @@ public class MediaMetadataRetriever { static { System.loadLibrary("media_jni"); + native_init(); } // The field below is accessed by native methods @@ -211,7 +212,8 @@ public class MediaMetadataRetriever * allocated internally. 
*/ public native void release(); - private native void native_setup(); + private native void native_setup(); + private static native void native_init(); private native final void native_finalize(); @@ -252,5 +254,6 @@ public class MediaMetadataRetriever public static final int METADATA_KEY_VIDEO_FORMAT = 18; public static final int METADATA_KEY_VIDEO_HEIGHT = 19; public static final int METADATA_KEY_VIDEO_WIDTH = 20; + public static final int METADATA_KEY_WRITER = 21; // Add more here... } diff --git a/media/java/android/media/MediaPlayer.java b/media/java/android/media/MediaPlayer.java index e94137e7bb67..07542ed7e915 100644 --- a/media/java/android/media/MediaPlayer.java +++ b/media/java/android/media/MediaPlayer.java @@ -23,6 +23,7 @@ import android.net.Uri; import android.os.Handler; import android.os.Looper; import android.os.Message; +import android.os.Parcel; import android.os.ParcelFileDescriptor; import android.os.PowerManager; import android.util.Log; @@ -33,7 +34,7 @@ import android.media.AudioManager; import java.io.FileDescriptor; import java.io.IOException; - +import java.util.Set; import java.lang.ref.WeakReference; /** @@ -430,11 +431,49 @@ import java.lang.ref.WeakReference; */ public class MediaPlayer { + /** + Constant to retrieve only the new metadata since the last + call. + // FIXME: unhide. + // FIXME: add link to getMetadata(boolean, boolean) + {@hide} + */ + public static final boolean METADATA_UPDATE_ONLY = true; + + /** + Constant to retrieve all the metadata. + // FIXME: unhide. + // FIXME: add link to getMetadata(boolean, boolean) + {@hide} + */ + public static final boolean METADATA_ALL = false; + + /** + Constant to enable the metadata filter during retrieval. + // FIXME: unhide. + // FIXME: add link to getMetadata(boolean, boolean) + {@hide} + */ + public static final boolean APPLY_METADATA_FILTER = true; + + /** + Constant to disable the metadata filter during retrieval. + // FIXME: unhide. 
+ // FIXME: add link to getMetadata(boolean, boolean) + {@hide} + */ + public static final boolean BYPASS_METADATA_FILTER = false; + static { System.loadLibrary("media_jni"); + native_init(); } private final static String TAG = "MediaPlayer"; + // Name of the remote interface for the media player. Must be kept + // in sync with the 2nd parameter of the IMPLEMENT_META_INTERFACE + // macro invocation in IMediaPlayer.cpp + private final static String IMEDIA_PLAYER = "android.media.IMediaPlayer"; private int mNativeContext; // accessed by native methods private int mListenerContext; // accessed by native methods @@ -475,6 +514,45 @@ public class MediaPlayer private native void _setVideoSurface(); /** + * Create a request parcel which can be routed to the native media + * player using {@link #invoke(Parcel, Parcel)}. The Parcel + * returned has the proper InterfaceToken set. The caller should + * not overwrite that token, i.e it can only append data to the + * Parcel. + * + * @return A parcel suitable to hold a request for the native + * player. + * {@hide} + */ + public Parcel newRequest() { + Parcel parcel = Parcel.obtain(); + parcel.writeInterfaceToken(IMEDIA_PLAYER); + return parcel; + } + + /** + * Invoke a generic method on the native player using opaque + * parcels for the request and reply. Both payloads' format is a + * convention between the java caller and the native player. + * Must be called after setDataSource to make sure a native player + * exists. + * + * @param request Parcel with the data for the extension. The + * caller must use {@link #newRequest()} to get one. + * + * @param reply Output parcel with the data returned by the + * native player. + * + * @return The status code see utils/Errors.h + * {@hide} + */ + public int invoke(Parcel request, Parcel reply) { + int retcode = native_invoke(request, reply); + reply.setDataPosition(0); + return retcode; + } + + /** * Sets the SurfaceHolder to use for displaying the video portion of the media. 
* This call is optional. Not calling it when playing back a video will * result in only the audio track being played. @@ -843,6 +921,89 @@ public class MediaPlayer public native int getDuration(); /** + * Gets the media metadata. + * + * @param update_only controls whether the full set of available + * metadata is returned or just the set that changed since the + * last call. See {@see #METADATA_UPDATE_ONLY} and {@see + * #METADATA_ALL}. + * + * @param apply_filter if true only metadata that matches the + * filter is returned. See {@see #APPLY_METADATA_FILTER} and {@see + * #BYPASS_METADATA_FILTER}. + * + * @return The metadata, possibly empty. null if an error occured. + // FIXME: unhide. + * {@hide} + */ + public Metadata getMetadata(final boolean update_only, + final boolean apply_filter) { + Parcel reply = Parcel.obtain(); + Metadata data = new Metadata(); + + if (!native_getMetadata(update_only, apply_filter, reply)) { + reply.recycle(); + return null; + } + + // Metadata takes over the parcel, don't recycle it unless + // there is an error. + if (!data.parse(reply)) { + reply.recycle(); + return null; + } + return data; + } + + /** + * Set a filter for the metadata update notification and update + * retrieval. The caller provides 2 set of metadata keys, allowed + * and blocked. The blocked set always takes precedence over the + * allowed one. + * Metadata.MATCH_ALL and Metadata.MATCH_NONE are 2 sets available as + * shorthands to allow/block all or no metadata. + * + * By default, there is no filter set. + * + * @param allow Is the set of metadata the client is interested + * in receiving new notifications for. + * @param block Is the set of metadata the client is not interested + * in receiving new notifications for. + * @return The call status code. + * + // FIXME: unhide. 
+ * {@hide} + */ + public int setMetadataFilter(Set<Integer> allow, Set<Integer> block) { + // Do our serialization manually instead of calling + // Parcel.writeArray since the sets are made of the same type + // we avoid paying the price of calling writeValue (used by + // writeArray) which burns an extra int per element to encode + // the type. + Parcel request = newRequest(); + + // The parcel starts already with an interface token. There + // are 2 filters. Each one starts with a 4bytes number to + // store the len followed by a number of int (4 bytes as well) + // representing the metadata type. + int capacity = request.dataSize() + 4 * (1 + allow.size() + 1 + block.size()); + + if (request.dataCapacity() < capacity) { + request.setDataCapacity(capacity); + } + + request.writeInt(allow.size()); + for(Integer t: allow) { + request.writeInt(t); + } + request.writeInt(block.size()); + for(Integer t: block) { + request.writeInt(t); + } + return native_setMetadataFilter(request); + } + + /** * Releases resources associated with this MediaPlayer object. * It is considered good practice to call this method when you're * done using the MediaPlayer. @@ -920,8 +1081,46 @@ public class MediaPlayer */ public native Bitmap getFrameAt(int msec) throws IllegalStateException; + /** + * @param request Parcel destinated to the media player. The + * Interface token must be set to the IMediaPlayer + * one to be routed correctly through the system. + * @param reply[out] Parcel that will contain the reply. + * @return The status code. + */ + private native final int native_invoke(Parcel request, Parcel reply); + + + /** + * @param update_only If true fetch only the set of metadata that have + * changed since the last invocation of getMetadata. + * The set is built using the unfiltered + * notifications the native player sent to the + * MediaPlayerService during that period of + * time. If false, all the metadatas are considered. 
+ * @param apply_filter If true, once the metadata set has been built based on + * the value update_only, the current filter is applied. + * @param reply[out] On return contains the serialized + * metadata. Valid only if the call was successful. + * @return The status code. + */ + private native final boolean native_getMetadata(boolean update_only, + boolean apply_filter, + Parcel reply); + + /** + * @param request Parcel with the 2 serialized lists of allowed + * metadata types followed by the one to be + * dropped. Each list starts with an integer + * indicating the number of metadata type elements. + * @return The status code. + */ + private native final int native_setMetadataFilter(Parcel request); + + private static native final void native_init(); private native final void native_setup(Object mediaplayer_this); private native final void native_finalize(); + @Override protected void finalize() { native_finalize(); } @@ -1259,6 +1458,11 @@ public class MediaPlayer */ public static final int MEDIA_INFO_NOT_SEEKABLE = 801; + /** A new set of metadata is available. + * @see android.media.MediaPlayer.OnInfoListener + */ + public static final int MEDIA_INFO_METADATA_UPDATE = 802; + /** * Interface definition of a callback to be invoked to communicate some * info and/or warning about the media or its playback. @@ -1275,6 +1479,7 @@ public class MediaPlayer * <li>{@link #MEDIA_INFO_VIDEO_TRACK_LAGGING} * <li>{@link #MEDIA_INFO_BAD_INTERLEAVING} * <li>{@link #MEDIA_INFO_NOT_SEEKABLE} + * <li>{@link #MEDIA_INFO_METADATA_UPDATE} * </ul> * @param extra an extra code, specific to the info. Typically * implementation dependant. 
@@ -1296,4 +1501,9 @@ public class MediaPlayer } private OnInfoListener mOnInfoListener; + + /** + * @hide + */ + public native static int snoop(short [] outData, int kind); } diff --git a/media/java/android/media/MediaRecorder.java b/media/java/android/media/MediaRecorder.java index be4b489eb086..4203cbadb001 100644 --- a/media/java/android/media/MediaRecorder.java +++ b/media/java/android/media/MediaRecorder.java @@ -57,6 +57,7 @@ public class MediaRecorder { static { System.loadLibrary("media_jni"); + native_init(); } private final static String TAG = "MediaRecorder"; @@ -134,6 +135,12 @@ public class MediaRecorder /** Voice call uplink + downlink audio source */ public static final int VOICE_CALL = 4; + + /** Microphone audio source with same orientation as camera */ + public static final int CAMCORDER = 5; + + /** Microphone audio source tuned for voice recognition */ + public static final int VOICE_RECOGNITION = 6; } /** @@ -167,7 +174,7 @@ public class MediaRecorder /** The following formats are audio only .aac or .amr formats **/ /** @deprecated Deprecated in favor of AMR_NB */ - /** @todo change link when AMR_NB is exposed. Deprecated in favor of {@link MediaRecorder.OutputFormat#AMR_NB} */ + /** TODO: change link when AMR_NB is exposed. Deprecated in favor of MediaRecorder.OutputFormat.AMR_NB */ public static final int RAW_AMR = 3; /** @hide AMR NB file format */ public static final int AMR_NB = 3; @@ -273,7 +280,7 @@ public class MediaRecorder * Gets the maximum value for audio sources. * @see android.media.MediaRecorder.AudioSource */ - public static final int getAudioSourceMax() { return AudioSource.VOICE_CALL; } + public static final int getAudioSourceMax() { return AudioSource.VOICE_RECOGNITION; } /** * Sets the video source to be used for recording. 
If this method is not @@ -655,6 +662,8 @@ public class MediaRecorder */ public native void release(); + private static native final void native_init(); + private native final void native_setup(Object mediarecorder_this) throws IllegalStateException; private native final void native_finalize(); diff --git a/media/java/android/media/MediaScanner.java b/media/java/android/media/MediaScanner.java index 8db874a70757..afdc8f77c267 100644 --- a/media/java/android/media/MediaScanner.java +++ b/media/java/android/media/MediaScanner.java @@ -99,6 +99,7 @@ public class MediaScanner { static { System.loadLibrary("media_jni"); + native_init(); } private final static String TAG = "MediaScanner"; @@ -307,10 +308,14 @@ public class MediaScanner private boolean mDefaultRingtoneSet; /** Whether the scanner has set a default sound for the notification ringtone. */ private boolean mDefaultNotificationSet; + /** Whether the scanner has set a default sound for the alarm ringtone. */ + private boolean mDefaultAlarmSet; /** The filename for the default sound for the ringer ringtone. */ private String mDefaultRingtoneFilename; /** The filename for the default sound for the notification ringtone. */ private String mDefaultNotificationFilename; + /** The filename for the default sound for the alarm ringtone. */ + private String mDefaultAlarmAlertFilename; /** * The prefix for system properties that define the default sound for * ringtones. 
Concatenate the name of the setting from Settings @@ -369,6 +374,8 @@ public class MediaScanner + Settings.System.RINGTONE); mDefaultNotificationFilename = SystemProperties.get(DEFAULT_RINGTONE_PROPERTY_PREFIX + Settings.System.NOTIFICATION_SOUND); + mDefaultAlarmAlertFilename = SystemProperties.get(DEFAULT_RINGTONE_PROPERTY_PREFIX + + Settings.System.ALARM_ALERT); } private MyMediaScannerClient mClient = new MyMediaScannerClient(); @@ -389,6 +396,7 @@ public class MediaScanner private String mPath; private long mLastModified; private long mFileSize; + private String mWriter; public FileCacheEntry beginFile(String path, String mimeType, long lastModified, long fileSize) { @@ -472,11 +480,14 @@ public class MediaScanner mDuration = 0; mPath = path; mLastModified = lastModified; + mWriter = null; return entry; } public void scanFile(String path, long lastModified, long fileSize) { + // This is the callback funtion from native codes. + // Log.v(TAG, "scanFile: "+path); doScanFile(path, null, lastModified, fileSize, false); } @@ -484,6 +495,22 @@ public class MediaScanner doScanFile(path, mimeType, lastModified, fileSize, false); } + private boolean isMetadataSupported(int fileType) { + if (mFileType == MediaFile.FILE_TYPE_MP3 || + mFileType == MediaFile.FILE_TYPE_MP4 || + mFileType == MediaFile.FILE_TYPE_M4A || + mFileType == MediaFile.FILE_TYPE_3GPP || + mFileType == MediaFile.FILE_TYPE_3GPP2 || + mFileType == MediaFile.FILE_TYPE_OGG || + mFileType == MediaFile.FILE_TYPE_AAC || + mFileType == MediaFile.FILE_TYPE_MID || + mFileType == MediaFile.FILE_TYPE_WMA) { + // we only extract metadata from MP3, M4A, OGG, MID, AAC and WMA files. + // check MP4 files, to determine if they contain only audio. 
+ return true; + } + return false; + } public Uri doScanFile(String path, String mimeType, long lastModified, long fileSize, boolean scanAlways) { Uri result = null; // long t1 = System.currentTimeMillis(); @@ -499,16 +526,7 @@ public class MediaScanner boolean music = (lowpath.indexOf(MUSIC_DIR) > 0) || (!ringtones && !notifications && !alarms && !podcasts); - if (mFileType == MediaFile.FILE_TYPE_MP3 || - mFileType == MediaFile.FILE_TYPE_MP4 || - mFileType == MediaFile.FILE_TYPE_M4A || - mFileType == MediaFile.FILE_TYPE_3GPP || - mFileType == MediaFile.FILE_TYPE_3GPP2 || - mFileType == MediaFile.FILE_TYPE_OGG || - mFileType == MediaFile.FILE_TYPE_MID || - mFileType == MediaFile.FILE_TYPE_WMA) { - // we only extract metadata from MP3, M4A, OGG, MID and WMA files. - // check MP4 files, to determine if they contain only audio. + if( isMetadataSupported(mFileType) ) { processFile(path, mimeType, this); } else if (MediaFile.isImageFileType(mFileType)) { // we used to compute the width and height but it's not worth it @@ -586,10 +604,19 @@ public class MediaScanner mTrack = (num * 1000) + (mTrack % 1000); } else if (name.equalsIgnoreCase("duration")) { mDuration = parseSubstring(value, 0, 0); + } else if (name.equalsIgnoreCase("writer") || name.startsWith("writer;")) { + mWriter = value.trim(); } } public void setMimeType(String mimeType) { + if ("audio/mp4".equals(mMimeType) && + mimeType.startsWith("video")) { + // for feature parity with Donut, we force m4a files to keep the + // audio/mp4 mimetype, even if they are really "enhanced podcasts" + // with a video track + return; + } mMimeType = mimeType; mFileType = MediaFile.getFileTypeForMimeType(mimeType); } @@ -701,14 +728,45 @@ public class MediaScanner values.put(Audio.Media.IS_MUSIC, music); values.put(Audio.Media.IS_PODCAST, podcasts); } else if (mFileType == MediaFile.FILE_TYPE_JPEG) { - HashMap<String, String> exifData = - ExifInterface.loadExifData(entry.mPath); - if (exifData != null) { - float[] latlng = 
ExifInterface.getLatLng(exifData); - if (latlng != null) { + ExifInterface exif = null; + try { + exif = new ExifInterface(entry.mPath); + } catch (IOException ex) { + // exif is null + } + if (exif != null) { + float[] latlng = new float[2]; + if (exif.getLatLong(latlng)) { values.put(Images.Media.LATITUDE, latlng[0]); values.put(Images.Media.LONGITUDE, latlng[1]); } + + long time = exif.getDateTime(); + if (time != -1) { + values.put(Images.Media.DATE_TAKEN, time); + } + + int orientation = exif.getAttributeInt( + ExifInterface.TAG_ORIENTATION, -1); + if (orientation != -1) { + // We only recognize a subset of orientation tag values. + int degree; + switch(orientation) { + case ExifInterface.ORIENTATION_ROTATE_90: + degree = 90; + break; + case ExifInterface.ORIENTATION_ROTATE_180: + degree = 180; + break; + case ExifInterface.ORIENTATION_ROTATE_270: + degree = 270; + break; + default: + degree = 0; + break; + } + values.put(Images.Media.ORIENTATION, degree); + } } } @@ -779,6 +837,12 @@ public class MediaScanner setSettingIfNotSet(Settings.System.RINGTONE, tableUri, rowId); mDefaultRingtoneSet = true; } + } else if (alarms && !mDefaultAlarmSet) { + if (TextUtils.isEmpty(mDefaultAlarmAlertFilename) || + doesPathHaveFilename(entry.mPath, mDefaultAlarmAlertFilename)) { + setSettingIfNotSet(Settings.System.ALARM_ALERT, tableUri, rowId); + mDefaultAlarmSet = true; + } } return result; @@ -803,6 +867,22 @@ public class MediaScanner } } + public void addNoMediaFolder(String path) { + ContentValues values = new ContentValues(); + values.put(MediaStore.Images.ImageColumns.DATA, ""); + String [] pathSpec = new String[] {path + '%'}; + try { + mMediaProvider.update(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, values, + MediaStore.Images.ImageColumns.DATA + " LIKE ?", pathSpec); + mMediaProvider.update(MediaStore.Video.Media.EXTERNAL_CONTENT_URI, values, + MediaStore.Images.ImageColumns.DATA + " LIKE ?", pathSpec); + 
mMediaProvider.update(MediaStore.Audio.Media.EXTERNAL_CONTENT_URI, values, + MediaStore.Images.ImageColumns.DATA + " LIKE ?", pathSpec); + } catch (RemoteException e) { + throw new RuntimeException(); + } + } + }; // end of anonymous MediaScannerClient instance private void prescan(String filePath) throws RemoteException { @@ -1200,7 +1280,8 @@ public class MediaScanner } } - if (bestMatch == null) { + // if the match is not for an audio file, bail out + if (bestMatch == null || ! mAudioUri.equals(bestMatch.mTableUri)) { return false; } @@ -1412,6 +1493,7 @@ public class MediaScanner public native byte[] extractAlbumArt(FileDescriptor fd); + private static native final void native_init(); private native final void native_setup(); private native final void native_finalize(); @Override diff --git a/media/java/android/media/MediaScannerClient.java b/media/java/android/media/MediaScannerClient.java index cf1a8da2d5a7..258c3b495605 100644 --- a/media/java/android/media/MediaScannerClient.java +++ b/media/java/android/media/MediaScannerClient.java @@ -25,11 +25,13 @@ public interface MediaScannerClient public void scanFile(String path, String mimeType, long lastModified, long fileSize); + public void addNoMediaFolder(String path); + /** * Called by native code to return metadata extracted from media files. */ public void handleStringTag(String name, String value); - + /** * Called by native code to return mime type extracted from DRM content. */ diff --git a/media/java/android/media/Metadata.java b/media/java/android/media/Metadata.java new file mode 100644 index 000000000000..bd25da21c86e --- /dev/null +++ b/media/java/android/media/Metadata.java @@ -0,0 +1,418 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.media; + +import android.graphics.Bitmap; +import android.os.Parcel; +import android.util.Log; + +import java.util.Calendar; +import java.util.Collections; +import java.util.Date; +import java.util.HashMap; +import java.util.Set; +import java.util.TimeZone; + + +/** + Class to hold the media's metadata. Metadata are used + for human consumption and can be embedded in the media (e.g + shoutcast) or available from an external source. The source can be + local (e.g thumbnail stored in the DB) or remote (e.g caption + server). + + Metadata is like a Bundle. It is sparse and each key can occur at + most once. The key is an integer and the value is the actual metadata. + + The caller is expected to know the type of the metadata and call + the right get* method to fetch its value. + + // FIXME: unhide. + {@hide} + */ +public class Metadata +{ + // The metadata are keyed using integers rather than more heavy + // weight strings. We considered using Bundle to ship the metadata + // between the native layer and the java layer but dropped that + // option since keeping in sync a native implementation of Bundle + // and the java one would be too burdensome. Besides Bundle uses + // String for its keys. + // The key range [0 8192) is reserved for the system. + // + // We manually serialize the data in Parcels. For large memory + // blob (bitmaps, raw pictures) we use MemoryFile which allow the + // client to make the data purge-able once it is done with it. 
+ // + + public static final int ANY = 0; // Never used for metadata returned, only for filtering. + // Keep in sync with kAny in MediaPlayerService.cpp + + // TODO: Should we use numbers compatible with the metadata retriever? + public static final int TITLE = 1; // String + public static final int COMMENT = 2; // String + public static final int COPYRIGHT = 3; // String + public static final int ALBUM = 4; // String + public static final int ARTIST = 5; // String + public static final int AUTHOR = 6; // String + public static final int COMPOSER = 7; // String + public static final int GENRE = 8; // String + public static final int DATE = 9; // Date + public static final int DURATION = 10; // Integer(millisec) + public static final int CD_TRACK_NUM = 11; // Integer 1-based + public static final int CD_TRACK_MAX = 12; // Integer + public static final int RATING = 13; // String + public static final int ALBUM_ART = 14; // byte[] + public static final int VIDEO_FRAME = 15; // Bitmap + public static final int CAPTION = 16; // TimedText + + public static final int BIT_RATE = 17; // Integer, Aggregate rate of + // all the streams in bps. + + public static final int AUDIO_BIT_RATE = 18; // Integer, bps + public static final int VIDEO_BIT_RATE = 19; // Integer, bps + public static final int AUDIO_SAMPLE_RATE = 20; // Integer, Hz + public static final int VIDEO_FRAME_RATE = 21; // Integer, Hz + + // See RFC2046 and RFC4281. + public static final int MIME_TYPE = 22; // String + public static final int AUDIO_CODEC = 23; // String + public static final int VIDEO_CODEC = 24; // String + + public static final int VIDEO_HEIGHT = 25; // Integer + public static final int VIDEO_WIDTH = 26; // Integer + public static final int NUM_TRACKS = 27; // Integer + public static final int DRM_CRIPPLED = 28; // Boolean + + // Playback capabilities. 
+ public static final int PAUSE_AVAILABLE = 29; // Boolean + public static final int SEEK_BACKWARD_AVAILABLE = 30; // Boolean + public static final int SEEK_FORWARD_AVAILABLE = 31; // Boolean + + private static final int LAST_SYSTEM = 31; + private static final int FIRST_CUSTOM = 8192; + + // Shorthands to set the MediaPlayer's metadata filter. + public static final Set<Integer> MATCH_NONE = Collections.EMPTY_SET; + public static final Set<Integer> MATCH_ALL = Collections.singleton(ANY); + + public static final int STRING_VAL = 1; + public static final int INTEGER_VAL = 2; + public static final int BOOLEAN_VAL = 3; + public static final int LONG_VAL = 4; + public static final int DOUBLE_VAL = 5; + public static final int TIMED_TEXT_VAL = 6; + public static final int DATE_VAL = 7; + public static final int BYTE_ARRAY_VAL = 8; + // FIXME: misses a type for shared heap is missing (MemoryFile). + // FIXME: misses a type for bitmaps. + private static final int LAST_TYPE = 8; + + private static final String TAG = "media.Metadata"; + private static final int kInt32Size = 4; + private static final int kMetaHeaderSize = 2 * kInt32Size; // size + marker + private static final int kRecordHeaderSize = 3 * kInt32Size; // size + id + type + + private static final int kMetaMarker = 0x4d455441; // 'M' 'E' 'T' 'A' + + // After a successful parsing, set the parcel with the serialized metadata. + private Parcel mParcel; + + // Map to associate a Metadata key (e.g TITLE) with the offset of + // the record's payload in the parcel. + // Used to look up if a key was present too. + // Key: Metadata ID + // Value: Offset of the metadata type field in the record. + private final HashMap<Integer, Integer> mKeyToPosMap = + new HashMap<Integer, Integer>(); + + /** + * Helper class to hold a triple (time, duration, text). Can be used to + * implement caption. 
+ */ + public class TimedText { + private Date mTime; + private int mDuration; // millisec + private String mText; + + public TimedText(Date time, int duration, String text) { + mTime = time; + mDuration = duration; + mText = text; + } + + public String toString() { + StringBuilder res = new StringBuilder(80); + res.append(mTime).append("-").append(mDuration) + .append(":").append(mText); + return res.toString(); + } + } + + public Metadata() { } + + /** + * Go over all the records, collecting metadata keys and records' + * type field offset in the Parcel. These are stored in + * mKeyToPosMap for latter retrieval. + * Format of a metadata record: + <pre> + 1 2 3 + 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | record size | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | metadata key | // TITLE + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | metadata type | // STRING_VAL + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | | + | .... metadata payload .... | + | | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + </pre> + * @param parcel With the serialized records. + * @param bytesLeft How many bytes in the parcel should be processed. + * @return false if an error occurred during parsing. + */ + private boolean scanAllRecords(Parcel parcel, int bytesLeft) { + int recCount = 0; + boolean error = false; + + mKeyToPosMap.clear(); + while (bytesLeft > kRecordHeaderSize) { + final int start = parcel.dataPosition(); + // Check the size. + final int size = parcel.readInt(); + + if (size <= kRecordHeaderSize) { // at least 1 byte should be present. + Log.e(TAG, "Record is too short"); + error = true; + break; + } + + // Check the metadata key. 
+ final int metadataId = parcel.readInt(); + if (!checkMetadataId(metadataId)) { + error = true; + break; + } + + // Store the record offset which points to the type + // field so we can later on read/unmarshall the record + // payload. + if (mKeyToPosMap.containsKey(metadataId)) { + Log.e(TAG, "Duplicate metadata ID found"); + error = true; + break; + } + + mKeyToPosMap.put(metadataId, parcel.dataPosition()); + + // Check the metadata type. + final int metadataType = parcel.readInt(); + if (metadataType <= 0 || metadataType > LAST_TYPE) { + Log.e(TAG, "Invalid metadata type " + metadataType); + error = true; + break; + } + + // Skip to the next one. + parcel.setDataPosition(start + size); + bytesLeft -= size; + ++recCount; + } + + if (0 != bytesLeft || error) { + Log.e(TAG, "Ran out of data or error on record " + recCount); + mKeyToPosMap.clear(); + return false; + } else { + return true; + } + } + + /** + * Check a parcel containing metadata is well formed. The header + * is checked as well as the individual records format. However, the + * data inside the record is not checked because we do lazy access + * (we check/unmarshall only data the user asks for.) + * + * Format of a metadata parcel: + <pre> + 1 2 3 + 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | metadata total size | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | 'M' | 'E' | 'T' | 'A' | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | | + | .... metadata records .... | + | | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + </pre> + * + * @param parcel With the serialized data. Metadata keeps a + * reference on it to access it later on. The caller + * should not modify the parcel after this call (and + * not call recycle on it.) + * @return false if an error occurred. 
+ */ + public boolean parse(Parcel parcel) { + if (parcel.dataAvail() < kMetaHeaderSize) { + Log.e(TAG, "Not enough data " + parcel.dataAvail()); + return false; + } + + final int pin = parcel.dataPosition(); // to roll back in case of errors. + final int size = parcel.readInt(); + + // The extra kInt32Size below is to account for the int32 'size' just read. + if (parcel.dataAvail() + kInt32Size < size || size < kMetaHeaderSize) { + Log.e(TAG, "Bad size " + size + " avail " + parcel.dataAvail() + " position " + pin); + parcel.setDataPosition(pin); + return false; + } + + // Checks if the 'M' 'E' 'T' 'A' marker is present. + final int kShouldBeMetaMarker = parcel.readInt(); + if (kShouldBeMetaMarker != kMetaMarker ) { + Log.e(TAG, "Marker missing " + Integer.toHexString(kShouldBeMetaMarker)); + parcel.setDataPosition(pin); + return false; + } + + // Scan the records to collect metadata ids and offsets. + if (!scanAllRecords(parcel, size - kMetaHeaderSize)) { + parcel.setDataPosition(pin); + return false; + } + mParcel = parcel; + return true; + } + + /** + * @return The set of metadata ID found. + */ + public Set<Integer> keySet() { + return mKeyToPosMap.keySet(); + } + + /** + * @return true if a value is present for the given key. + */ + public boolean has(final int metadataId) { + if (!checkMetadataId(metadataId)) { + throw new IllegalArgumentException("Invalid key: " + metadataId); + } + return mKeyToPosMap.containsKey(metadataId); + } + + // Accessors. + // Caller must make sure the key is present using the {@code has} + // method otherwise a RuntimeException will occur. 
+ + public String getString(final int key) { + checkType(key, STRING_VAL); + return mParcel.readString(); + } + + public int getInt(final int key) { + checkType(key, INTEGER_VAL); + return mParcel.readInt(); + } + + public boolean getBoolean(final int key) { + checkType(key, BOOLEAN_VAL); + return mParcel.readInt() == 1; + } + + public long getLong(final int key) { + checkType(key, LONG_VAL); + return mParcel.readLong(); + } + + public double getDouble(final int key) { + checkType(key, DOUBLE_VAL); + return mParcel.readDouble(); + } + + public byte[] getByteArray(final int key) { + checkType(key, BYTE_ARRAY_VAL); + return mParcel.createByteArray(); + } + + public Date getDate(final int key) { + checkType(key, DATE_VAL); + final long timeSinceEpoch = mParcel.readLong(); + final String timeZone = mParcel.readString(); + + if (timeZone.length() == 0) { + return new Date(timeSinceEpoch); + } else { + TimeZone tz = TimeZone.getTimeZone(timeZone); + Calendar cal = Calendar.getInstance(tz); + + cal.setTimeInMillis(timeSinceEpoch); + return cal.getTime(); + } + } + + public TimedText getTimedText(final int key) { + checkType(key, TIMED_TEXT_VAL); + final Date startTime = new Date(mParcel.readLong()); // epoch + final int duration = mParcel.readInt(); // millisec + + return new TimedText(startTime, + duration, + mParcel.readString()); + } + + // @return the last available system metadata id. Ids are + // 1-indexed. + public static int lastSytemId() { return LAST_SYSTEM; } + + // @return the first available cutom metadata id. + public static int firstCustomId() { return FIRST_CUSTOM; } + + // @return the last value of known type. Types are 1-indexed. + public static int lastType() { return LAST_TYPE; } + + // Check val is either a system id or a custom one. + // @param val Metadata key to test. + // @return true if it is in a valid range. 
+ private boolean checkMetadataId(final int val) { + if (val <= ANY || (LAST_SYSTEM < val && val < FIRST_CUSTOM)) { + Log.e(TAG, "Invalid metadata ID " + val); + return false; + } + return true; + } + + // Check the type of the data match what is expected. + private void checkType(final int key, final int expectedType) { + final int pos = mKeyToPosMap.get(key); + + mParcel.setDataPosition(pos); + + final int type = mParcel.readInt(); + if (type != expectedType) { + throw new IllegalStateException("Wrong type " + expectedType + " but got " + type); + } + } +} diff --git a/media/java/android/media/MiniThumbFile.java b/media/java/android/media/MiniThumbFile.java new file mode 100644 index 000000000000..f6e63173b67b --- /dev/null +++ b/media/java/android/media/MiniThumbFile.java @@ -0,0 +1,274 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.media; + +import android.graphics.Bitmap; +import android.media.ThumbnailUtil; +import android.net.Uri; +import android.os.Environment; +import android.util.Log; + +import java.io.File; +import java.io.IOException; +import java.io.RandomAccessFile; +import java.nio.ByteBuffer; +import java.nio.channels.FileChannel; +import java.nio.channels.FileLock; +import java.util.Hashtable; + +/** + * This class handles the mini-thumb file. A mini-thumb file consists + * of blocks, indexed by id. 
Each block has BYTES_PER_MINTHUMB bytes in the + * following format: + * + * 1 byte status (0 = empty, 1 = mini-thumb available) + * 8 bytes magic (a magic number to match what's in the database) + * 4 bytes data length (LEN) + * LEN bytes jpeg data + * (the remaining bytes are unused) + * + * @hide This file is shared between MediaStore and MediaProvider and should remained internal use + * only. + */ +public class MiniThumbFile { + private static final String TAG = "MiniThumbFile"; + private static final int MINI_THUMB_DATA_FILE_VERSION = 3; + public static final int BYTES_PER_MINTHUMB = 10000; + private static final int HEADER_SIZE = 1 + 8 + 4; + private Uri mUri; + private RandomAccessFile mMiniThumbFile; + private FileChannel mChannel; + private ByteBuffer mBuffer; + private static Hashtable<String, MiniThumbFile> sThumbFiles = + new Hashtable<String, MiniThumbFile>(); + + /** + * We store different types of thumbnails in different files. To remain backward compatibility, + * we should hashcode of content://media/external/images/media remains the same. 
+ */ + public static synchronized void reset() { + for (MiniThumbFile file : sThumbFiles.values()) { + file.deactivate(); + } + sThumbFiles.clear(); + } + + public static synchronized MiniThumbFile instance(Uri uri) { + String type = uri.getPathSegments().get(1); + MiniThumbFile file = sThumbFiles.get(type); + // Log.v(TAG, "get minithumbfile for type: "+type); + if (file == null) { + file = new MiniThumbFile( + Uri.parse("content://media/external/" + type + "/media")); + sThumbFiles.put(type, file); + } + + return file; + } + + private String randomAccessFilePath(int version) { + String directoryName = + Environment.getExternalStorageDirectory().toString() + + "/DCIM/.thumbnails"; + return directoryName + "/.thumbdata" + version + "-" + mUri.hashCode(); + } + + private void removeOldFile() { + String oldPath = randomAccessFilePath(MINI_THUMB_DATA_FILE_VERSION - 1); + File oldFile = new File(oldPath); + if (oldFile.exists()) { + try { + oldFile.delete(); + } catch (SecurityException ex) { + // ignore + } + } + } + + private RandomAccessFile miniThumbDataFile() { + if (mMiniThumbFile == null) { + removeOldFile(); + String path = randomAccessFilePath(MINI_THUMB_DATA_FILE_VERSION); + File directory = new File(path).getParentFile(); + if (!directory.isDirectory()) { + if (!directory.mkdirs()) { + Log.e(TAG, "Unable to create .thumbnails directory " + + directory.toString()); + } + } + File f = new File(path); + try { + mMiniThumbFile = new RandomAccessFile(f, "rw"); + } catch (IOException ex) { + // Open as read-only so we can at least read the existing + // thumbnails. 
+ try { + mMiniThumbFile = new RandomAccessFile(f, "r"); + } catch (IOException ex2) { + // ignore exception + } + } + if (mMiniThumbFile != null) { + mChannel = mMiniThumbFile.getChannel(); + } + } + return mMiniThumbFile; + } + + public MiniThumbFile(Uri uri) { + mUri = uri; + mBuffer = ByteBuffer.allocateDirect(BYTES_PER_MINTHUMB); + } + + public synchronized void deactivate() { + if (mMiniThumbFile != null) { + try { + mMiniThumbFile.close(); + mMiniThumbFile = null; + } catch (IOException ex) { + // ignore exception + } + } + } + + // Get the magic number for the specified id in the mini-thumb file. + // Returns 0 if the magic is not available. + public synchronized long getMagic(long id) { + // check the mini thumb file for the right data. Right is + // defined as having the right magic number at the offset + // reserved for this "id". + RandomAccessFile r = miniThumbDataFile(); + if (r != null) { + long pos = id * BYTES_PER_MINTHUMB; + FileLock lock = null; + try { + mBuffer.clear(); + mBuffer.limit(1 + 8); + + lock = mChannel.lock(pos, 1 + 8, true); + // check that we can read the following 9 bytes + // (1 for the "status" and 8 for the long) + if (mChannel.read(mBuffer, pos) == 9) { + mBuffer.position(0); + if (mBuffer.get() == 1) { + return mBuffer.getLong(); + } + } + } catch (IOException ex) { + Log.v(TAG, "Got exception checking file magic: ", ex); + } catch (RuntimeException ex) { + // Other NIO related exception like disk full, read only channel..etc + Log.e(TAG, "Got exception when reading magic, id = " + id + + ", disk full or mount read-only? " + ex.getClass()); + } finally { + try { + if (lock != null) lock.release(); + } + catch (IOException ex) { + // ignore it. 
+ } + } + } + return 0; + } + + public synchronized void saveMiniThumbToFile(byte[] data, long id, long magic) + throws IOException { + RandomAccessFile r = miniThumbDataFile(); + if (r == null) return; + + long pos = id * BYTES_PER_MINTHUMB; + FileLock lock = null; + try { + if (data != null) { + if (data.length > BYTES_PER_MINTHUMB - HEADER_SIZE) { + // not enough space to store it. + return; + } + mBuffer.clear(); + mBuffer.put((byte) 1); + mBuffer.putLong(magic); + mBuffer.putInt(data.length); + mBuffer.put(data); + mBuffer.flip(); + + lock = mChannel.lock(pos, BYTES_PER_MINTHUMB, false); + mChannel.write(mBuffer, pos); + } + } catch (IOException ex) { + Log.e(TAG, "couldn't save mini thumbnail data for " + + id + "; ", ex); + throw ex; + } catch (RuntimeException ex) { + // Other NIO related exception like disk full, read only channel..etc + Log.e(TAG, "couldn't save mini thumbnail data for " + + id + "; disk full or mount read-only? " + ex.getClass()); + } finally { + try { + if (lock != null) lock.release(); + } + catch (IOException ex) { + // ignore it. + } + } + } + + /** + * Gallery app can use this method to retrieve mini-thumbnail. Full size + * images share the same IDs with their corresponding thumbnails. + * + * @param id the ID of the image (same of full size image). + * @param data the buffer to store mini-thumbnail. 
+ */ + public synchronized byte [] getMiniThumbFromFile(long id, byte [] data) { + RandomAccessFile r = miniThumbDataFile(); + if (r == null) return null; + + long pos = id * BYTES_PER_MINTHUMB; + FileLock lock = null; + try { + mBuffer.clear(); + lock = mChannel.lock(pos, BYTES_PER_MINTHUMB, true); + int size = mChannel.read(mBuffer, pos); + if (size > 1 + 8 + 4) { // flag, magic, length + mBuffer.position(0); + byte flag = mBuffer.get(); + long magic = mBuffer.getLong(); + int length = mBuffer.getInt(); + + if (size >= 1 + 8 + 4 + length && data.length >= length) { + mBuffer.get(data, 0, length); + return data; + } + } + } catch (IOException ex) { + Log.w(TAG, "got exception when reading thumbnail id=" + id + ", exception: " + ex); + } catch (RuntimeException ex) { + // Other NIO related exception like disk full, read only channel..etc + Log.e(TAG, "Got exception when reading thumbnail, id = " + id + + ", disk full or mount read-only? " + ex.getClass()); + } finally { + try { + if (lock != null) lock.release(); + } + catch (IOException ex) { + // ignore it. + } + } + return null; + } +} diff --git a/media/java/android/media/RingtoneManager.java b/media/java/android/media/RingtoneManager.java index 42edae6937d1..8481410cc8f3 100644 --- a/media/java/android/media/RingtoneManager.java +++ b/media/java/android/media/RingtoneManager.java @@ -122,8 +122,9 @@ public class RingtoneManager { * current ringtone, which will be used to show a checkmark next to the item * for this {@link Uri}. If showing an item for "Default" (@see * {@link #EXTRA_RINGTONE_SHOW_DEFAULT}), this can also be one of - * {@link System#DEFAULT_RINGTONE_URI} or - * {@link System#DEFAULT_NOTIFICATION_URI} to have the "Default" item + * {@link System#DEFAULT_RINGTONE_URI}, + * {@link System#DEFAULT_NOTIFICATION_URI}, or + * {@link System#DEFAULT_ALARM_ALERT_URI} to have the "Default" item * checked. 
* * @see #ACTION_RINGTONE_PICKER @@ -134,8 +135,9 @@ public class RingtoneManager { /** * Given to the ringtone picker as a {@link Uri}. The {@link Uri} of the * ringtone to play when the user attempts to preview the "Default" - * ringtone. This can be one of {@link System#DEFAULT_RINGTONE_URI} or - * {@link System#DEFAULT_NOTIFICATION_URI} to have the "Default" point to + * ringtone. This can be one of {@link System#DEFAULT_RINGTONE_URI}, + * {@link System#DEFAULT_NOTIFICATION_URI}, or + * {@link System#DEFAULT_ALARM_ALERT_URI} to have the "Default" point to * the current sound for the given default sound type. If you are showing a * ringtone picker for some other type of sound, you are free to provide any * {@link Uri} here. @@ -163,8 +165,9 @@ public class RingtoneManager { * <p> * It will be one of: * <li> the picked ringtone, - * <li> a {@link Uri} that equals {@link System#DEFAULT_RINGTONE_URI} or - * {@link System#DEFAULT_NOTIFICATION_URI} if the default was chosen, + * <li> a {@link Uri} that equals {@link System#DEFAULT_RINGTONE_URI}, + * {@link System#DEFAULT_NOTIFICATION_URI}, or + * {@link System#DEFAULT_ALARM_ALERT_URI} if the default was chosen, * <li> null if the "Silent" item was picked. * * @see #ACTION_RINGTONE_PICKER @@ -602,21 +605,6 @@ public class RingtoneManager { Log.e(TAG, "Failed to open ringtone " + ringtoneUri); } - // Ringtone doesn't exist, use the fallback ringtone. - try { - AssetFileDescriptor afd = context.getResources().openRawResourceFd( - com.android.internal.R.raw.fallbackring); - if (afd != null) { - Ringtone r = new Ringtone(context); - r.open(afd); - afd.close(); - return r; - } - } catch (Exception ex) { - } - - // we should never get here - Log.e(TAG, "unable to find a usable ringtone"); return null; } @@ -627,15 +615,16 @@ public class RingtoneManager { * * @param context A context used for querying. * @param type The type whose default sound should be returned. 
One of - * {@link #TYPE_RINGTONE} or {@link #TYPE_NOTIFICATION}. + * {@link #TYPE_RINGTONE}, {@link #TYPE_NOTIFICATION}, or + * {@link #TYPE_ALARM}. * @return A {@link Uri} pointing to the default sound for the sound type. * @see #setActualDefaultRingtoneUri(Context, int, Uri) */ public static Uri getActualDefaultRingtoneUri(Context context, int type) { String setting = getSettingForType(type); if (setting == null) return null; - final String uriString = Settings.System.getString(context.getContentResolver(), setting); - return uriString != null ? Uri.parse(uriString) : getValidRingtoneUri(context); + final String uriString = Settings.System.getString(context.getContentResolver(), setting); + return uriString != null ? Uri.parse(uriString) : null; } /** @@ -643,14 +632,16 @@ public class RingtoneManager { * * @param context A context used for querying. * @param type The type whose default sound should be set. One of - * {@link #TYPE_RINGTONE} or {@link #TYPE_NOTIFICATION}. + * {@link #TYPE_RINGTONE}, {@link #TYPE_NOTIFICATION}, or + * {@link #TYPE_ALARM}. * @param ringtoneUri A {@link Uri} pointing to the default sound to set. * @see #getActualDefaultRingtoneUri(Context, int) */ public static void setActualDefaultRingtoneUri(Context context, int type, Uri ringtoneUri) { String setting = getSettingForType(type); if (setting == null) return; - Settings.System.putString(context.getContentResolver(), setting, ringtoneUri.toString()); + Settings.System.putString(context.getContentResolver(), setting, + ringtoneUri != null ? 
ringtoneUri.toString() : null); } private static String getSettingForType(int type) { @@ -658,6 +649,8 @@ public class RingtoneManager { return Settings.System.RINGTONE; } else if ((type & TYPE_NOTIFICATION) != 0) { return Settings.System.NOTIFICATION_SOUND; + } else if ((type & TYPE_ALARM) != 0) { + return Settings.System.ALARM_ALERT; } else { return null; } @@ -677,8 +670,9 @@ public class RingtoneManager { * Returns the type of a default {@link Uri}. * * @param defaultRingtoneUri The default {@link Uri}. For example, - * {@link System#DEFAULT_RINGTONE_URI} or - * {@link System#DEFAULT_NOTIFICATION_URI}. + * {@link System#DEFAULT_RINGTONE_URI}, + * {@link System#DEFAULT_NOTIFICATION_URI}, or + * {@link System#DEFAULT_ALARM_ALERT_URI}. * @return The type of the defaultRingtoneUri, or -1. */ public static int getDefaultType(Uri defaultRingtoneUri) { @@ -688,6 +682,8 @@ public class RingtoneManager { return TYPE_RINGTONE; } else if (defaultRingtoneUri.equals(Settings.System.DEFAULT_NOTIFICATION_URI)) { return TYPE_NOTIFICATION; + } else if (defaultRingtoneUri.equals(Settings.System.DEFAULT_ALARM_ALERT_URI)) { + return TYPE_ALARM; } else { return -1; } @@ -707,6 +703,8 @@ public class RingtoneManager { return Settings.System.DEFAULT_RINGTONE_URI; } else if ((type & TYPE_NOTIFICATION) != 0) { return Settings.System.DEFAULT_NOTIFICATION_URI; + } else if ((type & TYPE_ALARM) != 0) { + return Settings.System.DEFAULT_ALARM_ALERT_URI; } else { return null; } diff --git a/media/java/android/media/ThumbnailUtil.java b/media/java/android/media/ThumbnailUtil.java new file mode 100644 index 000000000000..f9d69fb7286c --- /dev/null +++ b/media/java/android/media/ThumbnailUtil.java @@ -0,0 +1,544 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.media; + +import android.net.Uri; +import android.os.ParcelFileDescriptor; +import android.provider.BaseColumns; +import android.provider.MediaStore.Images; +import android.provider.MediaStore.Images.Thumbnails; +import android.util.Log; + +import android.content.ContentResolver; +import android.content.ContentUris; +import android.content.ContentValues; +import android.database.Cursor; +import android.graphics.Bitmap; +import android.graphics.BitmapFactory; +import android.graphics.Canvas; +import android.graphics.Matrix; +import android.graphics.Rect; +import android.media.MediaMetadataRetriever; + +import java.io.ByteArrayOutputStream; +import java.io.FileDescriptor; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.OutputStream; + +/** + * Thumbnail generation routines for media provider. This class should only be used internaly. + * {@hide} THIS IS NOT FOR PUBLIC API. + */ + +public class ThumbnailUtil { + private static final String TAG = "ThumbnailUtil"; + //Whether we should recycle the input (unless the output is the input). 
+ public static final boolean RECYCLE_INPUT = true; + public static final boolean NO_RECYCLE_INPUT = false; + public static final boolean ROTATE_AS_NEEDED = true; + public static final boolean NO_ROTATE = false; + public static final boolean USE_NATIVE = true; + public static final boolean NO_NATIVE = false; + + public static final int THUMBNAIL_TARGET_SIZE = 320; + public static final int MINI_THUMB_TARGET_SIZE = 96; + public static final int THUMBNAIL_MAX_NUM_PIXELS = 512 * 384; + public static final int MINI_THUMB_MAX_NUM_PIXELS = 128 * 128; + public static final int UNCONSTRAINED = -1; + + // Returns Options that set the native alloc flag for Bitmap decode. + public static BitmapFactory.Options createNativeAllocOptions() { + BitmapFactory.Options options = new BitmapFactory.Options(); + options.inNativeAlloc = true; + return options; + } + /** + * Make a bitmap from a given Uri. + * + * @param uri + */ + public static Bitmap makeBitmap(int minSideLength, int maxNumOfPixels, + Uri uri, ContentResolver cr) { + return makeBitmap(minSideLength, maxNumOfPixels, uri, cr, + NO_NATIVE); + } + + /* + * Compute the sample size as a function of minSideLength + * and maxNumOfPixels. + * minSideLength is used to specify that minimal width or height of a + * bitmap. + * maxNumOfPixels is used to specify the maximal size in pixels that is + * tolerable in terms of memory usage. + * + * The function returns a sample size based on the constraints. + * Both size and minSideLength can be passed in as IImage.UNCONSTRAINED, + * which indicates no care of the corresponding constraint. + * The functions prefers returning a sample size that + * generates a smaller bitmap, unless minSideLength = IImage.UNCONSTRAINED. + * + * Also, the function rounds up the sample size to a power of 2 or multiple + * of 8 because BitmapFactory only honors sample size this way. + * For example, BitmapFactory downsamples an image by 2 even though the + * request is 3. 
So we round up the sample size to avoid OOM. + */ + public static int computeSampleSize(BitmapFactory.Options options, + int minSideLength, int maxNumOfPixels) { + int initialSize = computeInitialSampleSize(options, minSideLength, + maxNumOfPixels); + + int roundedSize; + if (initialSize <= 8 ) { + roundedSize = 1; + while (roundedSize < initialSize) { + roundedSize <<= 1; + } + } else { + roundedSize = (initialSize + 7) / 8 * 8; + } + + return roundedSize; + } + + private static int computeInitialSampleSize(BitmapFactory.Options options, + int minSideLength, int maxNumOfPixels) { + double w = options.outWidth; + double h = options.outHeight; + + int lowerBound = (maxNumOfPixels == UNCONSTRAINED) ? 1 : + (int) Math.ceil(Math.sqrt(w * h / maxNumOfPixels)); + int upperBound = (minSideLength == UNCONSTRAINED) ? 128 : + (int) Math.min(Math.floor(w / minSideLength), + Math.floor(h / minSideLength)); + + if (upperBound < lowerBound) { + // return the larger one when there is no overlapping zone. + return lowerBound; + } + + if ((maxNumOfPixels == UNCONSTRAINED) && + (minSideLength == UNCONSTRAINED)) { + return 1; + } else if (minSideLength == UNCONSTRAINED) { + return lowerBound; + } else { + return upperBound; + } + } + + public static Bitmap makeBitmap(int minSideLength, int maxNumOfPixels, + Uri uri, ContentResolver cr, boolean useNative) { + ParcelFileDescriptor input = null; + try { + input = cr.openFileDescriptor(uri, "r"); + BitmapFactory.Options options = null; + if (useNative) { + options = createNativeAllocOptions(); + } + return makeBitmap(minSideLength, maxNumOfPixels, uri, cr, input, + options); + } catch (IOException ex) { + Log.e(TAG, "", ex); + return null; + } finally { + closeSilently(input); + } + } + + // Rotates the bitmap by the specified degree. + // If a new bitmap is created, the original bitmap is recycled. 
+ public static Bitmap rotate(Bitmap b, int degrees) { + if (degrees != 0 && b != null) { + Matrix m = new Matrix(); + m.setRotate(degrees, + (float) b.getWidth() / 2, (float) b.getHeight() / 2); + try { + Bitmap b2 = Bitmap.createBitmap( + b, 0, 0, b.getWidth(), b.getHeight(), m, true); + if (b != b2) { + b.recycle(); + b = b2; + } + } catch (OutOfMemoryError ex) { + // We have no memory to rotate. Return the original bitmap. + } + } + return b; + } + + private static void closeSilently(ParcelFileDescriptor c) { + if (c == null) return; + try { + c.close(); + } catch (Throwable t) { + // do nothing + } + } + + private static ParcelFileDescriptor makeInputStream( + Uri uri, ContentResolver cr) { + try { + return cr.openFileDescriptor(uri, "r"); + } catch (IOException ex) { + return null; + } + } + + public static Bitmap makeBitmap(int minSideLength, int maxNumOfPixels, + Uri uri, ContentResolver cr, ParcelFileDescriptor pfd, + BitmapFactory.Options options) { + Bitmap b = null; + try { + if (pfd == null) pfd = makeInputStream(uri, cr); + if (pfd == null) return null; + if (options == null) options = new BitmapFactory.Options(); + + FileDescriptor fd = pfd.getFileDescriptor(); + options.inSampleSize = 1; + options.inJustDecodeBounds = true; + BitmapFactory.decodeFileDescriptor(fd, null, options); + if (options.mCancel || options.outWidth == -1 + || options.outHeight == -1) { + return null; + } + options.inSampleSize = computeSampleSize( + options, minSideLength, maxNumOfPixels); + options.inJustDecodeBounds = false; + + options.inDither = false; + options.inPreferredConfig = Bitmap.Config.ARGB_8888; + b = BitmapFactory.decodeFileDescriptor(fd, null, options); + } catch (OutOfMemoryError ex) { + Log.e(TAG, "Got oom exception ", ex); + return null; + } finally { + closeSilently(pfd); + } + return b; + } + + /** + * Creates a centered bitmap of the desired size. 
+ * @param source + * @param recycle whether we want to recycle the input + */ + public static Bitmap extractMiniThumb( + Bitmap source, int width, int height, boolean recycle) { + if (source == null) { + return null; + } + + float scale; + if (source.getWidth() < source.getHeight()) { + scale = width / (float) source.getWidth(); + } else { + scale = height / (float) source.getHeight(); + } + Matrix matrix = new Matrix(); + matrix.setScale(scale, scale); + Bitmap miniThumbnail = transform(matrix, source, width, height, true, recycle); + return miniThumbnail; + } + + /** + * Create a video thumbnail for a video. May return null if the video is + * corrupt. + * + * @param filePath + */ + public static Bitmap createVideoThumbnail(String filePath) { + Bitmap bitmap = null; + MediaMetadataRetriever retriever = new MediaMetadataRetriever(); + try { + retriever.setMode(MediaMetadataRetriever.MODE_CAPTURE_FRAME_ONLY); + retriever.setDataSource(filePath); + bitmap = retriever.captureFrame(); + } catch (IllegalArgumentException ex) { + // Assume this is a corrupt video file + } catch (RuntimeException ex) { + // Assume this is a corrupt video file. + } finally { + try { + retriever.release(); + } catch (RuntimeException ex) { + // Ignore failures while cleaning up. + } + } + return bitmap; + } + + /** + * This method first examines if the thumbnail embedded in EXIF is bigger than our target + * size. If not, then it'll create a thumbnail from original image. Due to efficiency + * consideration, we want to let MediaThumbRequest avoid calling this method twice for + * both kinds, so it only requests for MICRO_KIND and set saveImage to true. + * + * This method always returns a "square thumbnail" for MICRO_KIND thumbnail. 
+ * + * @param cr ContentResolver + * @param filePath file path needed by EXIF interface + * @param uri URI of original image + * @param origId image id + * @param kind either MINI_KIND or MICRO_KIND + * @param saveImage Whether to save MINI_KIND thumbnail obtained in this method. + * @return Bitmap + */ + public static Bitmap createImageThumbnail(ContentResolver cr, String filePath, Uri uri, + long origId, int kind, boolean saveMini) { + boolean wantMini = (kind == Images.Thumbnails.MINI_KIND || saveMini); + int targetSize = wantMini ? + ThumbnailUtil.THUMBNAIL_TARGET_SIZE : ThumbnailUtil.MINI_THUMB_TARGET_SIZE; + int maxPixels = wantMini ? + ThumbnailUtil.THUMBNAIL_MAX_NUM_PIXELS : ThumbnailUtil.MINI_THUMB_MAX_NUM_PIXELS; + byte[] thumbData = createThumbnailFromEXIF(filePath, targetSize); + Bitmap bitmap = null; + + if (thumbData != null) { + BitmapFactory.Options options = new BitmapFactory.Options(); + options.inSampleSize = computeSampleSize(options, targetSize, maxPixels); + options.inDither = false; + options.inPreferredConfig = Bitmap.Config.ARGB_8888; + options.inJustDecodeBounds = false; + bitmap = BitmapFactory.decodeByteArray(thumbData, 0, thumbData.length, options); + } + + if (bitmap == null) { + bitmap = ThumbnailUtil.makeBitmap(targetSize, maxPixels, uri, cr); + } + + if (bitmap == null) { + return null; + } + + if (saveMini) { + if (thumbData != null) { + ThumbnailUtil.storeThumbnail(cr, origId, thumbData, bitmap.getWidth(), + bitmap.getHeight()); + } else { + ThumbnailUtil.storeThumbnail(cr, origId, bitmap); + } + } + + if (kind == Images.Thumbnails.MICRO_KIND) { + // now we make it a "square thumbnail" for MICRO_KIND thumbnail + bitmap = ThumbnailUtil.extractMiniThumb(bitmap, + ThumbnailUtil.MINI_THUMB_TARGET_SIZE, + ThumbnailUtil.MINI_THUMB_TARGET_SIZE, ThumbnailUtil.RECYCLE_INPUT); + } + return bitmap; + } + + public static Bitmap transform(Matrix scaler, + Bitmap source, + int targetWidth, + int targetHeight, + boolean scaleUp, + boolean 
recycle) { + + int deltaX = source.getWidth() - targetWidth; + int deltaY = source.getHeight() - targetHeight; + if (!scaleUp && (deltaX < 0 || deltaY < 0)) { + /* + * In this case the bitmap is smaller, at least in one dimension, + * than the target. Transform it by placing as much of the image + * as possible into the target and leaving the top/bottom or + * left/right (or both) black. + */ + Bitmap b2 = Bitmap.createBitmap(targetWidth, targetHeight, + Bitmap.Config.ARGB_8888); + Canvas c = new Canvas(b2); + + int deltaXHalf = Math.max(0, deltaX / 2); + int deltaYHalf = Math.max(0, deltaY / 2); + Rect src = new Rect( + deltaXHalf, + deltaYHalf, + deltaXHalf + Math.min(targetWidth, source.getWidth()), + deltaYHalf + Math.min(targetHeight, source.getHeight())); + int dstX = (targetWidth - src.width()) / 2; + int dstY = (targetHeight - src.height()) / 2; + Rect dst = new Rect( + dstX, + dstY, + targetWidth - dstX, + targetHeight - dstY); + c.drawBitmap(source, src, dst, null); + if (recycle) { + source.recycle(); + } + return b2; + } + float bitmapWidthF = source.getWidth(); + float bitmapHeightF = source.getHeight(); + + float bitmapAspect = bitmapWidthF / bitmapHeightF; + float viewAspect = (float) targetWidth / targetHeight; + + if (bitmapAspect > viewAspect) { + float scale = targetHeight / bitmapHeightF; + if (scale < .9F || scale > 1F) { + scaler.setScale(scale, scale); + } else { + scaler = null; + } + } else { + float scale = targetWidth / bitmapWidthF; + if (scale < .9F || scale > 1F) { + scaler.setScale(scale, scale); + } else { + scaler = null; + } + } + + Bitmap b1; + if (scaler != null) { + // this is used for minithumb and crop, so we want to filter here. 
+ b1 = Bitmap.createBitmap(source, 0, 0, + source.getWidth(), source.getHeight(), scaler, true); + } else { + b1 = source; + } + + if (recycle && b1 != source) { + source.recycle(); + } + + int dx1 = Math.max(0, b1.getWidth() - targetWidth); + int dy1 = Math.max(0, b1.getHeight() - targetHeight); + + Bitmap b2 = Bitmap.createBitmap( + b1, + dx1 / 2, + dy1 / 2, + targetWidth, + targetHeight); + + if (b2 != b1) { + if (recycle || b1 != source) { + b1.recycle(); + } + } + + return b2; + } + + private static final String[] THUMB_PROJECTION = new String[] { + BaseColumns._ID // 0 + }; + + /** + * Look up thumbnail uri by given imageId, it will be automatically created if it's not created + * yet. Most of the time imageId is identical to thumbId, but it's not always true. + * @param req + * @param width + * @param height + * @return Uri Thumbnail uri + */ + private static Uri getImageThumbnailUri(ContentResolver cr, long origId, int width, int height) { + Uri thumbUri = Images.Thumbnails.EXTERNAL_CONTENT_URI; + Cursor c = cr.query(thumbUri, THUMB_PROJECTION, + Thumbnails.IMAGE_ID + "=?", + new String[]{String.valueOf(origId)}, null); + try { + if (c.moveToNext()) { + return ContentUris.withAppendedId(thumbUri, c.getLong(0)); + } + } finally { + if (c != null) c.close(); + } + + ContentValues values = new ContentValues(4); + values.put(Thumbnails.KIND, Thumbnails.MINI_KIND); + values.put(Thumbnails.IMAGE_ID, origId); + values.put(Thumbnails.HEIGHT, height); + values.put(Thumbnails.WIDTH, width); + try { + return cr.insert(thumbUri, values); + } catch (Exception ex) { + Log.w(TAG, ex); + return null; + } + } + + /** + * Store a given thumbnail in the database. 
(Bitmap) + */ + private static boolean storeThumbnail(ContentResolver cr, long origId, Bitmap thumb) { + if (thumb == null) return false; + try { + Uri uri = getImageThumbnailUri(cr, origId, thumb.getWidth(), thumb.getHeight()); + OutputStream thumbOut = cr.openOutputStream(uri); + thumb.compress(Bitmap.CompressFormat.JPEG, 85, thumbOut); + thumbOut.close(); + return true; + } catch (Throwable t) { + Log.e(TAG, "Unable to store thumbnail", t); + return false; + } + } + + /** + * Store a given thumbnail in the database. (byte array) + */ + private static boolean storeThumbnail(ContentResolver cr, long origId, byte[] jpegThumbnail, + int width, int height) { + if (jpegThumbnail == null) return false; + + Uri uri = getImageThumbnailUri(cr, origId, width, height); + if (uri == null) { + return false; + } + try { + OutputStream thumbOut = cr.openOutputStream(uri); + thumbOut.write(jpegThumbnail); + thumbOut.close(); + return true; + } catch (Throwable t) { + Log.e(TAG, "Unable to store thumbnail", t); + return false; + } + } + + // Extract thumbnail in image that meets the targetSize criteria. + static byte[] createThumbnailFromEXIF(String filePath, int targetSize) { + if (filePath == null) return null; + + try { + ExifInterface exif = new ExifInterface(filePath); + if (exif == null) return null; + byte [] thumbData = exif.getThumbnail(); + if (thumbData == null) return null; + // Sniff the size of the EXIF thumbnail before decoding it. Photos + // from the device will pass, but images that are side loaded from + // other cameras may not. 
+ BitmapFactory.Options options = new BitmapFactory.Options(); + options.inJustDecodeBounds = true; + BitmapFactory.decodeByteArray(thumbData, 0, thumbData.length, options); + + int width = options.outWidth; + int height = options.outHeight; + + if (width >= targetSize && height >= targetSize) { + return thumbData; + } + } catch (IOException ex) { + Log.w(TAG, ex); + } + return null; + } +} diff --git a/media/java/android/media/ToneGenerator.java b/media/java/android/media/ToneGenerator.java index e5ee9a31058c..d2322650be88 100644 --- a/media/java/android/media/ToneGenerator.java +++ b/media/java/android/media/ToneGenerator.java @@ -724,9 +724,9 @@ public class ToneGenerator public static final int TONE_CDMA_SIGNAL_OFF = 98; /** Maximum volume, for use with {@link #ToneGenerator(int,int)} */ - public static final int MAX_VOLUME = AudioSystem.MAX_VOLUME; + public static final int MAX_VOLUME = 100; /** Minimum volume setting, for use with {@link #ToneGenerator(int,int)} */ - public static final int MIN_VOLUME = AudioSystem.MIN_VOLUME; + public static final int MIN_VOLUME = 0; /** @@ -744,7 +744,7 @@ public class ToneGenerator * This method starts the playback of a tone of the specified type. * only one tone can play at a time: if a tone is playing while this method is called, * this tone is stopped and replaced by the one requested. - * @param toneType The type of tone generate chosen from the following list: + * @param toneType The type of tone generated chosen from the following list: * <ul> * <li>{@link #TONE_DTMF_0} * <li>{@link #TONE_DTMF_1} @@ -846,7 +846,18 @@ public class ToneGenerator * </ul> * @see #ToneGenerator(int, int) */ - public native boolean startTone(int toneType); + public boolean startTone(int toneType) { + return startTone(toneType, -1); + } + + /** + * This method starts the playback of a tone of the specified type for the specified duration. + * @param toneType The type of tone generated @see {@link #startTone(int)}. 
+ * @param durationMs The tone duration in milliseconds. If the tone is limited in time by definition, + * the actual duration will be the minimum of durationMs and the defined tone duration. Setting durationMs to -1, + * is equivalent to calling {@link #startTone(int)}. + */ + public native boolean startTone(int toneType, int durationMs); /** * This method stops the tone currently playing playback. diff --git a/media/jni/Android.mk b/media/jni/Android.mk index 3b05984cb75f..49a82e6f6b23 100644 --- a/media/jni/Android.mk +++ b/media/jni/Android.mk @@ -1,8 +1,7 @@ +ifneq ($(BUILD_WITHOUT_PV),true) LOCAL_PATH:= $(call my-dir) include $(CLEAR_VARS) -ifneq ($(BUILD_WITHOUT_PV),true) - LOCAL_SRC_FILES:= \ android_media_MediaPlayer.cpp \ android_media_MediaRecorder.cpp \ @@ -13,20 +12,20 @@ LOCAL_SRC_FILES:= \ LOCAL_SHARED_LIBRARIES := \ libopencore_player \ - libopencore_author \ libomx_amrenc_sharedlibrary \ libandroid_runtime \ libnativehelper \ - libcutils \ libutils \ + libbinder \ libmedia \ - libsgl \ + libskia \ libui LOCAL_STATIC_LIBRARIES := LOCAL_C_INCLUDES += \ external/tremor/Tremor \ + frameworks/base/core/jni \ $(PV_INCLUDES) \ $(JNI_H_INCLUDE) \ $(call include-path-for, corecg graphics) @@ -39,7 +38,6 @@ LOCAL_MODULE:= libmedia_jni include $(BUILD_SHARED_LIBRARY) -endif - # build libsoundpool.so include $(LOCAL_PATH)/soundpool/Android.mk +endif diff --git a/media/jni/android_media_AmrInputStream.cpp b/media/jni/android_media_AmrInputStream.cpp index 51cb6c78735c..c4dd07ee9e49 100644 --- a/media/jni/android_media_AmrInputStream.cpp +++ b/media/jni/android_media_AmrInputStream.cpp @@ -169,13 +169,6 @@ static JNINativeMethod gMethods[] = { int register_android_media_AmrInputStream(JNIEnv *env) { const char* const kClassPathName = "android/media/AmrInputStream"; - jclass clazz; - - clazz = env->FindClass(kClassPathName); - if (clazz == NULL) { - LOGE("Can't find %s", kClassPathName); - return -1; - } return AndroidRuntime::registerNativeMethods(env, 
kClassPathName, gMethods, NELEM(gMethods)); diff --git a/media/jni/android_media_MediaMetadataRetriever.cpp b/media/jni/android_media_MediaMetadataRetriever.cpp index 4624a187a875..3e9ba333cd19 100644 --- a/media/jni/android_media_MediaMetadataRetriever.cpp +++ b/media/jni/android_media_MediaMetadataRetriever.cpp @@ -40,6 +40,7 @@ struct fields_t { static fields_t fields; static Mutex sLock; +static const char* const kClassPathName = "android/media/MediaMetadataRetriever"; static void process_media_retriever_call(JNIEnv *env, status_t opStatus, const char* exception, const char *message) { @@ -195,7 +196,7 @@ static jobject android_media_MediaMetadataRetriever_captureFrame(JNIEnv *env, jo // Since internally SkBitmap uses reference count to manage the reference to // its pixels, it is important that the pixels (along with SkBitmap) be // available after creating the Bitmap is returned to Java app. - return env->NewObject(fields.bitmapClazz, fields.bitmapConstructor, (int) bitmap, true, NULL); + return env->NewObject(fields.bitmapClazz, fields.bitmapConstructor, (int) bitmap, true, NULL, -1); } static jbyteArray android_media_MediaMetadataRetriever_extractAlbumArt(JNIEnv *env, jobject thiz) @@ -269,6 +270,36 @@ static void android_media_MediaMetadataRetriever_native_finalize(JNIEnv *env, jo android_media_MediaMetadataRetriever_release(env, thiz); } +// This function gets a field ID, which in turn causes class initialization. +// It is called from a static block in MediaMetadataRetriever, which won't run until the +// first time an instance of this class is used. 
+static void android_media_MediaMetadataRetriever_native_init(JNIEnv *env) +{ + jclass clazz = env->FindClass(kClassPathName); + if (clazz == NULL) { + jniThrowException(env, "java/lang/RuntimeException", "Can't find android/media/MediaMetadataRetriever"); + return; + } + + fields.context = env->GetFieldID(clazz, "mNativeContext", "I"); + if (fields.context == NULL) { + jniThrowException(env, "java/lang/RuntimeException", "Can't find MediaMetadataRetriever.mNativeContext"); + return; + } + + fields.bitmapClazz = env->FindClass("android/graphics/Bitmap"); + if (fields.bitmapClazz == NULL) { + jniThrowException(env, "java/lang/RuntimeException", "Can't find android/graphics/Bitmap"); + return; + } + + fields.bitmapConstructor = env->GetMethodID(fields.bitmapClazz, "<init>", "(IZ[BI)V"); + if (fields.bitmapConstructor == NULL) { + jniThrowException(env, "java/lang/RuntimeException", "Can't find Bitmap constructor"); + return; + } +} + static void android_media_MediaMetadataRetriever_native_setup(JNIEnv *env, jobject thiz) { LOGV("native_setup"); @@ -292,36 +323,13 @@ static JNINativeMethod nativeMethods[] = { {"release", "()V", (void *)android_media_MediaMetadataRetriever_release}, {"native_finalize", "()V", (void *)android_media_MediaMetadataRetriever_native_finalize}, {"native_setup", "()V", (void *)android_media_MediaMetadataRetriever_native_setup}, + {"native_init", "()V", (void *)android_media_MediaMetadataRetriever_native_init}, }; -// Register native mehtods with Android runtime environment +// This function only registers the native methods, and is called from +// JNI_OnLoad in android_media_MediaPlayer.cpp int register_android_media_MediaMetadataRetriever(JNIEnv *env) { - static const char* const kClassPathName = "android/media/MediaMetadataRetriever"; - jclass clazz = env->FindClass(kClassPathName); - if (clazz == NULL) { - LOGE("Can't find class: %s", kClassPathName); - return -1; - } - - fields.context = env->GetFieldID(clazz, "mNativeContext", "I"); - if 
(fields.context == NULL) { - LOGE("Can't find MediaMetadataRetriever.mNativeContext"); - return -1; - } - - fields.bitmapClazz = env->FindClass("android/graphics/Bitmap"); - if (fields.bitmapClazz == NULL) { - LOGE("Bitmap class is not found"); - return -1; - } - - fields.bitmapConstructor = env->GetMethodID(fields.bitmapClazz, "<init>", "(IZ[B)V"); - if (fields.bitmapConstructor == NULL) { - LOGE("Bitmap constructor is not found"); - return -1; - } - return AndroidRuntime::registerNativeMethods - (env, kClassPathName, nativeMethods, NELEM(nativeMethods)); + (env, kClassPathName, nativeMethods, NELEM(nativeMethods)); } diff --git a/media/jni/android_media_MediaPlayer.cpp b/media/jni/android_media_MediaPlayer.cpp index 6317fe21b702..a4ac6dcb8f3d 100644 --- a/media/jni/android_media_MediaPlayer.cpp +++ b/media/jni/android_media_MediaPlayer.cpp @@ -20,6 +20,7 @@ #include "utils/Log.h" #include <media/mediaplayer.h> +#include <media/MediaPlayerInterface.h> #include <stdio.h> #include <assert.h> #include <limits.h> @@ -30,6 +31,8 @@ #include "JNIHelp.h" #include "android_runtime/AndroidRuntime.h" #include "utils/Errors.h" // for status_t +#include "android_util_Binder.h" +#include <binder/Parcel.h> // ---------------------------------------------------------------------------- @@ -98,10 +101,9 @@ void JNIMediaPlayerListener::notify(int msg, int ext1, int ext2) // ---------------------------------------------------------------------------- -static sp<Surface> get_surface(JNIEnv* env, jobject clazz) +static Surface* get_surface(JNIEnv* env, jobject clazz) { - Surface* const p = (Surface*)env->GetIntField(clazz, fields.surface_native); - return sp<Surface>(p); + return (Surface*)env->GetIntField(clazz, fields.surface_native); } static sp<MediaPlayer> getMediaPlayer(JNIEnv* env, jobject thiz) @@ -202,7 +204,7 @@ static void setVideoSurface(const sp<MediaPlayer>& mp, JNIEnv *env, jobject thiz { jobject surface = env->GetObjectField(thiz, fields.surface); if (surface != NULL) 
{ - const sp<Surface>& native_surface = get_surface(env, surface); + const sp<Surface> native_surface = get_surface(env, surface); LOGV("prepare: surface=%p (id=%d)", native_surface.get(), native_surface->ID()); mp->setVideoSurface(native_surface); @@ -242,7 +244,7 @@ android_media_MediaPlayer_prepareAsync(JNIEnv *env, jobject thiz) } jobject surface = env->GetObjectField(thiz, fields.surface); if (surface != NULL) { - const sp<Surface>& native_surface = get_surface(env, surface); + const sp<Surface> native_surface = get_surface(env, surface); LOGV("prepareAsync: surface=%p (id=%d)", native_surface.get(), native_surface->ID()); mp->setVideoSurface(native_surface); @@ -442,6 +444,119 @@ android_media_MediaPlayer_getFrameAt(JNIEnv *env, jobject thiz, jint msec) return NULL; } + +// Sends the request and reply parcels to the media player via the +// binder interface. +static jint +android_media_MediaPlayer_invoke(JNIEnv *env, jobject thiz, + jobject java_request, jobject java_reply) +{ + sp<MediaPlayer> media_player = getMediaPlayer(env, thiz); + if (media_player == NULL ) { + jniThrowException(env, "java/lang/IllegalStateException", NULL); + return UNKNOWN_ERROR; + } + + + Parcel *request = parcelForJavaObject(env, java_request); + Parcel *reply = parcelForJavaObject(env, java_reply); + + // Don't use process_media_player_call which use the async loop to + // report errors, instead returns the status. + return media_player->invoke(*request, reply); +} + +// Sends the new filter to the client. 
+static jint +android_media_MediaPlayer_setMetadataFilter(JNIEnv *env, jobject thiz, jobject request) +{ + sp<MediaPlayer> media_player = getMediaPlayer(env, thiz); + if (media_player == NULL ) { + jniThrowException(env, "java/lang/IllegalStateException", NULL); + return UNKNOWN_ERROR; + } + + Parcel *filter = parcelForJavaObject(env, request); + + if (filter == NULL ) { + jniThrowException(env, "java/lang/RuntimeException", "Filter is null"); + return UNKNOWN_ERROR; + } + + return media_player->setMetadataFilter(*filter); +} + +static jboolean +android_media_MediaPlayer_getMetadata(JNIEnv *env, jobject thiz, jboolean update_only, + jboolean apply_filter, jobject reply) +{ + sp<MediaPlayer> media_player = getMediaPlayer(env, thiz); + if (media_player == NULL ) { + jniThrowException(env, "java/lang/IllegalStateException", NULL); + return false; + } + + Parcel *metadata = parcelForJavaObject(env, reply); + + if (metadata == NULL ) { + jniThrowException(env, "java/lang/RuntimeException", "Reply parcel is null"); + return false; + } + + metadata->freeData(); + // On return metadata is positioned at the beginning of the + // metadata. Note however that the parcel actually starts with the + // return code so you should not rewind the parcel using + // setDataPosition(0). + return media_player->getMetadata(update_only, apply_filter, metadata) == OK; +} + +// This function gets some field IDs, which in turn causes class initialization. +// It is called from a static block in MediaPlayer, which won't run until the +// first time an instance of this class is used. 
+static void +android_media_MediaPlayer_native_init(JNIEnv *env) +{ + jclass clazz; + + clazz = env->FindClass("android/media/MediaPlayer"); + if (clazz == NULL) { + jniThrowException(env, "java/lang/RuntimeException", "Can't find android/media/MediaPlayer"); + return; + } + + fields.context = env->GetFieldID(clazz, "mNativeContext", "I"); + if (fields.context == NULL) { + jniThrowException(env, "java/lang/RuntimeException", "Can't find MediaPlayer.mNativeContext"); + return; + } + + fields.post_event = env->GetStaticMethodID(clazz, "postEventFromNative", + "(Ljava/lang/Object;IIILjava/lang/Object;)V"); + if (fields.post_event == NULL) { + jniThrowException(env, "java/lang/RuntimeException", "Can't find MediaPlayer.postEventFromNative"); + return; + } + + fields.surface = env->GetFieldID(clazz, "mSurface", "Landroid/view/Surface;"); + if (fields.surface == NULL) { + jniThrowException(env, "java/lang/RuntimeException", "Can't find MediaPlayer.mSurface"); + return; + } + + jclass surface = env->FindClass("android/view/Surface"); + if (surface == NULL) { + jniThrowException(env, "java/lang/RuntimeException", "Can't find android/view/Surface"); + return; + } + + fields.surface_native = env->GetFieldID(surface, "mSurface", "I"); + if (fields.surface_native == NULL) { + jniThrowException(env, "java/lang/RuntimeException", "Can't find Surface.mSurface"); + return; + } +} + static void android_media_MediaPlayer_native_setup(JNIEnv *env, jobject thiz, jobject weak_this) { @@ -479,6 +594,18 @@ android_media_MediaPlayer_native_finalize(JNIEnv *env, jobject thiz) android_media_MediaPlayer_release(env, thiz); } +static jint +android_media_MediaPlayer_snoop(JNIEnv* env, jobject thiz, jobject data, jint kind) { + jshort* ar = (jshort*)env->GetPrimitiveArrayCritical((jarray)data, 0); + jsize len = env->GetArrayLength((jarray)data); + int ret = 0; + if (ar) { + ret = MediaPlayer::snoop(ar, len, kind); + env->ReleasePrimitiveArrayCritical((jarray)data, ar, 0); + } + return ret; +} + 
// ---------------------------------------------------------------------------- static JNINativeMethod gMethods[] = { @@ -503,53 +630,20 @@ static JNINativeMethod gMethods[] = { {"isLooping", "()Z", (void *)android_media_MediaPlayer_isLooping}, {"setVolume", "(FF)V", (void *)android_media_MediaPlayer_setVolume}, {"getFrameAt", "(I)Landroid/graphics/Bitmap;", (void *)android_media_MediaPlayer_getFrameAt}, + {"native_invoke", "(Landroid/os/Parcel;Landroid/os/Parcel;)I",(void *)android_media_MediaPlayer_invoke}, + {"native_setMetadataFilter", "(Landroid/os/Parcel;)I", (void *)android_media_MediaPlayer_setMetadataFilter}, + {"native_getMetadata", "(ZZLandroid/os/Parcel;)Z", (void *)android_media_MediaPlayer_getMetadata}, + {"native_init", "()V", (void *)android_media_MediaPlayer_native_init}, {"native_setup", "(Ljava/lang/Object;)V", (void *)android_media_MediaPlayer_native_setup}, {"native_finalize", "()V", (void *)android_media_MediaPlayer_native_finalize}, + {"snoop", "([SI)I", (void *)android_media_MediaPlayer_snoop}, }; static const char* const kClassPathName = "android/media/MediaPlayer"; +// This function only registers the native methods static int register_android_media_MediaPlayer(JNIEnv *env) { - jclass clazz; - - clazz = env->FindClass("android/media/MediaPlayer"); - if (clazz == NULL) { - LOGE("Can't find android/media/MediaPlayer"); - return -1; - } - - fields.context = env->GetFieldID(clazz, "mNativeContext", "I"); - if (fields.context == NULL) { - LOGE("Can't find MediaPlayer.mNativeContext"); - return -1; - } - - fields.post_event = env->GetStaticMethodID(clazz, "postEventFromNative", - "(Ljava/lang/Object;IIILjava/lang/Object;)V"); - if (fields.post_event == NULL) { - LOGE("Can't find MediaPlayer.postEventFromNative"); - return -1; - } - - fields.surface = env->GetFieldID(clazz, "mSurface", "Landroid/view/Surface;"); - if (fields.surface == NULL) { - LOGE("Can't find MediaPlayer.mSurface"); - return -1; - } - - jclass surface = 
env->FindClass("android/view/Surface"); - if (surface == NULL) { - LOGE("Can't find android/view/Surface"); - return -1; - } - - fields.surface_native = env->GetFieldID(surface, "mSurface", "I"); - if (fields.surface_native == NULL) { - LOGE("Can't find Surface fields"); - return -1; - } - return AndroidRuntime::registerNativeMethods(env, "android/media/MediaPlayer", gMethods, NELEM(gMethods)); } diff --git a/media/jni/android_media_MediaRecorder.cpp b/media/jni/android_media_MediaRecorder.cpp index 0273a5aac06b..cad65b336408 100644 --- a/media/jni/android_media_MediaRecorder.cpp +++ b/media/jni/android_media_MediaRecorder.cpp @@ -276,7 +276,7 @@ static void android_media_MediaRecorder_setVideoFrameRate(JNIEnv *env, jobject thiz, jint rate) { LOGV("setVideoFrameRate(%d)", rate); - if (rate <= 0 || rate > MEDIA_RECORDER_MAX_FRAME_RATE) { + if (rate <= 0) { jniThrowException(env, "java/lang/IllegalArgumentException", "invalid frame rate"); return; } @@ -371,6 +371,53 @@ android_media_MediaRecorder_release(JNIEnv *env, jobject thiz) } } +// This function gets some field IDs, which in turn causes class initialization. +// It is called from a static block in MediaRecorder, which won't run until the +// first time an instance of this class is used. 
+static void +android_media_MediaRecorder_native_init(JNIEnv *env) +{ + jclass clazz; + + clazz = env->FindClass("android/media/MediaRecorder"); + if (clazz == NULL) { + jniThrowException(env, "java/lang/RuntimeException", "Can't find android/media/MediaRecorder"); + return; + } + + fields.context = env->GetFieldID(clazz, "mNativeContext", "I"); + if (fields.context == NULL) { + jniThrowException(env, "java/lang/RuntimeException", "Can't find MediaRecorder.mNativeContext"); + return; + } + + fields.surface = env->GetFieldID(clazz, "mSurface", "Landroid/view/Surface;"); + if (fields.surface == NULL) { + jniThrowException(env, "java/lang/RuntimeException", "Can't find MediaRecorder.mSurface"); + return; + } + + jclass surface = env->FindClass("android/view/Surface"); + if (surface == NULL) { + jniThrowException(env, "java/lang/RuntimeException", "Can't find android/view/Surface"); + return; + } + + fields.surface_native = env->GetFieldID(surface, "mSurface", "I"); + if (fields.surface_native == NULL) { + jniThrowException(env, "java/lang/RuntimeException", "Can't find Surface.mSurface"); + return; + } + + fields.post_event = env->GetStaticMethodID(clazz, "postEventFromNative", + "(Ljava/lang/Object;IIILjava/lang/Object;)V"); + if (fields.post_event == NULL) { + jniThrowException(env, "java/lang/RuntimeException", "MediaRecorder.postEventFromNative"); + return; + } +} + + static void android_media_MediaRecorder_native_setup(JNIEnv *env, jobject thiz, jobject weak_this) { @@ -418,55 +465,19 @@ static JNINativeMethod gMethods[] = { {"getMaxAmplitude", "()I", (void *)android_media_MediaRecorder_native_getMaxAmplitude}, {"start", "()V", (void *)android_media_MediaRecorder_start}, {"stop", "()V", (void *)android_media_MediaRecorder_stop}, - {"native_reset", "()V", (void *)android_media_MediaRecorder_native_reset}, + {"native_reset", "()V", (void *)android_media_MediaRecorder_native_reset}, {"release", "()V", (void *)android_media_MediaRecorder_release}, + {"native_init", 
"()V", (void *)android_media_MediaRecorder_native_init}, {"native_setup", "(Ljava/lang/Object;)V", (void *)android_media_MediaRecorder_native_setup}, {"native_finalize", "()V", (void *)android_media_MediaRecorder_native_finalize}, }; static const char* const kClassPathName = "android/media/MediaRecorder"; +// This function only registers the native methods, and is called from +// JNI_OnLoad in android_media_MediaPlayer.cpp int register_android_media_MediaRecorder(JNIEnv *env) { - jclass clazz; - - clazz = env->FindClass("android/media/MediaRecorder"); - if (clazz == NULL) { - LOGE("Can't find android/media/MediaRecorder"); - return -1; - } - - fields.context = env->GetFieldID(clazz, "mNativeContext", "I"); - if (fields.context == NULL) { - LOGE("Can't find MediaRecorder.mNativeContext"); - return -1; - } - - fields.surface = env->GetFieldID(clazz, "mSurface", "Landroid/view/Surface;"); - if (fields.surface == NULL) { - LOGE("Can't find MediaRecorder.mSurface"); - return -1; - } - - jclass surface = env->FindClass("android/view/Surface"); - if (surface == NULL) { - LOGE("Can't find android/view/Surface"); - return -1; - } - - fields.surface_native = env->GetFieldID(surface, "mSurface", "I"); - if (fields.surface_native == NULL) { - LOGE("Can't find Surface fields"); - return -1; - } - - fields.post_event = env->GetStaticMethodID(clazz, "postEventFromNative", - "(Ljava/lang/Object;IIILjava/lang/Object;)V"); - if (fields.post_event == NULL) { - LOGE("Can't find MediaRecorder.postEventFromNative"); - return -1; - } - return AndroidRuntime::registerNativeMethods(env, "android/media/MediaRecorder", gMethods, NELEM(gMethods)); } diff --git a/media/jni/android_media_MediaScanner.cpp b/media/jni/android_media_MediaScanner.cpp index 8764a7030e01..6a5404e33e58 100644 --- a/media/jni/android_media_MediaScanner.cpp +++ b/media/jni/android_media_MediaScanner.cpp @@ -65,6 +65,8 @@ public: "(Ljava/lang/String;Ljava/lang/String;)V"); mSetMimeTypeMethodID = 
env->GetMethodID(mediaScannerClientInterface, "setMimeType", "(Ljava/lang/String;)V"); + mAddNoMediaFolderMethodID = env->GetMethodID(mediaScannerClientInterface, "addNoMediaFolder", + "(Ljava/lang/String;)V"); } } @@ -111,12 +113,26 @@ public: return (!mEnv->ExceptionCheck()); } + // returns true if it succeeded, false if an exception occurred in the Java code + virtual bool addNoMediaFolder(const char* path) + { + jstring pathStr; + if ((pathStr = mEnv->NewStringUTF(path)) == NULL) return false; + + mEnv->CallVoidMethod(mClient, mAddNoMediaFolderMethodID, pathStr); + + mEnv->DeleteLocalRef(pathStr); + return (!mEnv->ExceptionCheck()); + } + + private: JNIEnv *mEnv; jobject mClient; jmethodID mScanFileMethodID; jmethodID mHandleStringTagMethodID; jmethodID mSetMimeTypeMethodID; + jmethodID mAddNoMediaFolderMethodID; }; @@ -241,6 +257,27 @@ done: return array; } +// This function gets a field ID, which in turn causes class initialization. +// It is called from a static block in MediaScanner, which won't run until the +// first time an instance of this class is used. 
+static void +android_media_MediaScanner_native_init(JNIEnv *env) +{ + jclass clazz; + + clazz = env->FindClass("android/media/MediaScanner"); + if (clazz == NULL) { + jniThrowException(env, "java/lang/RuntimeException", "Can't find android/media/MediaScanner"); + return; + } + + fields.context = env->GetFieldID(clazz, "mNativeContext", "I"); + if (fields.context == NULL) { + jniThrowException(env, "java/lang/RuntimeException", "Can't find MediaScanner.mNativeContext"); + return; + } +} + static void android_media_MediaScanner_native_setup(JNIEnv *env, jobject thiz) { @@ -275,28 +312,17 @@ static JNINativeMethod gMethods[] = { (void *)android_media_MediaScanner_processFile}, {"setLocale", "(Ljava/lang/String;)V", (void *)android_media_MediaScanner_setLocale}, {"extractAlbumArt", "(Ljava/io/FileDescriptor;)[B", (void *)android_media_MediaScanner_extractAlbumArt}, + {"native_init", "()V", (void *)android_media_MediaScanner_native_init}, {"native_setup", "()V", (void *)android_media_MediaScanner_native_setup}, {"native_finalize", "()V", (void *)android_media_MediaScanner_native_finalize}, }; static const char* const kClassPathName = "android/media/MediaScanner"; +// This function only registers the native methods, and is called from +// JNI_OnLoad in android_media_MediaPlayer.cpp int register_android_media_MediaScanner(JNIEnv *env) { - jclass clazz; - - clazz = env->FindClass("android/media/MediaScanner"); - if (clazz == NULL) { - LOGE("Can't find android/media/MediaScanner"); - return -1; - } - - fields.context = env->GetFieldID(clazz, "mNativeContext", "I"); - if (fields.context == NULL) { - LOGE("Can't find MediaScanner.mNativeContext"); - return -1; - } - return AndroidRuntime::registerNativeMethods(env, "android/media/MediaScanner", gMethods, NELEM(gMethods)); } diff --git a/media/jni/android_media_ResampleInputStream.cpp b/media/jni/android_media_ResampleInputStream.cpp index 0247cdb03bef..f248557a574f 100644 --- a/media/jni/android_media_ResampleInputStream.cpp 
+++ b/media/jni/android_media_ResampleInputStream.cpp @@ -128,13 +128,6 @@ static JNINativeMethod gMethods[] = { int register_android_media_ResampleInputStream(JNIEnv *env) { const char* const kClassPathName = "android/media/ResampleInputStream"; - jclass clazz; - - clazz = env->FindClass(kClassPathName); - if (clazz == NULL) { - LOGE("Can't find %s", kClassPathName); - return -1; - } return AndroidRuntime::registerNativeMethods(env, kClassPathName, gMethods, NELEM(gMethods)); diff --git a/media/jni/soundpool/Android.mk b/media/jni/soundpool/Android.mk index 374ddebd2062..9ff2e243eabd 100644 --- a/media/jni/soundpool/Android.mk +++ b/media/jni/soundpool/Android.mk @@ -9,6 +9,7 @@ LOCAL_SRC_FILES:= \ LOCAL_SHARED_LIBRARIES := \ libcutils \ libutils \ + libbinder \ libandroid_runtime \ libnativehelper \ libmedia diff --git a/media/jni/soundpool/SoundPool.cpp b/media/jni/soundpool/SoundPool.cpp index 00a121bfc6e5..b17e31b2266a 100644 --- a/media/jni/soundpool/SoundPool.cpp +++ b/media/jni/soundpool/SoundPool.cpp @@ -524,13 +524,14 @@ void SoundChannel::play(const sp<Sample>& sample, int nextChannelID, float leftV // wrong audio audio buffer size (mAudioBufferSize) unsigned long toggle = mToggle ^ 1; void *userData = (void *)((unsigned long)this | toggle); + uint32_t channels = (numChannels == 2) ? 
AudioSystem::CHANNEL_OUT_STEREO : AudioSystem::CHANNEL_OUT_MONO; #ifdef USE_SHARED_MEM_BUFFER newTrack = new AudioTrack(streamType, sampleRate, sample->format(), - numChannels, sample->getIMemory(), 0, callback, userData); + channels, sample->getIMemory(), 0, callback, userData); #else newTrack = new AudioTrack(streamType, sampleRate, sample->format(), - numChannels, frameCount, 0, callback, userData, bufferFrames); + channels, frameCount, 0, callback, userData, bufferFrames); #endif if (newTrack->initCheck() != NO_ERROR) { LOGE("Error creating AudioTrack"); diff --git a/media/libdrm/mobile2/include/rights/RoManager.h b/media/libdrm/mobile2/include/rights/RoManager.h index cf398b3d274b..71e9eef2b807 100644 --- a/media/libdrm/mobile2/include/rights/RoManager.h +++ b/media/libdrm/mobile2/include/rights/RoManager.h @@ -64,12 +64,6 @@ public: vector<Ro*> getAllRo(); /** - * Get the private key of the device. - * @return the private key. - */ - const string& getDevicePrivateKey() const; - - /** * Get ro which contained rights of specific content. * @param contentID the specific id of content. * @return NULL if not fount otherwise the related ro. 
diff --git a/media/libdrm/mobile2/src/rights/RoManager.cpp b/media/libdrm/mobile2/src/rights/RoManager.cpp index 848c2baab4c4..a115d21f1f33 100644 --- a/media/libdrm/mobile2/src/rights/RoManager.cpp +++ b/media/libdrm/mobile2/src/rights/RoManager.cpp @@ -121,9 +121,3 @@ bool RoManager::checkRoInCache(const string& roID) return true; } -/** see RoManager.h */ -const string& RoManager::getDevicePrivateKey() const -{ - string pk; - return pk; -} diff --git a/media/libdrm/mobile2/src/util/domcore/NodeIterator.cpp b/media/libdrm/mobile2/src/util/domcore/NodeIterator.cpp index f076cda3c1f6..fe136692d2e0 100644 --- a/media/libdrm/mobile2/src/util/domcore/NodeIterator.cpp +++ b/media/libdrm/mobile2/src/util/domcore/NodeIterator.cpp @@ -88,7 +88,7 @@ NodeImpl* NodeIterator::findPreviousOrderNode(NodeImpl* node) node = node->getLastChild();
} else {
if (node == scopeNode)
- node == NULL;
+ node = NULL;
else
node = node->getParentNode();
}
diff --git a/media/libmedia/Android.mk b/media/libmedia/Android.mk index 8020da2b6100..3c0ee1cd6adf 100644 --- a/media/libmedia/Android.mk +++ b/media/libmedia/Android.mk @@ -2,31 +2,34 @@ LOCAL_PATH:= $(call my-dir) include $(CLEAR_VARS) LOCAL_SRC_FILES:= \ - AudioTrack.cpp \ - IAudioFlinger.cpp \ - IAudioFlingerClient.cpp \ - IAudioTrack.cpp \ - IAudioRecord.cpp \ - AudioRecord.cpp \ - AudioSystem.cpp \ - mediaplayer.cpp \ - IMediaPlayerService.cpp \ - IMediaPlayerClient.cpp \ - IMediaPlayer.cpp \ - IMediaRecorder.cpp \ - mediarecorder.cpp \ - IMediaMetadataRetriever.cpp \ - mediametadataretriever.cpp \ - ToneGenerator.cpp \ - JetPlayer.cpp + AudioTrack.cpp \ + IAudioFlinger.cpp \ + IAudioFlingerClient.cpp \ + IAudioTrack.cpp \ + IAudioRecord.cpp \ + AudioRecord.cpp \ + AudioSystem.cpp \ + mediaplayer.cpp \ + IMediaPlayerService.cpp \ + IMediaPlayerClient.cpp \ + IMediaPlayer.cpp \ + IMediaRecorder.cpp \ + Metadata.cpp \ + mediarecorder.cpp \ + IMediaMetadataRetriever.cpp \ + mediametadataretriever.cpp \ + ToneGenerator.cpp \ + JetPlayer.cpp \ + IOMX.cpp \ + IAudioPolicyService.cpp LOCAL_SHARED_LIBRARIES := \ - libui libcutils libutils libsonivox + libui libcutils libutils libbinder libsonivox LOCAL_MODULE:= libmedia ifeq ($(TARGET_OS)-$(TARGET_SIMULATOR),linux-true) -LOCAL_LDLIBS += -ldl +LOCAL_LDLIBS += -ldl -lpthread endif ifneq ($(TARGET_SIMULATOR),true) @@ -34,6 +37,12 @@ LOCAL_SHARED_LIBRARIES += libdl endif LOCAL_C_INCLUDES := \ - $(call include-path-for, graphics corecg) + $(JNI_H_INCLUDE) \ + $(call include-path-for, graphics corecg) \ + $(TOP)/external/opencore/extern_libs_v2/khronos/openmax/include \ + external/speex/include \ + external/speex/libspeex + +LOCAL_STATIC_LIBRARIES := libspeex include $(BUILD_SHARED_LIBRARY) diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index e56efbb2dc22..e63c0d2dd281 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -28,12 +28,13 @@ #include 
<media/AudioSystem.h> #include <media/AudioRecord.h> +#include <media/mediarecorder.h> -#include <utils/IServiceManager.h> +#include <binder/IServiceManager.h> #include <utils/Log.h> -#include <utils/MemoryDealer.h> -#include <utils/Parcel.h> -#include <utils/IPCThreadState.h> +#include <binder/MemoryDealer.h> +#include <binder/Parcel.h> +#include <binder/IPCThreadState.h> #include <utils/Timers.h> #include <cutils/atomic.h> @@ -45,7 +46,7 @@ namespace android { // --------------------------------------------------------------------------- AudioRecord::AudioRecord() - : mStatus(NO_INIT) + : mStatus(NO_INIT), mInput(0) { } @@ -53,15 +54,15 @@ AudioRecord::AudioRecord( int inputSource, uint32_t sampleRate, int format, - int channelCount, + uint32_t channels, int frameCount, uint32_t flags, callback_t cbf, void* user, int notificationFrames) - : mStatus(NO_INIT) + : mStatus(NO_INIT), mInput(0) { - mStatus = set(inputSource, sampleRate, format, channelCount, + mStatus = set(inputSource, sampleRate, format, channels, frameCount, flags, cbf, user, notificationFrames); } @@ -78,6 +79,7 @@ AudioRecord::~AudioRecord() } mAudioRecord.clear(); IPCThreadState::self()->flushCommands(); + AudioSystem::releaseInput(mInput); } } @@ -85,7 +87,7 @@ status_t AudioRecord::set( int inputSource, uint32_t sampleRate, int format, - int channelCount, + uint32_t channels, int frameCount, uint32_t flags, callback_t cbf, @@ -94,18 +96,13 @@ status_t AudioRecord::set( bool threadCanCallJava) { - LOGV("set(): sampleRate %d, channelCount %d, frameCount %d",sampleRate, channelCount, frameCount); + LOGV("set(): sampleRate %d, channels %d, frameCount %d",sampleRate, channels, frameCount); if (mAudioRecord != 0) { return INVALID_OPERATION; } - const sp<IAudioFlinger>& audioFlinger = AudioSystem::get_audio_flinger(); - if (audioFlinger == 0) { - return NO_INIT; - } - - if (inputSource == DEFAULT_INPUT) { - inputSource = MIC_INPUT; + if (inputSource == AUDIO_SOURCE_DEFAULT) { + inputSource = 
AUDIO_SOURCE_MIC; } if (sampleRate == 0) { @@ -115,15 +112,21 @@ status_t AudioRecord::set( if (format == 0) { format = AudioSystem::PCM_16_BIT; } - if (channelCount == 0) { - channelCount = 1; + // validate parameters + if (!AudioSystem::isValidFormat(format)) { + LOGE("Invalid format"); + return BAD_VALUE; } - // validate parameters - if (format != AudioSystem::PCM_16_BIT) { + if (!AudioSystem::isInputChannel(channels)) { return BAD_VALUE; } - if (channelCount != 1 && channelCount != 2) { + int channelCount = AudioSystem::popCount(channels); + + mInput = AudioSystem::getInput(inputSource, + sampleRate, format, channels, (AudioSystem::audio_in_acoustics)flags); + if (mInput == 0) { + LOGE("Could not get audio output for stream type %d", inputSource); return BAD_VALUE; } @@ -132,14 +135,22 @@ status_t AudioRecord::set( if (AudioSystem::getInputBufferSize(sampleRate, format, channelCount, &inputBuffSizeInBytes) != NO_ERROR) { LOGE("AudioSystem could not query the input buffer size."); - return NO_INIT; + return NO_INIT; } + if (inputBuffSizeInBytes == 0) { LOGE("Recording parameters are not supported: sampleRate %d, channelCount %d, format %d", sampleRate, channelCount, format); return BAD_VALUE; } + int frameSizeInBytes = channelCount * (format == AudioSystem::PCM_16_BIT ? 2 : 1); + if (AudioSystem::isLinearPCM(format)) { + frameSizeInBytes = channelCount * (format == AudioSystem::PCM_16_BIT ? sizeof(int16_t) : sizeof(int8_t)); + } else { + frameSizeInBytes = sizeof(int8_t); + } + // We use 2* size of input buffer for ping pong use of record buffer. 
int minFrameCount = 2 * inputBuffSizeInBytes / frameSizeInBytes; @@ -155,22 +166,14 @@ status_t AudioRecord::set( notificationFrames = frameCount/2; } - // open record channel - status_t status; - sp<IAudioRecord> record = audioFlinger->openRecord(getpid(), inputSource, - sampleRate, format, - channelCount, - frameCount, - ((uint16_t)flags) << 16, - &status); - if (record == 0) { - LOGE("AudioFlinger could not create record track, status: %d", status); + // create the IAudioRecord + status_t status = openRecord(sampleRate, format, channelCount, + frameCount, flags); + + if (status != NO_ERROR) { return status; } - sp<IMemory> cblk = record->getCblk(); - if (cblk == 0) { - return NO_INIT; - } + if (cbf != 0) { mClientRecordThread = new ClientRecordThread(*this, threadCanCallJava); if (mClientRecordThread == 0) { @@ -180,15 +183,10 @@ status_t AudioRecord::set( mStatus = NO_ERROR; - mAudioRecord = record; - mCblkMemory = cblk; - mCblk = static_cast<audio_track_cblk_t*>(cblk->pointer()); - mCblk->buffers = (char*)mCblk + sizeof(audio_track_cblk_t); - mCblk->out = 0; mFormat = format; // Update buffer size in case it has been limited by AudioFlinger during track creation mFrameCount = mCblk->frameCount; - mChannelCount = channelCount; + mChannelCount = (uint8_t)channelCount; mActive = 0; mCbf = cbf; mNotificationFrames = notificationFrames; @@ -201,6 +199,7 @@ status_t AudioRecord::set( mNewPosition = 0; mUpdatePeriod = 0; mInputSource = (uint8_t)inputSource; + mFlags = flags; return NO_ERROR; } @@ -234,7 +233,11 @@ uint32_t AudioRecord::frameCount() const int AudioRecord::frameSize() const { - return channelCount()*((format() == AudioSystem::PCM_8_BIT) ? sizeof(uint8_t) : sizeof(int16_t)); + if (AudioSystem::isLinearPCM(mFormat)) { + return channelCount()*((format() == AudioSystem::PCM_8_BIT) ? 
sizeof(uint8_t) : sizeof(int16_t)); + } else { + return sizeof(uint8_t); + } } int AudioRecord::inputSource() const @@ -262,15 +265,29 @@ status_t AudioRecord::start() } if (android_atomic_or(1, &mActive) == 0) { - mNewPosition = mCblk->user + mUpdatePeriod; - mCblk->bufferTimeoutMs = MAX_RUN_TIMEOUT_MS; - mCblk->waitTimeMs = 0; - if (t != 0) { - t->run("ClientRecordThread", THREAD_PRIORITY_AUDIO_CLIENT); - } else { - setpriority(PRIO_PROCESS, 0, THREAD_PRIORITY_AUDIO_CLIENT); + ret = AudioSystem::startInput(mInput); + if (ret == NO_ERROR) { + ret = mAudioRecord->start(); + if (ret == DEAD_OBJECT) { + LOGV("start() dead IAudioRecord: creating a new one"); + ret = openRecord(mCblk->sampleRate, mFormat, mChannelCount, + mFrameCount, mFlags); + } + if (ret == NO_ERROR) { + mNewPosition = mCblk->user + mUpdatePeriod; + mCblk->bufferTimeoutMs = MAX_RUN_TIMEOUT_MS; + mCblk->waitTimeMs = 0; + if (t != 0) { + t->run("ClientRecordThread", THREAD_PRIORITY_AUDIO_CLIENT); + } else { + setpriority(PRIO_PROCESS, 0, THREAD_PRIORITY_AUDIO_CLIENT); + } + } else { + LOGV("start() failed"); + AudioSystem::stopInput(mInput); + android_atomic_and(~1, &mActive); + } } - ret = mAudioRecord->start(); } if (t != 0) { @@ -301,6 +318,7 @@ status_t AudioRecord::stop() } else { setpriority(PRIO_PROCESS, 0, ANDROID_PRIORITY_NORMAL); } + AudioSystem::stopInput(mInput); } if (t != 0) { @@ -372,10 +390,48 @@ status_t AudioRecord::getPosition(uint32_t *position) // ------------------------------------------------------------------------- +status_t AudioRecord::openRecord( + uint32_t sampleRate, + int format, + int channelCount, + int frameCount, + uint32_t flags) +{ + status_t status; + const sp<IAudioFlinger>& audioFlinger = AudioSystem::get_audio_flinger(); + if (audioFlinger == 0) { + return NO_INIT; + } + + sp<IAudioRecord> record = audioFlinger->openRecord(getpid(), mInput, + sampleRate, format, + channelCount, + frameCount, + ((uint16_t)flags) << 16, + &status); + if (record == 0) { + 
LOGE("AudioFlinger could not create record track, status: %d", status); + return status; + } + sp<IMemory> cblk = record->getCblk(); + if (cblk == 0) { + LOGE("Could not get control block"); + return NO_INIT; + } + mAudioRecord.clear(); + mAudioRecord = record; + mCblkMemory.clear(); + mCblkMemory = cblk; + mCblk = static_cast<audio_track_cblk_t*>(cblk->pointer()); + mCblk->buffers = (char*)mCblk + sizeof(audio_track_cblk_t); + mCblk->out = 0; + + return NO_ERROR; +} + status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) { int active; - int timeout = 0; status_t result; audio_track_cblk_t* cblk = mCblk; uint32_t framesReq = audioBuffer->frameCount; @@ -387,25 +443,40 @@ status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) uint32_t framesReady = cblk->framesReady(); if (framesReady == 0) { - Mutex::Autolock _l(cblk->lock); + cblk->lock.lock(); goto start_loop_here; while (framesReady == 0) { active = mActive; - if (UNLIKELY(!active)) + if (UNLIKELY(!active)) { + cblk->lock.unlock(); return NO_MORE_BUFFERS; - if (UNLIKELY(!waitCount)) + } + if (UNLIKELY(!waitCount)) { + cblk->lock.unlock(); return WOULD_BLOCK; - timeout = 0; + } result = cblk->cv.waitRelative(cblk->lock, milliseconds(waitTimeMs)); if (__builtin_expect(result!=NO_ERROR, false)) { cblk->waitTimeMs += waitTimeMs; if (cblk->waitTimeMs >= cblk->bufferTimeoutMs) { LOGW( "obtainBuffer timed out (is the CPU pegged?) 
" "user=%08x, server=%08x", cblk->user, cblk->server); - timeout = 1; + cblk->lock.unlock(); + result = mAudioRecord->start(); + if (result == DEAD_OBJECT) { + LOGW("obtainBuffer() dead IAudioRecord: creating a new one"); + result = openRecord(cblk->sampleRate, mFormat, mChannelCount, + mFrameCount, mFlags); + if (result == NO_ERROR) { + cblk = mCblk; + cblk->bufferTimeoutMs = MAX_RUN_TIMEOUT_MS; + } + } + cblk->lock.lock(); cblk->waitTimeMs = 0; } if (--waitCount == 0) { + cblk->lock.unlock(); return TIMED_OUT; } } @@ -413,15 +484,11 @@ status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) start_loop_here: framesReady = cblk->framesReady(); } + cblk->lock.unlock(); } - LOGW_IF(timeout, - "*** SERIOUS WARNING *** obtainBuffer() timed out " - "but didn't need to be locked. We recovered, but " - "this shouldn't happen (user=%08x, server=%08x)", cblk->user, cblk->server); - cblk->waitTimeMs = 0; - + if (framesReq > framesReady) { framesReq = framesReady; } @@ -437,7 +504,7 @@ status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) audioBuffer->channelCount= mChannelCount; audioBuffer->format = mFormat; audioBuffer->frameCount = framesReq; - audioBuffer->size = framesReq*mChannelCount*sizeof(int16_t); + audioBuffer->size = framesReq*cblk->frameSize; audioBuffer->raw = (int8_t*)cblk->buffer(u); active = mActive; return active ? status_t(NO_ERROR) : status_t(STOPPED); @@ -468,7 +535,7 @@ ssize_t AudioRecord::read(void* buffer, size_t userSize) do { - audioBuffer.frameCount = userSize/mChannelCount/sizeof(int16_t); + audioBuffer.frameCount = userSize/frameSize(); // Calling obtainBuffer() with a negative wait count causes // an (almost) infinite wait time. @@ -519,8 +586,8 @@ bool AudioRecord::processAudioBuffer(const sp<ClientRecordThread>& thread) do { audioBuffer.frameCount = frames; - // Calling obtainBuffer() with a wait count of 1 - // limits wait time to WAIT_PERIOD_MS. 
This prevents from being + // Calling obtainBuffer() with a wait count of 1 + // limits wait time to WAIT_PERIOD_MS. This prevents from being // stuck here not being able to handle timed events (position, markers). status_t err = obtainBuffer(&audioBuffer, 1); if (err < NO_ERROR) { @@ -548,14 +615,14 @@ bool AudioRecord::processAudioBuffer(const sp<ClientRecordThread>& thread) if (readSize > reqSize) readSize = reqSize; audioBuffer.size = readSize; - audioBuffer.frameCount = readSize/mChannelCount/sizeof(int16_t); + audioBuffer.frameCount = readSize/frameSize(); frames -= audioBuffer.frameCount; releaseBuffer(&audioBuffer); } while (frames); - + // Manage overrun callback if (mActive && (mCblk->framesAvailable_l() == 0)) { LOGV("Overrun user: %x, server: %x, flowControlFlag %d", mCblk->user, mCblk->server, mCblk->flowControlFlag); diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp index a21a7a481b2b..5352234f7984 100644 --- a/media/libmedia/AudioSystem.cpp +++ b/media/libmedia/AudioSystem.cpp @@ -18,10 +18,20 @@ //#define LOG_NDEBUG 0 #include <utils/Log.h> -#include <utils/IServiceManager.h> +#include <binder/IServiceManager.h> #include <media/AudioSystem.h> +#include <media/IAudioPolicyService.h> #include <math.h> +// ---------------------------------------------------------------------------- +// the sim build doesn't have gettid + +#ifndef HAVE_GETTID +# define gettid getpid +#endif + +// ---------------------------------------------------------------------------- + namespace android { // client singleton for AudioFlinger binder interface @@ -30,10 +40,9 @@ sp<IAudioFlinger> AudioSystem::gAudioFlinger; sp<AudioSystem::AudioFlingerClient> AudioSystem::gAudioFlingerClient; audio_error_callback AudioSystem::gAudioErrorCallback = NULL; // Cached values -int AudioSystem::gOutSamplingRate[NUM_AUDIO_OUTPUT_TYPES]; -int AudioSystem::gOutFrameCount[NUM_AUDIO_OUTPUT_TYPES]; -uint32_t AudioSystem::gOutLatency[NUM_AUDIO_OUTPUT_TYPES]; -bool 
AudioSystem::gA2dpEnabled; +DefaultKeyedVector<int, audio_io_handle_t> AudioSystem::gStreamOutputMap(0); +DefaultKeyedVector<audio_io_handle_t, AudioSystem::OutputDescriptor *> AudioSystem::gOutputs(0); + // Cached values for recording queries uint32_t AudioSystem::gPrevInSamplingRate = 16000; int AudioSystem::gPrevInFormat = AudioSystem::PCM_16_BIT; @@ -65,42 +74,10 @@ const sp<IAudioFlinger>& AudioSystem::get_audio_flinger() binder->linkToDeath(gAudioFlingerClient); gAudioFlinger = interface_cast<IAudioFlinger>(binder); gAudioFlinger->registerClient(gAudioFlingerClient); - // Cache frequently accessed parameters - for (int output = 0; output < NUM_AUDIO_OUTPUT_TYPES; output++) { - gOutFrameCount[output] = (int)gAudioFlinger->frameCount(output); - gOutSamplingRate[output] = (int)gAudioFlinger->sampleRate(output); - gOutLatency[output] = gAudioFlinger->latency(output); - } - gA2dpEnabled = gAudioFlinger->isA2dpEnabled(); } LOGE_IF(gAudioFlinger==0, "no AudioFlinger!?"); - return gAudioFlinger; -} -// routing helper functions -status_t AudioSystem::speakerphone(bool state) { - uint32_t routes = state ? ROUTE_SPEAKER : ROUTE_EARPIECE; - return setRouting(MODE_IN_CALL, routes, ROUTE_ALL); -} - -status_t AudioSystem::isSpeakerphoneOn(bool* state) { - uint32_t routes = 0; - status_t s = getRouting(MODE_IN_CALL, &routes); - *state = !!(routes & ROUTE_SPEAKER); - return s; -} - -status_t AudioSystem::bluetoothSco(bool state) { - uint32_t mask = ROUTE_BLUETOOTH_SCO; - uint32_t routes = state ? 
mask : ROUTE_EARPIECE; - return setRouting(MODE_IN_CALL, routes, ROUTE_ALL); -} - -status_t AudioSystem::isBluetoothScoOn(bool* state) { - uint32_t routes = 0; - status_t s = getRouting(MODE_IN_CALL, &routes); - *state = !!(routes & ROUTE_BLUETOOTH_SCO); - return s; + return gAudioFlinger; } status_t AudioSystem::muteMicrophone(bool state) { @@ -148,12 +125,12 @@ status_t AudioSystem::getMasterMute(bool* mute) return NO_ERROR; } -status_t AudioSystem::setStreamVolume(int stream, float value) +status_t AudioSystem::setStreamVolume(int stream, float value, int output) { if (uint32_t(stream) >= NUM_STREAM_TYPES) return BAD_VALUE; const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger(); if (af == 0) return PERMISSION_DENIED; - af->setStreamVolume(stream, value); + af->setStreamVolume(stream, value, output); return NO_ERROR; } @@ -166,12 +143,12 @@ status_t AudioSystem::setStreamMute(int stream, bool mute) return NO_ERROR; } -status_t AudioSystem::getStreamVolume(int stream, float* volume) +status_t AudioSystem::getStreamVolume(int stream, float* volume, int output) { if (uint32_t(stream) >= NUM_STREAM_TYPES) return BAD_VALUE; const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger(); if (af == 0) return PERMISSION_DENIED; - *volume = af->streamVolume(stream); + *volume = af->streamVolume(stream, output); return NO_ERROR; } @@ -192,43 +169,28 @@ status_t AudioSystem::setMode(int mode) return af->setMode(mode); } -status_t AudioSystem::getMode(int* mode) -{ + +status_t AudioSystem::isMusicActive(bool* state) { const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger(); if (af == 0) return PERMISSION_DENIED; - *mode = af->getMode(); + *state = af->isMusicActive(); return NO_ERROR; } -status_t AudioSystem::setRouting(int mode, uint32_t routes, uint32_t mask) -{ - const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger(); - if (af == 0) return PERMISSION_DENIED; - return af->setRouting(mode, routes, mask); -} -status_t AudioSystem::getRouting(int mode, 
uint32_t* routes) -{ +status_t AudioSystem::setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs) { const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger(); if (af == 0) return PERMISSION_DENIED; - uint32_t r = af->getRouting(mode); - *routes = r; - return NO_ERROR; + return af->setParameters(ioHandle, keyValuePairs); } -status_t AudioSystem::isMusicActive(bool* state) { +String8 AudioSystem::getParameters(audio_io_handle_t ioHandle, const String8& keys) { const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger(); - if (af == 0) return PERMISSION_DENIED; - *state = af->isMusicActive(); - return NO_ERROR; -} + String8 result = String8(""); + if (af == 0) return result; -// Temporary interface, do not use -// TODO: Replace with a more generic key:value get/set mechanism -status_t AudioSystem::setParameter(const char* key, const char* value) { - const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger(); - if (af == 0) return PERMISSION_DENIED; - return af->setParameter(key, value); + result = af->getParameters(ioHandle, keys); + return result; } // convert volume steps to natural log scale @@ -257,55 +219,108 @@ int AudioSystem::logToLinear(float volume) status_t AudioSystem::getOutputSamplingRate(int* samplingRate, int streamType) { - int output = getOutput(streamType); - - if (output == NUM_AUDIO_OUTPUT_TYPES) return PERMISSION_DENIED; + OutputDescriptor *outputDesc; + audio_io_handle_t output; + + if (streamType == DEFAULT) { + streamType = MUSIC; + } + + output = getOutput((stream_type)streamType); + if (output == 0) { + return PERMISSION_DENIED; + } + + gLock.lock(); + outputDesc = AudioSystem::gOutputs.valueFor(output); + if (outputDesc == 0) { + LOGV("getOutputSamplingRate() no output descriptor for output %d in gOutputs", output); + gLock.unlock(); + const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger(); + if (af == 0) return PERMISSION_DENIED; + *samplingRate = af->sampleRate(output); + } else { + 
LOGV("getOutputSamplingRate() reading from output desc"); + *samplingRate = outputDesc->samplingRate; + gLock.unlock(); + } + + LOGV("getOutputSamplingRate() streamType %d, output %d, sampling rate %d", streamType, output, *samplingRate); - // gOutSamplingRate[] is updated by getOutput() which calls get_audio_flinger() - LOGV("getOutputSamplingRate() streamType %d, output %d, sampling rate %d", streamType, output, gOutSamplingRate[output]); - - *samplingRate = gOutSamplingRate[output]; - return NO_ERROR; } status_t AudioSystem::getOutputFrameCount(int* frameCount, int streamType) { - int output = getOutput(streamType); + OutputDescriptor *outputDesc; + audio_io_handle_t output; - if (output == NUM_AUDIO_OUTPUT_TYPES) return PERMISSION_DENIED; + if (streamType == DEFAULT) { + streamType = MUSIC; + } - // gOutFrameCount[] is updated by getOutput() which calls get_audio_flinger() - LOGV("getOutputFrameCount() streamType %d, output %d, frame count %d", streamType, output, gOutFrameCount[output]); + output = getOutput((stream_type)streamType); + if (output == 0) { + return PERMISSION_DENIED; + } + + gLock.lock(); + outputDesc = AudioSystem::gOutputs.valueFor(output); + if (outputDesc == 0) { + gLock.unlock(); + const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger(); + if (af == 0) return PERMISSION_DENIED; + *frameCount = af->frameCount(output); + } else { + *frameCount = outputDesc->frameCount; + gLock.unlock(); + } + + LOGV("getOutputFrameCount() streamType %d, output %d, frameCount %d", streamType, output, *frameCount); - *frameCount = gOutFrameCount[output]; - return NO_ERROR; } status_t AudioSystem::getOutputLatency(uint32_t* latency, int streamType) { - int output = getOutput(streamType); + OutputDescriptor *outputDesc; + audio_io_handle_t output; - if (output == NUM_AUDIO_OUTPUT_TYPES) return PERMISSION_DENIED; + if (streamType == DEFAULT) { + streamType = MUSIC; + } - // gOutLatency[] is updated by getOutput() which calls get_audio_flinger() - 
LOGV("getOutputLatency() streamType %d, output %d, latency %d", streamType, output, gOutLatency[output]); + output = getOutput((stream_type)streamType); + if (output == 0) { + return PERMISSION_DENIED; + } + + gLock.lock(); + outputDesc = AudioSystem::gOutputs.valueFor(output); + if (outputDesc == 0) { + gLock.unlock(); + const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger(); + if (af == 0) return PERMISSION_DENIED; + *latency = af->latency(output); + } else { + *latency = outputDesc->latency; + gLock.unlock(); + } + + LOGV("getOutputLatency() streamType %d, output %d, latency %d", streamType, output, *latency); - *latency = gOutLatency[output]; - return NO_ERROR; } -status_t AudioSystem::getInputBufferSize(uint32_t sampleRate, int format, int channelCount, +status_t AudioSystem::getInputBufferSize(uint32_t sampleRate, int format, int channelCount, size_t* buffSize) { // Do we have a stale gInBufferSize or are we requesting the input buffer size for new values - if ((gInBuffSize == 0) || (sampleRate != gPrevInSamplingRate) || (format != gPrevInFormat) + if ((gInBuffSize == 0) || (sampleRate != gPrevInSamplingRate) || (format != gPrevInFormat) || (channelCount != gPrevInChannelCount)) { // save the request params gPrevInSamplingRate = sampleRate; - gPrevInFormat = format; + gPrevInFormat = format; gPrevInChannelCount = channelCount; gInBuffSize = 0; @@ -314,24 +329,25 @@ status_t AudioSystem::getInputBufferSize(uint32_t sampleRate, int format, int ch return PERMISSION_DENIED; } gInBuffSize = af->getInputBufferSize(sampleRate, format, channelCount); - } + } *buffSize = gInBuffSize; - + return NO_ERROR; } +status_t AudioSystem::setVoiceVolume(float value) +{ + const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger(); + if (af == 0) return PERMISSION_DENIED; + return af->setVoiceVolume(value); +} + // --------------------------------------------------------------------------- -void AudioSystem::AudioFlingerClient::binderDied(const wp<IBinder>& who) { 
+void AudioSystem::AudioFlingerClient::binderDied(const wp<IBinder>& who) { Mutex::Autolock _l(AudioSystem::gLock); - AudioSystem::gAudioFlinger.clear(); - for (int output = 0; output < NUM_AUDIO_OUTPUT_TYPES; output++) { - gOutFrameCount[output] = 0; - gOutSamplingRate[output] = 0; - gOutLatency[output] = 0; - } - AudioSystem::gInBuffSize = 0; + AudioSystem::gAudioFlinger.clear(); if (gAudioErrorCallback) { gAudioErrorCallback(DEAD_OBJECT); @@ -339,33 +355,82 @@ void AudioSystem::AudioFlingerClient::binderDied(const wp<IBinder>& who) { LOGW("AudioFlinger server died!"); } -void AudioSystem::AudioFlingerClient::a2dpEnabledChanged(bool enabled) { - gA2dpEnabled = enabled; - LOGV("AudioFlinger A2DP enabled status changed! %d", enabled); -} +void AudioSystem::AudioFlingerClient::ioConfigChanged(int event, int ioHandle, void *param2) { + LOGV("ioConfigChanged() event %d", event); + OutputDescriptor *desc; + uint32_t stream; + + if (ioHandle == 0) return; -void AudioSystem::setErrorCallback(audio_error_callback cb) { Mutex::Autolock _l(AudioSystem::gLock); - gAudioErrorCallback = cb; -} -int AudioSystem::getOutput(int streamType) -{ - // make sure that gA2dpEnabled is valid by calling get_audio_flinger() which in turn - // will call gAudioFlinger->isA2dpEnabled() - const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger(); - if (af == 0) return NUM_AUDIO_OUTPUT_TYPES; + switch (event) { + case STREAM_CONFIG_CHANGED: + if (param2 == 0) break; + stream = *(uint32_t *)param2; + LOGV("ioConfigChanged() STREAM_CONFIG_CHANGED stream %d, output %d", stream, ioHandle); + if (gStreamOutputMap.indexOfKey(stream) >= 0) { + gStreamOutputMap.replaceValueFor(stream, ioHandle); + } + break; + case OUTPUT_OPENED: { + if (gOutputs.indexOfKey(ioHandle) >= 0) { + LOGV("ioConfigChanged() opening already existing output! 
%d", ioHandle); + break; + } + if (param2 == 0) break; + desc = (OutputDescriptor *)param2; + + OutputDescriptor *outputDesc = new OutputDescriptor(*desc); + gOutputs.add(ioHandle, outputDesc); + LOGV("ioConfigChanged() new output samplingRate %d, format %d channels %d frameCount %d latency %d", + outputDesc->samplingRate, outputDesc->format, outputDesc->channels, outputDesc->frameCount, outputDesc->latency); + } break; + case OUTPUT_CLOSED: { + if (gOutputs.indexOfKey(ioHandle) < 0) { + LOGW("ioConfigChanged() closing unknow output! %d", ioHandle); + break; + } + LOGV("ioConfigChanged() output %d closed", ioHandle); + + gOutputs.removeItem(ioHandle); + for (int i = gStreamOutputMap.size() - 1; i >= 0 ; i--) { + if (gStreamOutputMap.valueAt(i) == ioHandle) { + gStreamOutputMap.removeItemsAt(i); + } + } + } break; + + case OUTPUT_CONFIG_CHANGED: { + int index = gOutputs.indexOfKey(ioHandle); + if (index < 0) { + LOGW("ioConfigChanged() modifying unknow output! %d", ioHandle); + break; + } + if (param2 == 0) break; + desc = (OutputDescriptor *)param2; + + LOGV("ioConfigChanged() new config for output %d samplingRate %d, format %d channels %d frameCount %d latency %d", + ioHandle, desc->samplingRate, desc->format, + desc->channels, desc->frameCount, desc->latency); + OutputDescriptor *outputDesc = gOutputs.valueAt(index); + delete outputDesc; + outputDesc = new OutputDescriptor(*desc); + gOutputs.replaceValueFor(ioHandle, outputDesc); + } break; + case INPUT_OPENED: + case INPUT_CLOSED: + case INPUT_CONFIG_CHANGED: + break; - if (streamType == DEFAULT) { - streamType = MUSIC; - } - if (gA2dpEnabled && routedToA2dpOutput(streamType)) { - return AUDIO_OUTPUT_A2DP; - } else { - return AUDIO_OUTPUT_HARDWARE; } } +void AudioSystem::setErrorCallback(audio_error_callback cb) { + Mutex::Autolock _l(gLock); + gAudioErrorCallback = cb; +} + bool AudioSystem::routedToA2dpOutput(int streamType) { switch(streamType) { case MUSIC: @@ -379,6 +444,461 @@ bool 
AudioSystem::routedToA2dpOutput(int streamType) { } +// client singleton for AudioPolicyService binder interface +sp<IAudioPolicyService> AudioSystem::gAudioPolicyService; +sp<AudioSystem::AudioPolicyServiceClient> AudioSystem::gAudioPolicyServiceClient; + + +// establish binder interface to AudioFlinger service +const sp<IAudioPolicyService>& AudioSystem::get_audio_policy_service() +{ + gLock.lock(); + if (gAudioPolicyService.get() == 0) { + sp<IServiceManager> sm = defaultServiceManager(); + sp<IBinder> binder; + do { + binder = sm->getService(String16("media.audio_policy")); + if (binder != 0) + break; + LOGW("AudioPolicyService not published, waiting..."); + usleep(500000); // 0.5 s + } while(true); + if (gAudioPolicyServiceClient == NULL) { + gAudioPolicyServiceClient = new AudioPolicyServiceClient(); + } + binder->linkToDeath(gAudioPolicyServiceClient); + gAudioPolicyService = interface_cast<IAudioPolicyService>(binder); + gLock.unlock(); + } else { + gLock.unlock(); + } + return gAudioPolicyService; +} + +status_t AudioSystem::setDeviceConnectionState(audio_devices device, + device_connection_state state, + const char *device_address) +{ + const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service(); + if (aps == 0) return PERMISSION_DENIED; + + return aps->setDeviceConnectionState(device, state, device_address); +} + +AudioSystem::device_connection_state AudioSystem::getDeviceConnectionState(audio_devices device, + const char *device_address) +{ + const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service(); + if (aps == 0) return DEVICE_STATE_UNAVAILABLE; + + return aps->getDeviceConnectionState(device, device_address); +} + +status_t AudioSystem::setPhoneState(int state) +{ + const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service(); + if (aps == 0) return PERMISSION_DENIED; + + return aps->setPhoneState(state); +} + +status_t AudioSystem::setRingerMode(uint32_t mode, uint32_t mask) +{ + const 
sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service(); + if (aps == 0) return PERMISSION_DENIED; + return aps->setRingerMode(mode, mask); +} + +status_t AudioSystem::setForceUse(force_use usage, forced_config config) +{ + const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service(); + if (aps == 0) return PERMISSION_DENIED; + return aps->setForceUse(usage, config); +} + +AudioSystem::forced_config AudioSystem::getForceUse(force_use usage) +{ + const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service(); + if (aps == 0) return FORCE_NONE; + return aps->getForceUse(usage); +} + + +audio_io_handle_t AudioSystem::getOutput(stream_type stream, + uint32_t samplingRate, + uint32_t format, + uint32_t channels, + output_flags flags) +{ + audio_io_handle_t output = 0; + if ((flags & AudioSystem::OUTPUT_FLAG_DIRECT) == 0) { + Mutex::Autolock _l(gLock); + output = AudioSystem::gStreamOutputMap.valueFor(stream); + LOGV_IF((output != 0), "getOutput() read %d from cache for stream %d", output, stream); + } + if (output == 0) { + const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service(); + if (aps == 0) return 0; + output = aps->getOutput(stream, samplingRate, format, channels, flags); + if ((flags & AudioSystem::OUTPUT_FLAG_DIRECT) == 0) { + Mutex::Autolock _l(gLock); + AudioSystem::gStreamOutputMap.add(stream, output); + } + } + return output; +} + +status_t AudioSystem::startOutput(audio_io_handle_t output, AudioSystem::stream_type stream) +{ + const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service(); + if (aps == 0) return PERMISSION_DENIED; + return aps->startOutput(output, stream); +} + +status_t AudioSystem::stopOutput(audio_io_handle_t output, AudioSystem::stream_type stream) +{ + const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service(); + if (aps == 0) return PERMISSION_DENIED; + return aps->stopOutput(output, stream); +} + +void 
AudioSystem::releaseOutput(audio_io_handle_t output) +{ + const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service(); + if (aps == 0) return; + aps->releaseOutput(output); +} + +audio_io_handle_t AudioSystem::getInput(int inputSource, + uint32_t samplingRate, + uint32_t format, + uint32_t channels, + audio_in_acoustics acoustics) +{ + const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service(); + if (aps == 0) return 0; + return aps->getInput(inputSource, samplingRate, format, channels, acoustics); +} + +status_t AudioSystem::startInput(audio_io_handle_t input) +{ + const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service(); + if (aps == 0) return PERMISSION_DENIED; + return aps->startInput(input); +} + +status_t AudioSystem::stopInput(audio_io_handle_t input) +{ + const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service(); + if (aps == 0) return PERMISSION_DENIED; + return aps->stopInput(input); +} + +void AudioSystem::releaseInput(audio_io_handle_t input) +{ + const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service(); + if (aps == 0) return; + aps->releaseInput(input); +} + +status_t AudioSystem::initStreamVolume(stream_type stream, + int indexMin, + int indexMax) +{ + const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service(); + if (aps == 0) return PERMISSION_DENIED; + return aps->initStreamVolume(stream, indexMin, indexMax); +} + +status_t AudioSystem::setStreamVolumeIndex(stream_type stream, int index) +{ + const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service(); + if (aps == 0) return PERMISSION_DENIED; + return aps->setStreamVolumeIndex(stream, index); +} + +status_t AudioSystem::getStreamVolumeIndex(stream_type stream, int *index) +{ + const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service(); + if (aps == 0) return PERMISSION_DENIED; + return aps->getStreamVolumeIndex(stream, index); +} + +// 
--------------------------------------------------------------------------- + +void AudioSystem::AudioPolicyServiceClient::binderDied(const wp<IBinder>& who) { + Mutex::Autolock _l(AudioSystem::gLock); + AudioSystem::gAudioPolicyService.clear(); + + LOGW("AudioPolicyService server died!"); +} + +// --------------------------------------------------------------------------- + + +// use emulated popcount optimization +// http://www.df.lth.se/~john_e/gems/gem002d.html +uint32_t AudioSystem::popCount(uint32_t u) +{ + u = ((u&0x55555555) + ((u>>1)&0x55555555)); + u = ((u&0x33333333) + ((u>>2)&0x33333333)); + u = ((u&0x0f0f0f0f) + ((u>>4)&0x0f0f0f0f)); + u = ((u&0x00ff00ff) + ((u>>8)&0x00ff00ff)); + u = ( u&0x0000ffff) + (u>>16); + return u; +} + +bool AudioSystem::isOutputDevice(audio_devices device) +{ + if ((popCount(device) == 1 ) && + ((device & ~AudioSystem::DEVICE_OUT_ALL) == 0)) { + return true; + } else { + return false; + } +} + +bool AudioSystem::isInputDevice(audio_devices device) +{ + if ((popCount(device) == 1 ) && + ((device & ~AudioSystem::DEVICE_IN_ALL) == 0)) { + return true; + } else { + return false; + } +} + +bool AudioSystem::isA2dpDevice(audio_devices device) +{ + if ((popCount(device) == 1 ) && + (device & (AudioSystem::DEVICE_OUT_BLUETOOTH_A2DP | + AudioSystem::DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES | + AudioSystem::DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER))) { + return true; + } else { + return false; + } +} + +bool AudioSystem::isBluetoothScoDevice(audio_devices device) +{ + if ((popCount(device) == 1 ) && + (device & (AudioSystem::DEVICE_OUT_BLUETOOTH_SCO | + AudioSystem::DEVICE_OUT_BLUETOOTH_SCO_HEADSET | + AudioSystem::DEVICE_OUT_BLUETOOTH_SCO_CARKIT))) { + return true; + } else { + return false; + } +} + +bool AudioSystem::isLowVisibility(stream_type stream) +{ + if (stream == AudioSystem::SYSTEM || stream == AudioSystem::NOTIFICATION) { + return true; + } else { + return false; + } +} + +bool AudioSystem::isInputChannel(uint32_t channel) +{ + if 
((channel & ~AudioSystem::CHANNEL_IN_ALL) == 0) { + return true; + } else { + return false; + } +} + +bool AudioSystem::isOutputChannel(uint32_t channel) +{ + if ((channel & ~AudioSystem::CHANNEL_OUT_ALL) == 0) { + return true; + } else { + return false; + } +} + +bool AudioSystem::isValidFormat(uint32_t format) +{ + switch (format & MAIN_FORMAT_MASK) { + case PCM: + case MP3: + case AMR_NB: + case AMR_WB: + case AAC: + case HE_AAC_V1: + case HE_AAC_V2: + case VORBIS: + return true; + default: + return false; + } +} + +bool AudioSystem::isLinearPCM(uint32_t format) +{ + switch (format) { + case PCM_16_BIT: + case PCM_8_BIT: + return true; + default: + return false; + } +} + +//------------------------- AudioParameter class implementation --------------- + +const char *AudioParameter::keyRouting = "routing"; +const char *AudioParameter::keySamplingRate = "sampling_rate"; +const char *AudioParameter::keyFormat = "format"; +const char *AudioParameter::keyChannels = "channels"; +const char *AudioParameter::keyFrameCount = "frame_count"; + +AudioParameter::AudioParameter(const String8& keyValuePairs) +{ + char *str = new char[keyValuePairs.length()+1]; + mKeyValuePairs = keyValuePairs; + + strcpy(str, keyValuePairs.string()); + char *pair = strtok(str, ";"); + while (pair != NULL) { + if (strlen(pair) != 0) { + size_t eqIdx = strcspn(pair, "="); + String8 key = String8(pair, eqIdx); + String8 value; + if (eqIdx == strlen(pair)) { + value = String8(""); + } else { + value = String8(pair + eqIdx + 1); + } + if (mParameters.indexOfKey(key) < 0) { + mParameters.add(key, value); + } else { + mParameters.replaceValueFor(key, value); + } + } else { + LOGV("AudioParameter() cstor empty key value pair"); + } + pair = strtok(NULL, ";"); + } + + delete[] str; +} + +AudioParameter::~AudioParameter() +{ + mParameters.clear(); +} + +String8 AudioParameter::toString() +{ + String8 str = String8(""); + + size_t size = mParameters.size(); + for (size_t i = 0; i < size; i++) { + str += 
mParameters.keyAt(i); + str += "="; + str += mParameters.valueAt(i); + if (i < (size - 1)) str += ";"; + } + return str; +} +status_t AudioParameter::add(const String8& key, const String8& value) +{ + if (mParameters.indexOfKey(key) < 0) { + mParameters.add(key, value); + return NO_ERROR; + } else { + mParameters.replaceValueFor(key, value); + return ALREADY_EXISTS; + } +} + +status_t AudioParameter::addInt(const String8& key, const int value) +{ + char str[12]; + if (snprintf(str, 12, "%d", value) > 0) { + String8 str8 = String8(str); + return add(key, str8); + } else { + return BAD_VALUE; + } +} + +status_t AudioParameter::addFloat(const String8& key, const float value) +{ + char str[23]; + if (snprintf(str, 23, "%.10f", value) > 0) { + String8 str8 = String8(str); + return add(key, str8); + } else { + return BAD_VALUE; + } +} + +status_t AudioParameter::remove(const String8& key) +{ + if (mParameters.indexOfKey(key) >= 0) { + mParameters.removeItem(key); + return NO_ERROR; + } else { + return BAD_VALUE; + } +} + +status_t AudioParameter::get(const String8& key, String8& value) +{ + if (mParameters.indexOfKey(key) >= 0) { + value = mParameters.valueFor(key); + return NO_ERROR; + } else { + return BAD_VALUE; + } +} + +status_t AudioParameter::getInt(const String8& key, int& value) +{ + String8 str8; + status_t result = get(key, str8); + value = 0; + if (result == NO_ERROR) { + int val; + if (sscanf(str8.string(), "%d", &val) == 1) { + value = val; + } else { + result = INVALID_OPERATION; + } + } + return result; +} + +status_t AudioParameter::getFloat(const String8& key, float& value) +{ + String8 str8; + status_t result = get(key, str8); + value = 0; + if (result == NO_ERROR) { + float val; + if (sscanf(str8.string(), "%f", &val) == 1) { + value = val; + } else { + result = INVALID_OPERATION; + } + } + return result; +} + +status_t AudioParameter::getAt(size_t index, String8& key, String8& value) +{ + if (mParameters.size() > index) { + key = 
mParameters.keyAt(index); + value = mParameters.valueAt(index); + return NO_ERROR; + } else { + return BAD_VALUE; + } +} }; // namespace android diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index b2c067b1290f..8529a8e46fa1 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -32,9 +32,9 @@ #include <media/AudioTrack.h> #include <utils/Log.h> -#include <utils/MemoryDealer.h> -#include <utils/Parcel.h> -#include <utils/IPCThreadState.h> +#include <binder/MemoryDealer.h> +#include <binder/Parcel.h> +#include <binder/IPCThreadState.h> #include <utils/Timers.h> #include <cutils/atomic.h> @@ -54,7 +54,7 @@ AudioTrack::AudioTrack( int streamType, uint32_t sampleRate, int format, - int channelCount, + int channels, int frameCount, uint32_t flags, callback_t cbf, @@ -62,7 +62,7 @@ AudioTrack::AudioTrack( int notificationFrames) : mStatus(NO_INIT) { - mStatus = set(streamType, sampleRate, format, channelCount, + mStatus = set(streamType, sampleRate, format, channels, frameCount, flags, cbf, user, notificationFrames, 0); } @@ -70,7 +70,7 @@ AudioTrack::AudioTrack( int streamType, uint32_t sampleRate, int format, - int channelCount, + int channels, const sp<IMemory>& sharedBuffer, uint32_t flags, callback_t cbf, @@ -78,7 +78,7 @@ AudioTrack::AudioTrack( int notificationFrames) : mStatus(NO_INIT) { - mStatus = set(streamType, sampleRate, format, channelCount, + mStatus = set(streamType, sampleRate, format, channels, 0, flags, cbf, user, notificationFrames, sharedBuffer); } @@ -97,6 +97,7 @@ AudioTrack::~AudioTrack() } mAudioTrack.clear(); IPCThreadState::self()->flushCommands(); + AudioSystem::releaseOutput(getOutput()); } } @@ -104,7 +105,7 @@ status_t AudioTrack::set( int streamType, uint32_t sampleRate, int format, - int channelCount, + int channels, int frameCount, uint32_t flags, callback_t cbf, @@ -121,11 +122,6 @@ status_t AudioTrack::set( return INVALID_OPERATION; } - const sp<IAudioFlinger>& audioFlinger = 
AudioSystem::get_audio_flinger(); - if (audioFlinger == 0) { - LOGE("Could not get audioflinger"); - return NO_INIT; - } int afSampleRate; if (AudioSystem::getOutputSamplingRate(&afSampleRate, streamType) != NO_ERROR) { return NO_INIT; @@ -150,73 +146,82 @@ status_t AudioTrack::set( if (format == 0) { format = AudioSystem::PCM_16_BIT; } - if (channelCount == 0) { - channelCount = 2; + if (channels == 0) { + channels = AudioSystem::CHANNEL_OUT_STEREO; } // validate parameters - if (((format != AudioSystem::PCM_8_BIT) || sharedBuffer != 0) && - (format != AudioSystem::PCM_16_BIT)) { + if (!AudioSystem::isValidFormat(format)) { LOGE("Invalid format"); return BAD_VALUE; } - if (channelCount != 1 && channelCount != 2) { - LOGE("Invalid channel number"); + + // force direct flag if format is not linear PCM + if (!AudioSystem::isLinearPCM(format)) { + flags |= AudioSystem::OUTPUT_FLAG_DIRECT; + } + + if (!AudioSystem::isOutputChannel(channels)) { + LOGE("Invalid channel mask"); return BAD_VALUE; } + uint32_t channelCount = AudioSystem::popCount(channels); - // Ensure that buffer depth covers at least audio hardware latency - uint32_t minBufCount = afLatency / ((1000 * afFrameCount)/afSampleRate); - if (minBufCount < 2) minBufCount = 2; + audio_io_handle_t output = AudioSystem::getOutput((AudioSystem::stream_type)streamType, + sampleRate, format, channels, (AudioSystem::output_flags)flags); - // When playing from shared buffer, playback will start even if last audioflinger - // block is partly filled. 
- if (sharedBuffer != 0 && minBufCount > 1) { - minBufCount--; + if (output == 0) { + LOGE("Could not get audio output for stream type %d", streamType); + return BAD_VALUE; } - int minFrameCount = (afFrameCount*sampleRate*minBufCount)/afSampleRate; - - if (sharedBuffer == 0) { - if (frameCount == 0) { - frameCount = minFrameCount; - } - if (notificationFrames == 0) { - notificationFrames = frameCount/2; - } - // Make sure that application is notified with sufficient margin - // before underrun - if (notificationFrames > frameCount/2) { - notificationFrames = frameCount/2; + if (!AudioSystem::isLinearPCM(format)) { + if (sharedBuffer != 0) { + frameCount = sharedBuffer->size(); } } else { - // Ensure that buffer alignment matches channelcount - if (((uint32_t)sharedBuffer->pointer() & (channelCount | 1)) != 0) { - LOGE("Invalid buffer alignement: address %p, channelCount %d", sharedBuffer->pointer(), channelCount); - return BAD_VALUE; - } - frameCount = sharedBuffer->size()/channelCount/sizeof(int16_t); - } + // Ensure that buffer depth covers at least audio hardware latency + uint32_t minBufCount = afLatency / ((1000 * afFrameCount)/afSampleRate); + if (minBufCount < 2) minBufCount = 2; - if (frameCount < minFrameCount) { - LOGE("Invalid buffer size: minFrameCount %d, frameCount %d", minFrameCount, frameCount); - return BAD_VALUE; + int minFrameCount = (afFrameCount*sampleRate*minBufCount)/afSampleRate; + + if (sharedBuffer == 0) { + if (frameCount == 0) { + frameCount = minFrameCount; + } + if (notificationFrames == 0) { + notificationFrames = frameCount/2; + } + // Make sure that application is notified with sufficient margin + // before underrun + if (notificationFrames > frameCount/2) { + notificationFrames = frameCount/2; + } + if (frameCount < minFrameCount) { + LOGE("Invalid buffer size: minFrameCount %d, frameCount %d", minFrameCount, frameCount); + return BAD_VALUE; + } + } else { + // Ensure that buffer alignment matches channelcount + if 
(((uint32_t)sharedBuffer->pointer() & (channelCount | 1)) != 0) { + LOGE("Invalid buffer alignement: address %p, channelCount %d", sharedBuffer->pointer(), channelCount); + return BAD_VALUE; + } + frameCount = sharedBuffer->size()/channelCount/sizeof(int16_t); + } } - // create the track - status_t status; - sp<IAudioTrack> track = audioFlinger->createTrack(getpid(), - streamType, sampleRate, format, channelCount, frameCount, flags, sharedBuffer, &status); + mVolume[LEFT] = 1.0f; + mVolume[RIGHT] = 1.0f; + // create the IAudioTrack + status_t status = createTrack(streamType, sampleRate, format, channelCount, + frameCount, flags, sharedBuffer, output); - if (track == 0) { - LOGE("AudioFlinger could not create track, status: %d", status); + if (status != NO_ERROR) { return status; } - sp<IMemory> cblk = track->getCblk(); - if (cblk == 0) { - LOGE("Could not get control block"); - return NO_INIT; - } + if (cbf != 0) { mAudioTrackThread = new AudioTrackThread(*this, threadCanCallJava); if (mAudioTrackThread == 0) { @@ -227,24 +232,9 @@ status_t AudioTrack::set( mStatus = NO_ERROR; - mAudioTrack = track; - mCblkMemory = cblk; - mCblk = static_cast<audio_track_cblk_t*>(cblk->pointer()); - mCblk->out = 1; - // Update buffer size in case it has been limited by AudioFlinger during track creation - mFrameCount = mCblk->frameCount; - if (sharedBuffer == 0) { - mCblk->buffers = (char*)mCblk + sizeof(audio_track_cblk_t); - } else { - mCblk->buffers = sharedBuffer->pointer(); - // Force buffer full condition as data is already present in shared memory - mCblk->stepUser(mFrameCount); - } - mCblk->volume[0] = mCblk->volume[1] = 0x1000; - mVolume[LEFT] = 1.0f; - mVolume[RIGHT] = 1.0f; mStreamType = streamType; mFormat = format; + mChannels = channels; mChannelCount = channelCount; mSharedBuffer = sharedBuffer; mMuted = false; @@ -259,6 +249,7 @@ status_t AudioTrack::set( mMarkerReached = false; mNewPosition = 0; mUpdatePeriod = 0; + mFlags = flags; return NO_ERROR; } @@ -297,7 
+288,11 @@ uint32_t AudioTrack::frameCount() const int AudioTrack::frameSize() const { - return channelCount()*((format() == AudioSystem::PCM_8_BIT) ? sizeof(uint8_t) : sizeof(int16_t)); + if (AudioSystem::isLinearPCM(mFormat)) { + return channelCount()*((format() == AudioSystem::PCM_8_BIT) ? sizeof(uint8_t) : sizeof(int16_t)); + } else { + return sizeof(uint8_t); + } } sp<IMemory>& AudioTrack::sharedBuffer() @@ -323,15 +318,27 @@ void AudioTrack::start() } if (android_atomic_or(1, &mActive) == 0) { - mNewPosition = mCblk->server + mUpdatePeriod; - mCblk->bufferTimeoutMs = MAX_STARTUP_TIMEOUT_MS; - mCblk->waitTimeMs = 0; - if (t != 0) { - t->run("AudioTrackThread", THREAD_PRIORITY_AUDIO_CLIENT); + audio_io_handle_t output = AudioTrack::getOutput(); + status_t status = mAudioTrack->start(); + if (status == DEAD_OBJECT) { + LOGV("start() dead IAudioTrack: creating a new one"); + status = createTrack(mStreamType, mCblk->sampleRate, mFormat, mChannelCount, + mFrameCount, mFlags, mSharedBuffer, output); + } + if (status == NO_ERROR) { + AudioSystem::startOutput(output, (AudioSystem::stream_type)mStreamType); + mNewPosition = mCblk->server + mUpdatePeriod; + mCblk->bufferTimeoutMs = MAX_STARTUP_TIMEOUT_MS; + mCblk->waitTimeMs = 0; + if (t != 0) { + t->run("AudioTrackThread", THREAD_PRIORITY_AUDIO_CLIENT); + } else { + setpriority(PRIO_PROCESS, 0, THREAD_PRIORITY_AUDIO_CLIENT); + } } else { - setpriority(PRIO_PROCESS, 0, THREAD_PRIORITY_AUDIO_CLIENT); + LOGV("start() failed"); + android_atomic_and(~1, &mActive); } - mAudioTrack->start(); } if (t != 0) { @@ -367,6 +374,7 @@ void AudioTrack::stop() } else { setpriority(PRIO_PROCESS, 0, ANDROID_PRIORITY_NORMAL); } + AudioSystem::stopOutput(getOutput(), (AudioSystem::stream_type)mStreamType); } if (t != 0) { @@ -382,12 +390,12 @@ bool AudioTrack::stopped() const void AudioTrack::flush() { LOGV("flush"); - + // clear playback marker and periodic update counter mMarkerPosition = 0; mMarkerReached = false; mUpdatePeriod = 0; - + 
if (!mActive) { mAudioTrack->flush(); @@ -403,6 +411,7 @@ void AudioTrack::pause() if (android_atomic_and(~1, &mActive) == 1) { mActive = 0; mAudioTrack->pause(); + AudioSystem::stopOutput(getOutput(), (AudioSystem::stream_type)mStreamType); } } @@ -455,7 +464,6 @@ status_t AudioTrack::setLoop(uint32_t loopStart, uint32_t loopEnd, int loopCount { audio_track_cblk_t* cblk = mCblk; - Mutex::Autolock _l(cblk->lock); if (loopCount == 0) { @@ -476,7 +484,7 @@ status_t AudioTrack::setLoop(uint32_t loopStart, uint32_t loopEnd, int loopCount LOGE("setLoop invalid value: loop markers beyond data: loopStart %d, loopEnd %d, framecount %d", loopStart, loopEnd, mFrameCount); return BAD_VALUE; - } + } cblk->loopStart = loopStart; cblk->loopEnd = loopEnd; @@ -555,7 +563,7 @@ status_t AudioTrack::setPosition(uint32_t position) mCblk->server = position; mCblk->forceReady = 1; - + return NO_ERROR; } @@ -571,7 +579,7 @@ status_t AudioTrack::getPosition(uint32_t *position) status_t AudioTrack::reload() { if (!stopped()) return INVALID_OPERATION; - + flush(); mCblk->stepUser(mFrameCount); @@ -579,12 +587,75 @@ status_t AudioTrack::reload() return NO_ERROR; } +audio_io_handle_t AudioTrack::getOutput() +{ + return AudioSystem::getOutput((AudioSystem::stream_type)mStreamType, + mCblk->sampleRate, mFormat, mChannels, (AudioSystem::output_flags)mFlags); +} + // ------------------------------------------------------------------------- +status_t AudioTrack::createTrack( + int streamType, + uint32_t sampleRate, + int format, + int channelCount, + int frameCount, + uint32_t flags, + const sp<IMemory>& sharedBuffer, + audio_io_handle_t output) +{ + status_t status; + const sp<IAudioFlinger>& audioFlinger = AudioSystem::get_audio_flinger(); + if (audioFlinger == 0) { + LOGE("Could not get audioflinger"); + return NO_INIT; + } + + sp<IAudioTrack> track = audioFlinger->createTrack(getpid(), + streamType, + sampleRate, + format, + channelCount, + frameCount, + ((uint16_t)flags) << 16, + 
sharedBuffer, + output, + &status); + + if (track == 0) { + LOGE("AudioFlinger could not create track, status: %d", status); + return status; + } + sp<IMemory> cblk = track->getCblk(); + if (cblk == 0) { + LOGE("Could not get control block"); + return NO_INIT; + } + mAudioTrack.clear(); + mAudioTrack = track; + mCblkMemory.clear(); + mCblkMemory = cblk; + mCblk = static_cast<audio_track_cblk_t*>(cblk->pointer()); + mCblk->out = 1; + // Update buffer size in case it has been limited by AudioFlinger during track creation + mFrameCount = mCblk->frameCount; + if (sharedBuffer == 0) { + mCblk->buffers = (char*)mCblk + sizeof(audio_track_cblk_t); + } else { + mCblk->buffers = sharedBuffer->pointer(); + // Force buffer full condition as data is already present in shared memory + mCblk->stepUser(mFrameCount); + } + + mCblk->volumeLR = (int32_t(int16_t(mVolume[LEFT] * 0x1000)) << 16) | int16_t(mVolume[RIGHT] * 0x1000); + + return NO_ERROR; +} + status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) { int active; - int timeout = 0; status_t result; audio_track_cblk_t* cblk = mCblk; uint32_t framesReq = audioBuffer->frameCount; @@ -596,19 +667,22 @@ status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) uint32_t framesAvail = cblk->framesAvailable(); if (framesAvail == 0) { - Mutex::Autolock _l(cblk->lock); + cblk->lock.lock(); goto start_loop_here; while (framesAvail == 0) { active = mActive; if (UNLIKELY(!active)) { LOGV("Not active and NO_MORE_BUFFERS"); + cblk->lock.unlock(); return NO_MORE_BUFFERS; } - if (UNLIKELY(!waitCount)) + if (UNLIKELY(!waitCount)) { + cblk->lock.unlock(); return WOULD_BLOCK; - timeout = 0; + } + result = cblk->cv.waitRelative(cblk->lock, milliseconds(waitTimeMs)); - if (__builtin_expect(result!=NO_ERROR, false)) { + if (__builtin_expect(result!=NO_ERROR, false)) { cblk->waitTimeMs += waitTimeMs; if (cblk->waitTimeMs >= cblk->bufferTimeoutMs) { // timing out when a loop has been set and we have already written 
upto loop end @@ -616,16 +690,25 @@ status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) if (cblk->user < cblk->loopEnd) { LOGW( "obtainBuffer timed out (is the CPU pegged?) %p " "user=%08x, server=%08x", this, cblk->user, cblk->server); - //unlock cblk mutex before calling mAudioTrack->start() (see issue #1617140) + //unlock cblk mutex before calling mAudioTrack->start() (see issue #1617140) cblk->lock.unlock(); - mAudioTrack->start(); + result = mAudioTrack->start(); + if (result == DEAD_OBJECT) { + LOGW("obtainBuffer() dead IAudioTrack: creating a new one"); + result = createTrack(mStreamType, cblk->sampleRate, mFormat, mChannelCount, + mFrameCount, mFlags, mSharedBuffer, getOutput()); + if (result == NO_ERROR) { + cblk = mCblk; + cblk->bufferTimeoutMs = MAX_RUN_TIMEOUT_MS; + } + } cblk->lock.lock(); - timeout = 1; } cblk->waitTimeMs = 0; } - + if (--waitCount == 0) { + cblk->lock.unlock(); return TIMED_OUT; } } @@ -633,10 +716,11 @@ status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) start_loop_here: framesAvail = cblk->framesAvailable_l(); } + cblk->lock.unlock(); } cblk->waitTimeMs = 0; - + if (framesReq > framesAvail) { framesReq = framesAvail; } @@ -648,17 +732,16 @@ status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) framesReq = bufferEnd - u; } - LOGW_IF(timeout, - "*** SERIOUS WARNING *** obtainBuffer() timed out " - "but didn't need to be locked. We recovered, but " - "this shouldn't happen (user=%08x, server=%08x)", cblk->user, cblk->server); - - audioBuffer->flags = mMuted ? Buffer::MUTE : 0; - audioBuffer->channelCount= mChannelCount; - audioBuffer->format = AudioSystem::PCM_16_BIT; - audioBuffer->frameCount = framesReq; - audioBuffer->size = framesReq*mChannelCount*sizeof(int16_t); - audioBuffer->raw = (int8_t *)cblk->buffer(u); + audioBuffer->flags = mMuted ? 
Buffer::MUTE : 0; + audioBuffer->channelCount = mChannelCount; + audioBuffer->frameCount = framesReq; + audioBuffer->size = framesReq * cblk->frameSize; + if (AudioSystem::isLinearPCM(mFormat)) { + audioBuffer->format = AudioSystem::PCM_16_BIT; + } else { + audioBuffer->format = mFormat; + } + audioBuffer->raw = (int8_t *)cblk->buffer(u); active = mActive; return active ? status_t(NO_ERROR) : status_t(STOPPED); } @@ -690,10 +773,8 @@ ssize_t AudioTrack::write(const void* buffer, size_t userSize) Buffer audioBuffer; do { - audioBuffer.frameCount = userSize/mChannelCount; - if (mFormat == AudioSystem::PCM_16_BIT) { - audioBuffer.frameCount >>= 1; - } + audioBuffer.frameCount = userSize/frameSize(); + // Calling obtainBuffer() with a negative wait count causes // an (almost) infinite wait time. status_t err = obtainBuffer(&audioBuffer, -1); @@ -705,7 +786,8 @@ ssize_t AudioTrack::write(const void* buffer, size_t userSize) } size_t toWrite; - if (mFormat == AudioSystem::PCM_8_BIT) { + + if (mFormat == AudioSystem::PCM_8_BIT && !(mFlags & AudioSystem::OUTPUT_FLAG_DIRECT)) { // Divide capacity by 2 to take expansion into account toWrite = audioBuffer.size>>1; // 8 to 16 bit conversion @@ -714,7 +796,7 @@ ssize_t AudioTrack::write(const void* buffer, size_t userSize) while(count--) { *dst++ = (int16_t)(*src++^0x80) << 8; } - }else { + } else { toWrite = audioBuffer.size; memcpy(audioBuffer.i8, src, toWrite); src += toWrite; @@ -742,13 +824,13 @@ bool AudioTrack::processAudioBuffer(const sp<AudioTrackThread>& thread) if (mCblk->flowControlFlag == 0) { mCbf(EVENT_UNDERRUN, mUserData, 0); if (mCblk->server == mCblk->frameCount) { - mCbf(EVENT_BUFFER_END, mUserData, 0); + mCbf(EVENT_BUFFER_END, mUserData, 0); } mCblk->flowControlFlag = 1; if (mSharedBuffer != 0) return false; } } - + // Manage loop end callback while (mLoopCount > mCblk->loopCount) { int loopCount = -1; @@ -767,7 +849,7 @@ bool AudioTrack::processAudioBuffer(const sp<AudioTrackThread>& thread) } // Manage new 
position callback - if(mUpdatePeriod > 0) { + if (mUpdatePeriod > 0) { while (mCblk->server >= mNewPosition) { mCbf(EVENT_NEW_POS, mUserData, (void *)&mNewPosition); mNewPosition += mUpdatePeriod; @@ -784,10 +866,10 @@ bool AudioTrack::processAudioBuffer(const sp<AudioTrackThread>& thread) do { audioBuffer.frameCount = frames; - - // Calling obtainBuffer() with a wait count of 1 - // limits wait time to WAIT_PERIOD_MS. This prevents from being - // stuck here not being able to handle timed events (position, markers, loops). + + // Calling obtainBuffer() with a wait count of 1 + // limits wait time to WAIT_PERIOD_MS. This prevents from being + // stuck here not being able to handle timed events (position, markers, loops). status_t err = obtainBuffer(&audioBuffer, 1); if (err < NO_ERROR) { if (err != TIMED_OUT) { @@ -801,7 +883,7 @@ bool AudioTrack::processAudioBuffer(const sp<AudioTrackThread>& thread) // Divide buffer size by 2 to take into account the expansion // due to 8 to 16 bit conversion: the callback must fill only half // of the destination buffer - if (mFormat == AudioSystem::PCM_8_BIT) { + if (mFormat == AudioSystem::PCM_8_BIT && !(mFlags & AudioSystem::OUTPUT_FLAG_DIRECT)) { audioBuffer.size >>= 1; } @@ -820,7 +902,7 @@ bool AudioTrack::processAudioBuffer(const sp<AudioTrackThread>& thread) } if (writtenSize > reqSize) writtenSize = reqSize; - if (mFormat == AudioSystem::PCM_8_BIT) { + if (mFormat == AudioSystem::PCM_8_BIT && !(mFlags & AudioSystem::OUTPUT_FLAG_DIRECT)) { // 8 to 16 bit conversion const int8_t *src = audioBuffer.i8 + writtenSize-1; int count = writtenSize; @@ -832,7 +914,11 @@ bool AudioTrack::processAudioBuffer(const sp<AudioTrackThread>& thread) } audioBuffer.size = writtenSize; - audioBuffer.frameCount = writtenSize/mChannelCount/sizeof(int16_t); + // NOTE: mCblk->frameSize is not equal to AudioTrack::frameSize() for + // 8 bit PCM data: in this case, mCblk->frameSize is based on a sampel size of + // 16 bit. 
+ audioBuffer.frameCount = writtenSize/mCblk->frameSize; + frames -= audioBuffer.frameCount; releaseBuffer(&audioBuffer); @@ -891,7 +977,7 @@ void AudioTrack::AudioTrackThread::onFirstRef() // ========================================================================= audio_track_cblk_t::audio_track_cblk_t() - : user(0), server(0), userBase(0), serverBase(0), buffers(0), frameCount(0), + : lock(Mutex::SHARED), user(0), server(0), userBase(0), serverBase(0), buffers(0), frameCount(0), loopStart(UINT_MAX), loopEnd(UINT_MAX), loopCount(0), volumeLR(0), flowControlFlag(1), forceReady(0) { } @@ -948,8 +1034,8 @@ bool audio_track_cblk_t::stepServer(uint32_t frameCount) // Mark that we have read the first buffer so that next time stepUser() is called // we switch to normal obtainBuffer() timeout period if (bufferTimeoutMs == MAX_STARTUP_TIMEOUT_MS) { - bufferTimeoutMs = MAX_RUN_TIMEOUT_MS - 1; - } + bufferTimeoutMs = MAX_STARTUP_TIMEOUT_MS - 1; + } // It is possible that we receive a flush() // while the mixer is processing a block: in this case, // stepServer() is called After the flush() has reset u & s and @@ -981,7 +1067,7 @@ bool audio_track_cblk_t::stepServer(uint32_t frameCount) void* audio_track_cblk_t::buffer(uint32_t offset) const { - return (int16_t *)this->buffers + (offset-userBase)*this->channels; + return (int8_t *)this->buffers + (offset - userBase) * this->frameSize; } uint32_t audio_track_cblk_t::framesAvailable() diff --git a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp index eeaa54fb4e24..0eff20528511 100644 --- a/media/libmedia/IAudioFlinger.cpp +++ b/media/libmedia/IAudioFlinger.cpp @@ -16,12 +16,13 @@ */ #define LOG_TAG "IAudioFlinger" +//#define LOG_NDEBUG 0 #include <utils/Log.h> #include <stdint.h> #include <sys/types.h> -#include <utils/Parcel.h> +#include <binder/Parcel.h> #include <media/IAudioFlinger.h> @@ -44,17 +45,22 @@ enum { STREAM_VOLUME, STREAM_MUTE, SET_MODE, - GET_MODE, - SET_ROUTING, - GET_ROUTING, SET_MIC_MUTE, 
GET_MIC_MUTE, IS_MUSIC_ACTIVE, - SET_PARAMETER, + SET_PARAMETERS, + GET_PARAMETERS, REGISTER_CLIENT, GET_INPUTBUFFERSIZE, - WAKE_UP, - IS_A2DP_ENABLED + OPEN_OUTPUT, + OPEN_DUPLICATE_OUTPUT, + CLOSE_OUTPUT, + SUSPEND_OUTPUT, + RESTORE_OUTPUT, + OPEN_INPUT, + CLOSE_INPUT, + SET_STREAM_OUTPUT, + SET_VOICE_VOLUME }; class BpAudioFlinger : public BpInterface<IAudioFlinger> @@ -74,9 +80,11 @@ public: int frameCount, uint32_t flags, const sp<IMemory>& sharedBuffer, + int output, status_t *status) { Parcel data, reply; + sp<IAudioTrack> track; data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); data.writeInt32(pid); data.writeInt32(streamType); @@ -86,20 +94,23 @@ public: data.writeInt32(frameCount); data.writeInt32(flags); data.writeStrongBinder(sharedBuffer->asBinder()); + data.writeInt32(output); status_t lStatus = remote()->transact(CREATE_TRACK, data, &reply); if (lStatus != NO_ERROR) { LOGE("createTrack error: %s", strerror(-lStatus)); + } else { + lStatus = reply.readInt32(); + track = interface_cast<IAudioTrack>(reply.readStrongBinder()); } - lStatus = reply.readInt32(); if (status) { *status = lStatus; } - return interface_cast<IAudioTrack>(reply.readStrongBinder()); + return track; } virtual sp<IAudioRecord> openRecord( pid_t pid, - int inputSource, + int input, uint32_t sampleRate, int format, int channelCount, @@ -108,20 +119,26 @@ public: status_t *status) { Parcel data, reply; + sp<IAudioRecord> record; data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); data.writeInt32(pid); - data.writeInt32(inputSource); + data.writeInt32(input); data.writeInt32(sampleRate); data.writeInt32(format); data.writeInt32(channelCount); data.writeInt32(frameCount); data.writeInt32(flags); - remote()->transact(OPEN_RECORD, data, &reply); - status_t lStatus = reply.readInt32(); + status_t lStatus = remote()->transact(OPEN_RECORD, data, &reply); + if (lStatus != NO_ERROR) { + LOGE("openRecord error: %s", strerror(-lStatus)); + } else { + lStatus = 
reply.readInt32(); + record = interface_cast<IAudioRecord>(reply.readStrongBinder()); + } if (status) { *status = lStatus; } - return interface_cast<IAudioRecord>(reply.readStrongBinder()); + return record; } virtual uint32_t sampleRate(int output) const @@ -203,12 +220,13 @@ public: return reply.readInt32(); } - virtual status_t setStreamVolume(int stream, float value) + virtual status_t setStreamVolume(int stream, float value, int output) { Parcel data, reply; data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); data.writeInt32(stream); data.writeFloat(value); + data.writeInt32(output); remote()->transact(SET_STREAM_VOLUME, data, &reply); return reply.readInt32(); } @@ -223,11 +241,12 @@ public: return reply.readInt32(); } - virtual float streamVolume(int stream) const + virtual float streamVolume(int stream, int output) const { Parcel data, reply; data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); data.writeInt32(stream); + data.writeInt32(output); remote()->transact(STREAM_VOLUME, data, &reply); return reply.readFloat(); } @@ -241,111 +260,210 @@ public: return reply.readInt32(); } - virtual status_t setRouting(int mode, uint32_t routes, uint32_t mask) + virtual status_t setMode(int mode) { Parcel data, reply; data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); data.writeInt32(mode); - data.writeInt32(routes); - data.writeInt32(mask); - remote()->transact(SET_ROUTING, data, &reply); + remote()->transact(SET_MODE, data, &reply); return reply.readInt32(); } - virtual uint32_t getRouting(int mode) const + virtual status_t setMicMute(bool state) { Parcel data, reply; data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); - data.writeInt32(mode); - remote()->transact(GET_ROUTING, data, &reply); + data.writeInt32(state); + remote()->transact(SET_MIC_MUTE, data, &reply); return reply.readInt32(); } - virtual status_t setMode(int mode) + virtual bool getMicMute() const { Parcel data, reply; 
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); - data.writeInt32(mode); - remote()->transact(SET_MODE, data, &reply); + remote()->transact(GET_MIC_MUTE, data, &reply); return reply.readInt32(); } - virtual int getMode() const + virtual bool isMusicActive() const { Parcel data, reply; data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); - remote()->transact(GET_MODE, data, &reply); + remote()->transact(IS_MUSIC_ACTIVE, data, &reply); return reply.readInt32(); } - virtual status_t setMicMute(bool state) + virtual status_t setParameters(int ioHandle, const String8& keyValuePairs) { Parcel data, reply; data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); - data.writeInt32(state); - remote()->transact(SET_MIC_MUTE, data, &reply); + data.writeInt32(ioHandle); + data.writeString8(keyValuePairs); + remote()->transact(SET_PARAMETERS, data, &reply); return reply.readInt32(); } - virtual bool getMicMute() const + virtual String8 getParameters(int ioHandle, const String8& keys) { Parcel data, reply; data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); - remote()->transact(GET_MIC_MUTE, data, &reply); + data.writeInt32(ioHandle); + data.writeString8(keys); + remote()->transact(GET_PARAMETERS, data, &reply); + return reply.readString8(); + } + + virtual void registerClient(const sp<IAudioFlingerClient>& client) + { + Parcel data, reply; + data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); + data.writeStrongBinder(client->asBinder()); + remote()->transact(REGISTER_CLIENT, data, &reply); + } + + virtual size_t getInputBufferSize(uint32_t sampleRate, int format, int channelCount) + { + Parcel data, reply; + data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); + data.writeInt32(sampleRate); + data.writeInt32(format); + data.writeInt32(channelCount); + remote()->transact(GET_INPUTBUFFERSIZE, data, &reply); return reply.readInt32(); } - virtual bool isMusicActive() const + virtual int 
openOutput(uint32_t *pDevices, + uint32_t *pSamplingRate, + uint32_t *pFormat, + uint32_t *pChannels, + uint32_t *pLatencyMs, + uint32_t flags) { Parcel data, reply; + uint32_t devices = pDevices ? *pDevices : 0; + uint32_t samplingRate = pSamplingRate ? *pSamplingRate : 0; + uint32_t format = pFormat ? *pFormat : 0; + uint32_t channels = pChannels ? *pChannels : 0; + uint32_t latency = pLatencyMs ? *pLatencyMs : 0; + data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); - remote()->transact(IS_MUSIC_ACTIVE, data, &reply); + data.writeInt32(devices); + data.writeInt32(samplingRate); + data.writeInt32(format); + data.writeInt32(channels); + data.writeInt32(latency); + data.writeInt32(flags); + remote()->transact(OPEN_OUTPUT, data, &reply); + int output = reply.readInt32(); + LOGV("openOutput() returned output, %p", output); + devices = reply.readInt32(); + if (pDevices) *pDevices = devices; + samplingRate = reply.readInt32(); + if (pSamplingRate) *pSamplingRate = samplingRate; + format = reply.readInt32(); + if (pFormat) *pFormat = format; + channels = reply.readInt32(); + if (pChannels) *pChannels = channels; + latency = reply.readInt32(); + if (pLatencyMs) *pLatencyMs = latency; + return output; + } + + virtual int openDuplicateOutput(int output1, int output2) + { + Parcel data, reply; + data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); + data.writeInt32(output1); + data.writeInt32(output2); + remote()->transact(OPEN_DUPLICATE_OUTPUT, data, &reply); return reply.readInt32(); } - virtual status_t setParameter(const char* key, const char* value) + virtual status_t closeOutput(int output) { Parcel data, reply; data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); - data.writeCString(key); - data.writeCString(value); - remote()->transact(SET_PARAMETER, data, &reply); + data.writeInt32(output); + remote()->transact(CLOSE_OUTPUT, data, &reply); return reply.readInt32(); } - - virtual void registerClient(const 
sp<IAudioFlingerClient>& client) + + virtual status_t suspendOutput(int output) { Parcel data, reply; data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); - data.writeStrongBinder(client->asBinder()); - remote()->transact(REGISTER_CLIENT, data, &reply); + data.writeInt32(output); + remote()->transact(SUSPEND_OUTPUT, data, &reply); + return reply.readInt32(); } - - virtual size_t getInputBufferSize(uint32_t sampleRate, int format, int channelCount) + + virtual status_t restoreOutput(int output) { Parcel data, reply; data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); - data.writeInt32(sampleRate); + data.writeInt32(output); + remote()->transact(RESTORE_OUTPUT, data, &reply); + return reply.readInt32(); + } + + virtual int openInput(uint32_t *pDevices, + uint32_t *pSamplingRate, + uint32_t *pFormat, + uint32_t *pChannels, + uint32_t acoustics) + { + Parcel data, reply; + uint32_t devices = pDevices ? *pDevices : 0; + uint32_t samplingRate = pSamplingRate ? *pSamplingRate : 0; + uint32_t format = pFormat ? *pFormat : 0; + uint32_t channels = pChannels ? 
*pChannels : 0; + + data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); + data.writeInt32(devices); + data.writeInt32(samplingRate); data.writeInt32(format); - data.writeInt32(channelCount); - remote()->transact(GET_INPUTBUFFERSIZE, data, &reply); + data.writeInt32(channels); + data.writeInt32(acoustics); + remote()->transact(OPEN_INPUT, data, &reply); + int input = reply.readInt32(); + devices = reply.readInt32(); + if (pDevices) *pDevices = devices; + samplingRate = reply.readInt32(); + if (pSamplingRate) *pSamplingRate = samplingRate; + format = reply.readInt32(); + if (pFormat) *pFormat = format; + channels = reply.readInt32(); + if (pChannels) *pChannels = channels; + return input; + } + + virtual status_t closeInput(int input) + { + Parcel data, reply; + data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); + data.writeInt32(input); + remote()->transact(CLOSE_INPUT, data, &reply); return reply.readInt32(); } - - virtual void wakeUp() + + virtual status_t setStreamOutput(uint32_t stream, int output) { Parcel data, reply; data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); - remote()->transact(WAKE_UP, data, &reply, IBinder::FLAG_ONEWAY); - return; + data.writeInt32(stream); + data.writeInt32(output); + remote()->transact(SET_STREAM_OUTPUT, data, &reply); + return reply.readInt32(); } - virtual bool isA2dpEnabled() const + virtual status_t setVoiceVolume(float volume) { Parcel data, reply; data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); - remote()->transact(IS_A2DP_ENABLED, data, &reply); - return (bool)reply.readInt32(); + data.writeFloat(volume); + remote()->transact(SET_VOICE_VOLUME, data, &reply); + return reply.readInt32(); } }; @@ -353,12 +471,6 @@ IMPLEMENT_META_INTERFACE(AudioFlinger, "android.media.IAudioFlinger"); // ---------------------------------------------------------------------- -#define CHECK_INTERFACE(interface, data, reply) \ - do { if 
(!data.enforceInterface(interface::getInterfaceDescriptor())) { \ - LOGW("Call incorrectly routed to " #interface); \ - return PERMISSION_DENIED; \ - } } while (0) - status_t BnAudioFlinger::onTransact( uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) { @@ -373,10 +485,11 @@ status_t BnAudioFlinger::onTransact( size_t bufferCount = data.readInt32(); uint32_t flags = data.readInt32(); sp<IMemory> buffer = interface_cast<IMemory>(data.readStrongBinder()); + int output = data.readInt32(); status_t status; sp<IAudioTrack> track = createTrack(pid, streamType, sampleRate, format, - channelCount, bufferCount, flags, buffer, &status); + channelCount, bufferCount, flags, buffer, output, &status); reply->writeInt32(status); reply->writeStrongBinder(track->asBinder()); return NO_ERROR; @@ -384,14 +497,14 @@ status_t BnAudioFlinger::onTransact( case OPEN_RECORD: { CHECK_INTERFACE(IAudioFlinger, data, reply); pid_t pid = data.readInt32(); - int inputSource = data.readInt32(); + int input = data.readInt32(); uint32_t sampleRate = data.readInt32(); int format = data.readInt32(); int channelCount = data.readInt32(); size_t bufferCount = data.readInt32(); uint32_t flags = data.readInt32(); status_t status; - sp<IAudioRecord> record = openRecord(pid, inputSource, + sp<IAudioRecord> record = openRecord(pid, input, sampleRate, format, channelCount, bufferCount, flags, &status); reply->writeInt32(status); reply->writeStrongBinder(record->asBinder()); @@ -399,32 +512,27 @@ status_t BnAudioFlinger::onTransact( } break; case SAMPLE_RATE: { CHECK_INTERFACE(IAudioFlinger, data, reply); - int output = data.readInt32(); - reply->writeInt32( sampleRate(output) ); + reply->writeInt32( sampleRate(data.readInt32()) ); return NO_ERROR; } break; case CHANNEL_COUNT: { CHECK_INTERFACE(IAudioFlinger, data, reply); - int output = data.readInt32(); - reply->writeInt32( channelCount(output) ); + reply->writeInt32( channelCount(data.readInt32()) ); return NO_ERROR; } break; case FORMAT: { 
CHECK_INTERFACE(IAudioFlinger, data, reply); - int output = data.readInt32(); - reply->writeInt32( format(output) ); + reply->writeInt32( format(data.readInt32()) ); return NO_ERROR; } break; case FRAME_COUNT: { CHECK_INTERFACE(IAudioFlinger, data, reply); - int output = data.readInt32(); - reply->writeInt32( frameCount(output) ); + reply->writeInt32( frameCount(data.readInt32()) ); return NO_ERROR; } break; case LATENCY: { CHECK_INTERFACE(IAudioFlinger, data, reply); - int output = data.readInt32(); - reply->writeInt32( latency(output) ); + reply->writeInt32( latency(data.readInt32()) ); return NO_ERROR; } break; case SET_MASTER_VOLUME: { @@ -450,7 +558,9 @@ status_t BnAudioFlinger::onTransact( case SET_STREAM_VOLUME: { CHECK_INTERFACE(IAudioFlinger, data, reply); int stream = data.readInt32(); - reply->writeInt32( setStreamVolume(stream, data.readFloat()) ); + float volume = data.readFloat(); + int output = data.readInt32(); + reply->writeInt32( setStreamVolume(stream, volume, output) ); return NO_ERROR; } break; case SET_STREAM_MUTE: { @@ -462,7 +572,8 @@ status_t BnAudioFlinger::onTransact( case STREAM_VOLUME: { CHECK_INTERFACE(IAudioFlinger, data, reply); int stream = data.readInt32(); - reply->writeFloat( streamVolume(stream) ); + int output = data.readInt32(); + reply->writeFloat( streamVolume(stream, output) ); return NO_ERROR; } break; case STREAM_MUTE: { @@ -471,31 +582,12 @@ status_t BnAudioFlinger::onTransact( reply->writeInt32( streamMute(stream) ); return NO_ERROR; } break; - case SET_ROUTING: { - CHECK_INTERFACE(IAudioFlinger, data, reply); - int mode = data.readInt32(); - uint32_t routes = data.readInt32(); - uint32_t mask = data.readInt32(); - reply->writeInt32( setRouting(mode, routes, mask) ); - return NO_ERROR; - } break; - case GET_ROUTING: { - CHECK_INTERFACE(IAudioFlinger, data, reply); - int mode = data.readInt32(); - reply->writeInt32( getRouting(mode) ); - return NO_ERROR; - } break; case SET_MODE: { CHECK_INTERFACE(IAudioFlinger, data, 
reply); int mode = data.readInt32(); reply->writeInt32( setMode(mode) ); return NO_ERROR; } break; - case GET_MODE: { - CHECK_INTERFACE(IAudioFlinger, data, reply); - reply->writeInt32( getMode() ); - return NO_ERROR; - } break; case SET_MIC_MUTE: { CHECK_INTERFACE(IAudioFlinger, data, reply); int state = data.readInt32(); @@ -512,13 +604,21 @@ status_t BnAudioFlinger::onTransact( reply->writeInt32( isMusicActive() ); return NO_ERROR; } break; - case SET_PARAMETER: { + case SET_PARAMETERS: { CHECK_INTERFACE(IAudioFlinger, data, reply); - const char *key = data.readCString(); - const char *value = data.readCString(); - reply->writeInt32( setParameter(key, value) ); + int ioHandle = data.readInt32(); + String8 keyValuePairs(data.readString8()); + reply->writeInt32(setParameters(ioHandle, keyValuePairs)); return NO_ERROR; - } break; + } break; + case GET_PARAMETERS: { + CHECK_INTERFACE(IAudioFlinger, data, reply); + int ioHandle = data.readInt32(); + String8 keys(data.readString8()); + reply->writeString8(getParameters(ioHandle, keys)); + return NO_ERROR; + } break; + case REGISTER_CLIENT: { CHECK_INTERFACE(IAudioFlinger, data, reply); sp<IAudioFlingerClient> client = interface_cast<IAudioFlingerClient>(data.readStrongBinder()); @@ -533,14 +633,87 @@ status_t BnAudioFlinger::onTransact( reply->writeInt32( getInputBufferSize(sampleRate, format, channelCount) ); return NO_ERROR; } break; - case WAKE_UP: { + case OPEN_OUTPUT: { + CHECK_INTERFACE(IAudioFlinger, data, reply); + uint32_t devices = data.readInt32(); + uint32_t samplingRate = data.readInt32(); + uint32_t format = data.readInt32(); + uint32_t channels = data.readInt32(); + uint32_t latency = data.readInt32(); + uint32_t flags = data.readInt32(); + int output = openOutput(&devices, + &samplingRate, + &format, + &channels, + &latency, + flags); + LOGV("OPEN_OUTPUT output, %p", output); + reply->writeInt32(output); + reply->writeInt32(devices); + reply->writeInt32(samplingRate); + reply->writeInt32(format); + 
reply->writeInt32(channels); + reply->writeInt32(latency); + return NO_ERROR; + } break; + case OPEN_DUPLICATE_OUTPUT: { + CHECK_INTERFACE(IAudioFlinger, data, reply); + int output1 = data.readInt32(); + int output2 = data.readInt32(); + reply->writeInt32(openDuplicateOutput(output1, output2)); + return NO_ERROR; + } break; + case CLOSE_OUTPUT: { CHECK_INTERFACE(IAudioFlinger, data, reply); - wakeUp(); + reply->writeInt32(closeOutput(data.readInt32())); + return NO_ERROR; + } break; + case SUSPEND_OUTPUT: { + CHECK_INTERFACE(IAudioFlinger, data, reply); + reply->writeInt32(suspendOutput(data.readInt32())); + return NO_ERROR; + } break; + case RESTORE_OUTPUT: { + CHECK_INTERFACE(IAudioFlinger, data, reply); + reply->writeInt32(restoreOutput(data.readInt32())); + return NO_ERROR; + } break; + case OPEN_INPUT: { + CHECK_INTERFACE(IAudioFlinger, data, reply); + uint32_t devices = data.readInt32(); + uint32_t samplingRate = data.readInt32(); + uint32_t format = data.readInt32(); + uint32_t channels = data.readInt32(); + uint32_t acoutics = data.readInt32(); + + int input = openInput(&devices, + &samplingRate, + &format, + &channels, + acoutics); + reply->writeInt32(input); + reply->writeInt32(devices); + reply->writeInt32(samplingRate); + reply->writeInt32(format); + reply->writeInt32(channels); + return NO_ERROR; + } break; + case CLOSE_INPUT: { + CHECK_INTERFACE(IAudioFlinger, data, reply); + reply->writeInt32(closeInput(data.readInt32())); + return NO_ERROR; + } break; + case SET_STREAM_OUTPUT: { + CHECK_INTERFACE(IAudioFlinger, data, reply); + uint32_t stream = data.readInt32(); + int output = data.readInt32(); + reply->writeInt32(setStreamOutput(stream, output)); return NO_ERROR; } break; - case IS_A2DP_ENABLED: { + case SET_VOICE_VOLUME: { CHECK_INTERFACE(IAudioFlinger, data, reply); - reply->writeInt32( (int)isA2dpEnabled() ); + float volume = data.readFloat(); + reply->writeInt32( setVoiceVolume(volume) ); return NO_ERROR; } break; default: diff --git 
a/media/libmedia/IAudioFlingerClient.cpp b/media/libmedia/IAudioFlingerClient.cpp index 9d00aefb4ef6..3900de4572d0 100644 --- a/media/libmedia/IAudioFlingerClient.cpp +++ b/media/libmedia/IAudioFlingerClient.cpp @@ -20,14 +20,15 @@ #include <stdint.h> #include <sys/types.h> -#include <utils/Parcel.h> +#include <binder/Parcel.h> #include <media/IAudioFlingerClient.h> +#include <media/AudioSystem.h> namespace android { enum { - AUDIO_OUTPUT_CHANGED = IBinder::FIRST_CALL_TRANSACTION + IO_CONFIG_CHANGED = IBinder::FIRST_CALL_TRANSACTION }; class BpAudioFlingerClient : public BpInterface<IAudioFlingerClient> @@ -38,12 +39,25 @@ public: { } - void a2dpEnabledChanged(bool enabled) + void ioConfigChanged(int event, int ioHandle, void *param2) { Parcel data, reply; data.writeInterfaceToken(IAudioFlingerClient::getInterfaceDescriptor()); - data.writeInt32((int)enabled); - remote()->transact(AUDIO_OUTPUT_CHANGED, data, &reply, IBinder::FLAG_ONEWAY); + data.writeInt32(event); + data.writeInt32(ioHandle); + if (event == AudioSystem::STREAM_CONFIG_CHANGED) { + uint32_t stream = *(uint32_t *)param2; + LOGV("ioConfigChanged stream %d", stream); + data.writeInt32(stream); + } else if (event != AudioSystem::OUTPUT_CLOSED && event != AudioSystem::INPUT_CLOSED) { + AudioSystem::OutputDescriptor *desc = (AudioSystem::OutputDescriptor *)param2; + data.writeInt32(desc->samplingRate); + data.writeInt32(desc->format); + data.writeInt32(desc->channels); + data.writeInt32(desc->frameCount); + data.writeInt32(desc->latency); + } + remote()->transact(IO_CONFIG_CHANGED, data, &reply, IBinder::FLAG_ONEWAY); } }; @@ -51,20 +65,30 @@ IMPLEMENT_META_INTERFACE(AudioFlingerClient, "android.media.IAudioFlingerClient" // ---------------------------------------------------------------------- -#define CHECK_INTERFACE(interface, data, reply) \ - do { if (!data.enforceInterface(interface::getInterfaceDescriptor())) { \ - LOGW("Call incorrectly routed to " #interface); \ - return PERMISSION_DENIED; \ - } } 
while (0) - status_t BnAudioFlingerClient::onTransact( uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) { switch(code) { - case AUDIO_OUTPUT_CHANGED: { + case IO_CONFIG_CHANGED: { CHECK_INTERFACE(IAudioFlingerClient, data, reply); - bool enabled = (bool)data.readInt32(); - a2dpEnabledChanged(enabled); + int event = data.readInt32(); + int ioHandle = data.readInt32(); + void *param2 = 0; + AudioSystem::OutputDescriptor desc; + uint32_t stream; + if (event == AudioSystem::STREAM_CONFIG_CHANGED) { + stream = data.readInt32(); + param2 = &stream; + LOGV("STREAM_CONFIG_CHANGED stream %d", stream); + } else if (event != AudioSystem::OUTPUT_CLOSED && event != AudioSystem::INPUT_CLOSED) { + desc.samplingRate = data.readInt32(); + desc.format = data.readInt32(); + desc.channels = data.readInt32(); + desc.frameCount = data.readInt32(); + desc.latency = data.readInt32(); + param2 = &desc; + } + ioConfigChanged(event, ioHandle, param2); return NO_ERROR; } break; default: diff --git a/media/libmedia/IAudioPolicyService.cpp b/media/libmedia/IAudioPolicyService.cpp new file mode 100644 index 000000000000..18dd173bfc64 --- /dev/null +++ b/media/libmedia/IAudioPolicyService.cpp @@ -0,0 +1,413 @@ +/* +** +** Copyright 2009, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#define LOG_TAG "IAudioPolicyService" +#include <utils/Log.h> + +#include <stdint.h> +#include <sys/types.h> + +#include <binder/Parcel.h> + +#include <media/IAudioPolicyService.h> + +namespace android { + +enum { + SET_DEVICE_CONNECTION_STATE = IBinder::FIRST_CALL_TRANSACTION, + GET_DEVICE_CONNECTION_STATE, + SET_PHONE_STATE, + SET_RINGER_MODE, + SET_FORCE_USE, + GET_FORCE_USE, + GET_OUTPUT, + START_OUTPUT, + STOP_OUTPUT, + RELEASE_OUTPUT, + GET_INPUT, + START_INPUT, + STOP_INPUT, + RELEASE_INPUT, + INIT_STREAM_VOLUME, + SET_STREAM_VOLUME, + GET_STREAM_VOLUME +}; + +class BpAudioPolicyService : public BpInterface<IAudioPolicyService> +{ +public: + BpAudioPolicyService(const sp<IBinder>& impl) + : BpInterface<IAudioPolicyService>(impl) + { + } + + virtual status_t setDeviceConnectionState( + AudioSystem::audio_devices device, + AudioSystem::device_connection_state state, + const char *device_address) + { + Parcel data, reply; + data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor()); + data.writeInt32(static_cast <uint32_t>(device)); + data.writeInt32(static_cast <uint32_t>(state)); + data.writeCString(device_address); + remote()->transact(SET_DEVICE_CONNECTION_STATE, data, &reply); + return static_cast <status_t> (reply.readInt32()); + } + + virtual AudioSystem::device_connection_state getDeviceConnectionState( + AudioSystem::audio_devices device, + const char *device_address) + { + Parcel data, reply; + data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor()); + data.writeInt32(static_cast <uint32_t>(device)); + data.writeCString(device_address); + remote()->transact(GET_DEVICE_CONNECTION_STATE, data, &reply); + return static_cast <AudioSystem::device_connection_state>(reply.readInt32()); + } + + virtual status_t setPhoneState(int state) + { + Parcel data, reply; + data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor()); + data.writeInt32(state); + remote()->transact(SET_PHONE_STATE, data, &reply); + return 
static_cast <status_t> (reply.readInt32()); + } + + virtual status_t setRingerMode(uint32_t mode, uint32_t mask) + { + Parcel data, reply; + data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor()); + data.writeInt32(mode); + data.writeInt32(mask); + remote()->transact(SET_RINGER_MODE, data, &reply); + return static_cast <status_t> (reply.readInt32()); + } + + virtual status_t setForceUse(AudioSystem::force_use usage, AudioSystem::forced_config config) + { + Parcel data, reply; + data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor()); + data.writeInt32(static_cast <uint32_t>(usage)); + data.writeInt32(static_cast <uint32_t>(config)); + remote()->transact(SET_FORCE_USE, data, &reply); + return static_cast <status_t> (reply.readInt32()); + } + + virtual AudioSystem::forced_config getForceUse(AudioSystem::force_use usage) + { + Parcel data, reply; + data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor()); + data.writeInt32(static_cast <uint32_t>(usage)); + remote()->transact(GET_FORCE_USE, data, &reply); + return static_cast <AudioSystem::forced_config> (reply.readInt32()); + } + + virtual audio_io_handle_t getOutput( + AudioSystem::stream_type stream, + uint32_t samplingRate, + uint32_t format, + uint32_t channels, + AudioSystem::output_flags flags) + { + Parcel data, reply; + data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor()); + data.writeInt32(static_cast <uint32_t>(stream)); + data.writeInt32(samplingRate); + data.writeInt32(static_cast <uint32_t>(format)); + data.writeInt32(channels); + data.writeInt32(static_cast <uint32_t>(flags)); + remote()->transact(GET_OUTPUT, data, &reply); + return static_cast <audio_io_handle_t> (reply.readInt32()); + } + + virtual status_t startOutput(audio_io_handle_t output, AudioSystem::stream_type stream) + { + Parcel data, reply; + data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor()); + data.writeInt32(output); + data.writeInt32(stream); + 
remote()->transact(START_OUTPUT, data, &reply); + return static_cast <status_t> (reply.readInt32()); + } + + virtual status_t stopOutput(audio_io_handle_t output, AudioSystem::stream_type stream) + { + Parcel data, reply; + data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor()); + data.writeInt32(output); + data.writeInt32(stream); + remote()->transact(STOP_OUTPUT, data, &reply); + return static_cast <status_t> (reply.readInt32()); + } + + virtual void releaseOutput(audio_io_handle_t output) + { + Parcel data, reply; + data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor()); + data.writeInt32(output); + remote()->transact(RELEASE_OUTPUT, data, &reply); + } + + virtual audio_io_handle_t getInput( + int inputSource, + uint32_t samplingRate, + uint32_t format, + uint32_t channels, + AudioSystem::audio_in_acoustics acoustics) + { + Parcel data, reply; + data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor()); + data.writeInt32(inputSource); + data.writeInt32(samplingRate); + data.writeInt32(static_cast <uint32_t>(format)); + data.writeInt32(channels); + data.writeInt32(static_cast <uint32_t>(acoustics)); + remote()->transact(GET_INPUT, data, &reply); + return static_cast <audio_io_handle_t> (reply.readInt32()); + } + + virtual status_t startInput(audio_io_handle_t input) + { + Parcel data, reply; + data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor()); + data.writeInt32(input); + remote()->transact(START_INPUT, data, &reply); + return static_cast <status_t> (reply.readInt32()); + } + + virtual status_t stopInput(audio_io_handle_t input) + { + Parcel data, reply; + data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor()); + data.writeInt32(input); + remote()->transact(STOP_INPUT, data, &reply); + return static_cast <status_t> (reply.readInt32()); + } + + virtual void releaseInput(audio_io_handle_t input) + { + Parcel data, reply; + 
data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor()); + data.writeInt32(input); + remote()->transact(RELEASE_INPUT, data, &reply); + } + + virtual status_t initStreamVolume(AudioSystem::stream_type stream, + int indexMin, + int indexMax) + { + Parcel data, reply; + data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor()); + data.writeInt32(static_cast <uint32_t>(stream)); + data.writeInt32(indexMin); + data.writeInt32(indexMax); + remote()->transact(INIT_STREAM_VOLUME, data, &reply); + return static_cast <status_t> (reply.readInt32()); + } + + virtual status_t setStreamVolumeIndex(AudioSystem::stream_type stream, int index) + { + Parcel data, reply; + data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor()); + data.writeInt32(static_cast <uint32_t>(stream)); + data.writeInt32(index); + remote()->transact(SET_STREAM_VOLUME, data, &reply); + return static_cast <status_t> (reply.readInt32()); + } + + virtual status_t getStreamVolumeIndex(AudioSystem::stream_type stream, int *index) + { + Parcel data, reply; + data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor()); + data.writeInt32(static_cast <uint32_t>(stream)); + remote()->transact(GET_STREAM_VOLUME, data, &reply); + int lIndex = reply.readInt32(); + if (index) *index = lIndex; + return static_cast <status_t> (reply.readInt32()); + } +}; + +IMPLEMENT_META_INTERFACE(AudioPolicyService, "android.media.IAudioPolicyService"); + +// ---------------------------------------------------------------------- + + +status_t BnAudioPolicyService::onTransact( + uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) +{ + switch(code) { + case SET_DEVICE_CONNECTION_STATE: { + CHECK_INTERFACE(IAudioPolicyService, data, reply); + AudioSystem::audio_devices device = static_cast <AudioSystem::audio_devices>(data.readInt32()); + AudioSystem::device_connection_state state = static_cast <AudioSystem::device_connection_state>(data.readInt32()); + const char 
*device_address = data.readCString(); + reply->writeInt32(static_cast <uint32_t>(setDeviceConnectionState(device, state, device_address))); + return NO_ERROR; + } break; + + case GET_DEVICE_CONNECTION_STATE: { + CHECK_INTERFACE(IAudioPolicyService, data, reply); + AudioSystem::audio_devices device = static_cast <AudioSystem::audio_devices>(data.readInt32()); + const char *device_address = data.readCString(); + reply->writeInt32(static_cast <uint32_t>(getDeviceConnectionState(device, device_address))); + return NO_ERROR; + } break; + + case SET_PHONE_STATE: { + CHECK_INTERFACE(IAudioPolicyService, data, reply); + reply->writeInt32(static_cast <uint32_t>(setPhoneState(data.readInt32()))); + return NO_ERROR; + } break; + + case SET_RINGER_MODE: { + CHECK_INTERFACE(IAudioPolicyService, data, reply); + uint32_t mode = data.readInt32(); + uint32_t mask = data.readInt32(); + reply->writeInt32(static_cast <uint32_t>(setRingerMode(mode, mask))); + return NO_ERROR; + } break; + + case SET_FORCE_USE: { + CHECK_INTERFACE(IAudioPolicyService, data, reply); + AudioSystem::force_use usage = static_cast <AudioSystem::force_use>(data.readInt32()); + AudioSystem::forced_config config = static_cast <AudioSystem::forced_config>(data.readInt32()); + reply->writeInt32(static_cast <uint32_t>(setForceUse(usage, config))); + return NO_ERROR; + } break; + + case GET_FORCE_USE: { + CHECK_INTERFACE(IAudioPolicyService, data, reply); + AudioSystem::force_use usage = static_cast <AudioSystem::force_use>(data.readInt32()); + reply->writeInt32(static_cast <uint32_t>(getForceUse(usage))); + return NO_ERROR; + } break; + + case GET_OUTPUT: { + CHECK_INTERFACE(IAudioPolicyService, data, reply); + AudioSystem::stream_type stream = static_cast <AudioSystem::stream_type>(data.readInt32()); + uint32_t samplingRate = data.readInt32(); + uint32_t format = data.readInt32(); + uint32_t channels = data.readInt32(); + AudioSystem::output_flags flags = static_cast <AudioSystem::output_flags>(data.readInt32()); 
+ + audio_io_handle_t output = getOutput(stream, + samplingRate, + format, + channels, + flags); + reply->writeInt32(static_cast <int>(output)); + return NO_ERROR; + } break; + + case START_OUTPUT: { + CHECK_INTERFACE(IAudioPolicyService, data, reply); + audio_io_handle_t output = static_cast <audio_io_handle_t>(data.readInt32()); + uint32_t stream = data.readInt32(); + reply->writeInt32(static_cast <uint32_t>(startOutput(output, (AudioSystem::stream_type)stream))); + return NO_ERROR; + } break; + + case STOP_OUTPUT: { + CHECK_INTERFACE(IAudioPolicyService, data, reply); + audio_io_handle_t output = static_cast <audio_io_handle_t>(data.readInt32()); + uint32_t stream = data.readInt32(); + reply->writeInt32(static_cast <uint32_t>(stopOutput(output, (AudioSystem::stream_type)stream))); + return NO_ERROR; + } break; + + case RELEASE_OUTPUT: { + CHECK_INTERFACE(IAudioPolicyService, data, reply); + audio_io_handle_t output = static_cast <audio_io_handle_t>(data.readInt32()); + releaseOutput(output); + return NO_ERROR; + } break; + + case GET_INPUT: { + CHECK_INTERFACE(IAudioPolicyService, data, reply); + int inputSource = data.readInt32(); + uint32_t samplingRate = data.readInt32(); + uint32_t format = data.readInt32(); + uint32_t channels = data.readInt32(); + AudioSystem::audio_in_acoustics acoustics = static_cast <AudioSystem::audio_in_acoustics>(data.readInt32()); + audio_io_handle_t input = getInput(inputSource, + samplingRate, + format, + channels, + acoustics); + reply->writeInt32(static_cast <int>(input)); + return NO_ERROR; + } break; + + case START_INPUT: { + CHECK_INTERFACE(IAudioPolicyService, data, reply); + audio_io_handle_t input = static_cast <audio_io_handle_t>(data.readInt32()); + reply->writeInt32(static_cast <uint32_t>(startInput(input))); + return NO_ERROR; + } break; + + case STOP_INPUT: { + CHECK_INTERFACE(IAudioPolicyService, data, reply); + audio_io_handle_t input = static_cast <audio_io_handle_t>(data.readInt32()); + 
reply->writeInt32(static_cast <uint32_t>(stopInput(input))); + return NO_ERROR; + } break; + + case RELEASE_INPUT: { + CHECK_INTERFACE(IAudioPolicyService, data, reply); + audio_io_handle_t input = static_cast <audio_io_handle_t>(data.readInt32()); + releaseInput(input); + return NO_ERROR; + } break; + + case INIT_STREAM_VOLUME: { + CHECK_INTERFACE(IAudioPolicyService, data, reply); + AudioSystem::stream_type stream = static_cast <AudioSystem::stream_type>(data.readInt32()); + int indexMin = data.readInt32(); + int indexMax = data.readInt32(); + reply->writeInt32(static_cast <uint32_t>(initStreamVolume(stream, indexMin,indexMax))); + return NO_ERROR; + } break; + + case SET_STREAM_VOLUME: { + CHECK_INTERFACE(IAudioPolicyService, data, reply); + AudioSystem::stream_type stream = static_cast <AudioSystem::stream_type>(data.readInt32()); + int index = data.readInt32(); + reply->writeInt32(static_cast <uint32_t>(setStreamVolumeIndex(stream, index))); + return NO_ERROR; + } break; + + case GET_STREAM_VOLUME: { + CHECK_INTERFACE(IAudioPolicyService, data, reply); + AudioSystem::stream_type stream = static_cast <AudioSystem::stream_type>(data.readInt32()); + int index; + status_t status = getStreamVolumeIndex(stream, &index); + reply->writeInt32(index); + reply->writeInt32(static_cast <uint32_t>(status)); + return NO_ERROR; + } break; + + default: + return BBinder::onTransact(code, data, reply, flags); + } +} + +// ---------------------------------------------------------------------------- + +}; // namespace android diff --git a/media/libmedia/IAudioRecord.cpp b/media/libmedia/IAudioRecord.cpp index 6e42dac9e2a0..ba0d55b843d4 100644 --- a/media/libmedia/IAudioRecord.cpp +++ b/media/libmedia/IAudioRecord.cpp @@ -15,10 +15,14 @@ ** limitations under the License. 
*/ +#define LOG_TAG "IAudioRecord" +//#define LOG_NDEBUG 0 +#include <utils/Log.h> + #include <stdint.h> #include <sys/types.h> -#include <utils/Parcel.h> +#include <binder/Parcel.h> #include <media/IAudioRecord.h> @@ -42,8 +46,13 @@ public: { Parcel data, reply; data.writeInterfaceToken(IAudioRecord::getInterfaceDescriptor()); - remote()->transact(START, data, &reply); - return reply.readInt32(); + status_t status = remote()->transact(START, data, &reply); + if (status == NO_ERROR) { + status = reply.readInt32(); + } else { + LOGW("start() error: %s", strerror(-status)); + } + return status; } virtual void stop() @@ -56,9 +65,13 @@ public: virtual sp<IMemory> getCblk() const { Parcel data, reply; + sp<IMemory> cblk; data.writeInterfaceToken(IAudioRecord::getInterfaceDescriptor()); - remote()->transact(GET_CBLK, data, &reply); - return interface_cast<IMemory>(reply.readStrongBinder()); + status_t status = remote()->transact(GET_CBLK, data, &reply); + if (status == NO_ERROR) { + cblk = interface_cast<IMemory>(reply.readStrongBinder()); + } + return cblk; } }; @@ -66,12 +79,6 @@ IMPLEMENT_META_INTERFACE(AudioRecord, "android.media.IAudioRecord"); // ---------------------------------------------------------------------- -#define CHECK_INTERFACE(interface, data, reply) \ - do { if (!data.enforceInterface(interface::getInterfaceDescriptor())) { \ - LOGW("Call incorrectly routed to " #interface); \ - return PERMISSION_DENIED; \ - } } while (0) - status_t BnAudioRecord::onTransact( uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) { diff --git a/media/libmedia/IAudioTrack.cpp b/media/libmedia/IAudioTrack.cpp index abc202dafb8f..01ffd75e56c5 100644 --- a/media/libmedia/IAudioTrack.cpp +++ b/media/libmedia/IAudioTrack.cpp @@ -15,10 +15,14 @@ ** limitations under the License. 
*/ +#define LOG_TAG "IAudioTrack" +//#define LOG_NDEBUG 0 +#include <utils/Log.h> + #include <stdint.h> #include <sys/types.h> -#include <utils/Parcel.h> +#include <binder/Parcel.h> #include <media/IAudioTrack.h> @@ -45,8 +49,13 @@ public: { Parcel data, reply; data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor()); - remote()->transact(START, data, &reply); - return reply.readInt32(); + status_t status = remote()->transact(START, data, &reply); + if (status == NO_ERROR) { + status = reply.readInt32(); + } else { + LOGW("start() error: %s", strerror(-status)); + } + return status; } virtual void stop() @@ -81,9 +90,13 @@ public: virtual sp<IMemory> getCblk() const { Parcel data, reply; + sp<IMemory> cblk; data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor()); - remote()->transact(GET_CBLK, data, &reply); - return interface_cast<IMemory>(reply.readStrongBinder()); + status_t status = remote()->transact(GET_CBLK, data, &reply); + if (status == NO_ERROR) { + cblk = interface_cast<IMemory>(reply.readStrongBinder()); + } + return cblk; } }; @@ -91,12 +104,6 @@ IMPLEMENT_META_INTERFACE(AudioTrack, "android.media.IAudioTrack"); // ---------------------------------------------------------------------- -#define CHECK_INTERFACE(interface, data, reply) \ - do { if (!data.enforceInterface(interface::getInterfaceDescriptor())) { \ - LOGW("Call incorrectly routed to " #interface); \ - return PERMISSION_DENIED; \ - } } while (0) - status_t BnAudioTrack::onTransact( uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) { diff --git a/media/libmedia/IMediaMetadataRetriever.cpp b/media/libmedia/IMediaMetadataRetriever.cpp index 85b5944484ab..397a55bda6f8 100644 --- a/media/libmedia/IMediaMetadataRetriever.cpp +++ b/media/libmedia/IMediaMetadataRetriever.cpp @@ -17,7 +17,7 @@ #include <stdint.h> #include <sys/types.h> -#include <utils/Parcel.h> +#include <binder/Parcel.h> #include <SkBitmap.h> #include <media/IMediaMetadataRetriever.h> @@ -126,16 
+126,10 @@ public: } }; -IMPLEMENT_META_INTERFACE(MediaMetadataRetriever, "android.hardware.IMediaMetadataRetriever"); +IMPLEMENT_META_INTERFACE(MediaMetadataRetriever, "android.media.IMediaMetadataRetriever"); // ---------------------------------------------------------------------- -#define CHECK_INTERFACE(interface, data, reply) \ - do { if (!data.enforceInterface(interface::getInterfaceDescriptor())) { \ - LOGW("Call incorrectly routed to " #interface); \ - return PERMISSION_DENIED; \ - } } while (0) - status_t BnMediaMetadataRetriever::onTransact( uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) { @@ -215,4 +209,3 @@ status_t BnMediaMetadataRetriever::onTransact( // ---------------------------------------------------------------------------- }; // namespace android - diff --git a/media/libmedia/IMediaPlayer.cpp b/media/libmedia/IMediaPlayer.cpp index f18765a10843..5d9db102f6a2 100644 --- a/media/libmedia/IMediaPlayer.cpp +++ b/media/libmedia/IMediaPlayer.cpp @@ -18,7 +18,7 @@ #include <stdint.h> #include <sys/types.h> -#include <utils/Parcel.h> +#include <binder/Parcel.h> #include <media/IMediaPlayer.h> #include <ui/ISurface.h> @@ -39,7 +39,10 @@ enum { RESET, SET_AUDIO_STREAM_TYPE, SET_LOOPING, - SET_VOLUME + SET_VOLUME, + INVOKE, + SET_METADATA_FILTER, + GET_METADATA, }; class BpMediaPlayer: public BpInterface<IMediaPlayer> @@ -170,18 +173,38 @@ public: remote()->transact(SET_VOLUME, data, &reply); return reply.readInt32(); } + + status_t invoke(const Parcel& request, Parcel *reply) + { // Avoid doing any extra copy. The interface descriptor should + // have been set by MediaPlayer.java. + return remote()->transact(INVOKE, request, reply); + } + + status_t setMetadataFilter(const Parcel& request) + { + Parcel reply; + // Avoid doing any extra copy of the request. The interface + // descriptor should have been set by MediaPlayer.java. 
+ remote()->transact(SET_METADATA_FILTER, request, &reply); + return reply.readInt32(); + } + + status_t getMetadata(bool update_only, bool apply_filter, Parcel *reply) + { + Parcel request; + request.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor()); + // TODO: Burning 2 ints for 2 boolean. Should probably use flags in an int here. + request.writeInt32(update_only); + request.writeInt32(apply_filter); + remote()->transact(GET_METADATA, request, reply); + return reply->readInt32(); + } }; -IMPLEMENT_META_INTERFACE(MediaPlayer, "android.hardware.IMediaPlayer"); +IMPLEMENT_META_INTERFACE(MediaPlayer, "android.media.IMediaPlayer"); // ---------------------------------------------------------------------- -#define CHECK_INTERFACE(interface, data, reply) \ - do { if (!data.enforceInterface(interface::getInterfaceDescriptor())) { \ - LOGW("Call incorrectly routed to " #interface); \ - return PERMISSION_DENIED; \ - } } while (0) - status_t BnMediaPlayer::onTransact( uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) { @@ -266,6 +289,24 @@ status_t BnMediaPlayer::onTransact( reply->writeInt32(setVolume(data.readFloat(), data.readFloat())); return NO_ERROR; } break; + case INVOKE: { + CHECK_INTERFACE(IMediaPlayer, data, reply); + invoke(data, reply); + return NO_ERROR; + } break; + case SET_METADATA_FILTER: { + CHECK_INTERFACE(IMediaPlayer, data, reply); + reply->writeInt32(setMetadataFilter(data)); + return NO_ERROR; + } break; + case GET_METADATA: { + CHECK_INTERFACE(IMediaPlayer, data, reply); + const status_t retcode = getMetadata(data.readInt32(), data.readInt32(), reply); + reply->setDataPosition(0); + reply->writeInt32(retcode); + reply->setDataPosition(0); + return NO_ERROR; + } break; default: return BBinder::onTransact(code, data, reply, flags); } @@ -274,4 +315,3 @@ status_t BnMediaPlayer::onTransact( // ---------------------------------------------------------------------------- }; // namespace android - diff --git 
a/media/libmedia/IMediaPlayerClient.cpp b/media/libmedia/IMediaPlayerClient.cpp index 65022cd3c0a9..bf51829baff4 100644 --- a/media/libmedia/IMediaPlayerClient.cpp +++ b/media/libmedia/IMediaPlayerClient.cpp @@ -16,8 +16,8 @@ */ #include <utils/RefBase.h> -#include <utils/IInterface.h> -#include <utils/Parcel.h> +#include <binder/IInterface.h> +#include <binder/Parcel.h> #include <media/IMediaPlayerClient.h> @@ -46,16 +46,10 @@ public: } }; -IMPLEMENT_META_INTERFACE(MediaPlayerClient, "android.hardware.IMediaPlayerClient"); +IMPLEMENT_META_INTERFACE(MediaPlayerClient, "android.media.IMediaPlayerClient"); // ---------------------------------------------------------------------- -#define CHECK_INTERFACE(interface, data, reply) \ - do { if (!data.enforceInterface(interface::getInterfaceDescriptor())) { \ - LOGW("Call incorrectly routed to " #interface); \ - return PERMISSION_DENIED; \ - } } while (0) - status_t BnMediaPlayerClient::onTransact( uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) { @@ -74,4 +68,3 @@ status_t BnMediaPlayerClient::onTransact( } }; // namespace android - diff --git a/media/libmedia/IMediaPlayerService.cpp b/media/libmedia/IMediaPlayerService.cpp index 01cdb6cf808f..cca3e9b104c2 100644 --- a/media/libmedia/IMediaPlayerService.cpp +++ b/media/libmedia/IMediaPlayerService.cpp @@ -17,11 +17,14 @@ #include <stdint.h> #include <sys/types.h> -#include <utils/Parcel.h> -#include <utils/IMemory.h> +#include <binder/Parcel.h> +#include <binder/IMemory.h> #include <media/IMediaPlayerService.h> #include <media/IMediaRecorder.h> +#include <media/IOMX.h> + +#include <utils/Errors.h> // for status_t namespace android { @@ -32,6 +35,8 @@ enum { DECODE_FD, CREATE_MEDIA_RECORDER, CREATE_METADATA_RETRIEVER, + GET_OMX, + SNOOP }; class BpMediaPlayerService: public BpInterface<IMediaPlayerService> @@ -109,18 +114,27 @@ public: *pFormat = reply.readInt32(); return interface_cast<IMemory>(reply.readStrongBinder()); } + + virtual sp<IMemory> snoop() 
+ { + Parcel data, reply; + data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor()); + remote()->transact(SNOOP, data, &reply); + return interface_cast<IMemory>(reply.readStrongBinder()); + } + + virtual sp<IOMX> getOMX() { + Parcel data, reply; + data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor()); + remote()->transact(GET_OMX, data, &reply); + return interface_cast<IOMX>(reply.readStrongBinder()); + } }; -IMPLEMENT_META_INTERFACE(MediaPlayerService, "android.hardware.IMediaPlayerService"); +IMPLEMENT_META_INTERFACE(MediaPlayerService, "android.media.IMediaPlayerService"); // ---------------------------------------------------------------------- -#define CHECK_INTERFACE(interface, data, reply) \ - do { if (!data.enforceInterface(interface::getInterfaceDescriptor())) { \ - LOGW("Call incorrectly routed to " #interface); \ - return PERMISSION_DENIED; \ - } } while (0) - status_t BnMediaPlayerService::onTransact( uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) { @@ -173,6 +187,12 @@ status_t BnMediaPlayerService::onTransact( reply->writeStrongBinder(player->asBinder()); return NO_ERROR; } break; + case SNOOP: { + CHECK_INTERFACE(IMediaPlayerService, data, reply); + sp<IMemory> snooped_audio = snoop(); + reply->writeStrongBinder(snooped_audio->asBinder()); + return NO_ERROR; + } break; case CREATE_MEDIA_RECORDER: { CHECK_INTERFACE(IMediaPlayerService, data, reply); pid_t pid = data.readInt32(); @@ -187,6 +207,12 @@ status_t BnMediaPlayerService::onTransact( reply->writeStrongBinder(retriever->asBinder()); return NO_ERROR; } break; + case GET_OMX: { + CHECK_INTERFACE(IMediaPlayerService, data, reply); + sp<IOMX> omx = getOMX(); + reply->writeStrongBinder(omx->asBinder()); + return NO_ERROR; + } break; default: return BBinder::onTransact(code, data, reply, flags); } diff --git a/media/libmedia/IMediaRecorder.cpp b/media/libmedia/IMediaRecorder.cpp index 84d08c40ac67..df7d301fa65a 100644 --- 
a/media/libmedia/IMediaRecorder.cpp +++ b/media/libmedia/IMediaRecorder.cpp @@ -18,7 +18,7 @@ //#define LOG_NDEBUG 0 #define LOG_TAG "IMediaRecorder" #include <utils/Log.h> -#include <utils/Parcel.h> +#include <binder/Parcel.h> #include <ui/ISurface.h> #include <ui/ICamera.h> #include <media/IMediaPlayerClient.h> @@ -264,16 +264,10 @@ public: } }; -IMPLEMENT_META_INTERFACE(MediaRecorder, "android.hardware.IMediaRecorder"); +IMPLEMENT_META_INTERFACE(MediaRecorder, "android.media.IMediaRecorder"); // ---------------------------------------------------------------------- -#define CHECK_INTERFACE(interface, data, reply) \ - do { if (!data.enforceInterface(interface::getInterfaceDescriptor())) { \ - LOGW("Call incorrectly routed to " #interface); \ - return PERMISSION_DENIED; \ - } } while (0) - status_t BnMediaRecorder::onTransact( uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) { diff --git a/media/libmedia/IOMX.cpp b/media/libmedia/IOMX.cpp new file mode 100644 index 000000000000..88a70649dd69 --- /dev/null +++ b/media/libmedia/IOMX.cpp @@ -0,0 +1,747 @@ +//#define LOG_NDEBUG 0 +#define LOG_TAG "IOMX" +#include <utils/Log.h> + +#include <binder/IMemory.h> +#include <binder/Parcel.h> +#include <media/IOMX.h> +#include <ui/ISurface.h> +#include <ui/Surface.h> + +namespace android { + +enum { + CONNECT = IBinder::FIRST_CALL_TRANSACTION, + LIST_NODES, + ALLOCATE_NODE, + FREE_NODE, + SEND_COMMAND, + GET_PARAMETER, + SET_PARAMETER, + GET_CONFIG, + SET_CONFIG, + USE_BUFFER, + ALLOC_BUFFER, + ALLOC_BUFFER_WITH_BACKUP, + FREE_BUFFER, + FILL_BUFFER, + EMPTY_BUFFER, + GET_EXTENSION_INDEX, + CREATE_RENDERER, + OBSERVER_ON_MSG, + RENDERER_RENDER, +}; + +sp<IOMXRenderer> IOMX::createRenderer( + const sp<Surface> &surface, + const char *componentName, + OMX_COLOR_FORMATTYPE colorFormat, + size_t encodedWidth, size_t encodedHeight, + size_t displayWidth, size_t displayHeight) { + return createRenderer( + surface->getISurface(), + componentName, colorFormat, 
encodedWidth, encodedHeight, + displayWidth, displayHeight); +} + +sp<IOMXRenderer> IOMX::createRendererFromJavaSurface( + JNIEnv *env, jobject javaSurface, + const char *componentName, + OMX_COLOR_FORMATTYPE colorFormat, + size_t encodedWidth, size_t encodedHeight, + size_t displayWidth, size_t displayHeight) { + jclass surfaceClass = env->FindClass("android/view/Surface"); + if (surfaceClass == NULL) { + LOGE("Can't find android/view/Surface"); + return NULL; + } + + jfieldID surfaceID = env->GetFieldID(surfaceClass, "mSurface", "I"); + if (surfaceID == NULL) { + LOGE("Can't find Surface.mSurface"); + return NULL; + } + + sp<Surface> surface = (Surface *)env->GetIntField(javaSurface, surfaceID); + + return createRenderer( + surface, componentName, colorFormat, encodedWidth, + encodedHeight, displayWidth, displayHeight); +} + +class BpOMX : public BpInterface<IOMX> { +public: + BpOMX(const sp<IBinder> &impl) + : BpInterface<IOMX>(impl) { + } + + virtual status_t listNodes(List<String8> *list) { + list->clear(); + + Parcel data, reply; + data.writeInterfaceToken(IOMX::getInterfaceDescriptor()); + remote()->transact(LIST_NODES, data, &reply); + + int32_t n = reply.readInt32(); + for (int32_t i = 0; i < n; ++i) { + String8 s = reply.readString8(); + + list->push_back(s); + } + + return OK; + } + + virtual status_t allocateNode( + const char *name, const sp<IOMXObserver> &observer, node_id *node) { + Parcel data, reply; + data.writeInterfaceToken(IOMX::getInterfaceDescriptor()); + data.writeCString(name); + data.writeStrongBinder(observer->asBinder()); + remote()->transact(ALLOCATE_NODE, data, &reply); + + status_t err = reply.readInt32(); + if (err == OK) { + *node = (void*)reply.readIntPtr(); + } else { + *node = 0; + } + + return err; + } + + virtual status_t freeNode(node_id node) { + Parcel data, reply; + data.writeInterfaceToken(IOMX::getInterfaceDescriptor()); + data.writeIntPtr((intptr_t)node); + remote()->transact(FREE_NODE, data, &reply); + + return 
reply.readInt32(); + } + + virtual status_t sendCommand( + node_id node, OMX_COMMANDTYPE cmd, OMX_S32 param) { + Parcel data, reply; + data.writeInterfaceToken(IOMX::getInterfaceDescriptor()); + data.writeIntPtr((intptr_t)node); + data.writeInt32(cmd); + data.writeInt32(param); + remote()->transact(SEND_COMMAND, data, &reply); + + return reply.readInt32(); + } + + virtual status_t getParameter( + node_id node, OMX_INDEXTYPE index, + void *params, size_t size) { + Parcel data, reply; + data.writeInterfaceToken(IOMX::getInterfaceDescriptor()); + data.writeIntPtr((intptr_t)node); + data.writeInt32(index); + data.writeInt32(size); + data.write(params, size); + remote()->transact(GET_PARAMETER, data, &reply); + + status_t err = reply.readInt32(); + if (err != OK) { + return err; + } + + reply.read(params, size); + + return OK; + } + + virtual status_t setParameter( + node_id node, OMX_INDEXTYPE index, + const void *params, size_t size) { + Parcel data, reply; + data.writeInterfaceToken(IOMX::getInterfaceDescriptor()); + data.writeIntPtr((intptr_t)node); + data.writeInt32(index); + data.writeInt32(size); + data.write(params, size); + remote()->transact(SET_PARAMETER, data, &reply); + + return reply.readInt32(); + } + + virtual status_t getConfig( + node_id node, OMX_INDEXTYPE index, + void *params, size_t size) { + Parcel data, reply; + data.writeInterfaceToken(IOMX::getInterfaceDescriptor()); + data.writeIntPtr((intptr_t)node); + data.writeInt32(index); + data.writeInt32(size); + data.write(params, size); + remote()->transact(GET_CONFIG, data, &reply); + + status_t err = reply.readInt32(); + if (err != OK) { + return err; + } + + reply.read(params, size); + + return OK; + } + + virtual status_t setConfig( + node_id node, OMX_INDEXTYPE index, + const void *params, size_t size) { + Parcel data, reply; + data.writeInterfaceToken(IOMX::getInterfaceDescriptor()); + data.writeIntPtr((intptr_t)node); + data.writeInt32(index); + data.writeInt32(size); + data.write(params, 
size); + remote()->transact(SET_CONFIG, data, &reply); + + return reply.readInt32(); + } + + virtual status_t useBuffer( + node_id node, OMX_U32 port_index, const sp<IMemory> ¶ms, + buffer_id *buffer) { + Parcel data, reply; + data.writeInterfaceToken(IOMX::getInterfaceDescriptor()); + data.writeIntPtr((intptr_t)node); + data.writeInt32(port_index); + data.writeStrongBinder(params->asBinder()); + remote()->transact(USE_BUFFER, data, &reply); + + status_t err = reply.readInt32(); + if (err != OK) { + *buffer = 0; + + return err; + } + + *buffer = (void*)reply.readIntPtr(); + + return err; + } + + virtual status_t allocateBuffer( + node_id node, OMX_U32 port_index, size_t size, + buffer_id *buffer) { + Parcel data, reply; + data.writeInterfaceToken(IOMX::getInterfaceDescriptor()); + data.writeIntPtr((intptr_t)node); + data.writeInt32(port_index); + data.writeInt32(size); + remote()->transact(ALLOC_BUFFER, data, &reply); + + status_t err = reply.readInt32(); + if (err != OK) { + *buffer = 0; + + return err; + } + + *buffer = (void*)reply.readIntPtr(); + + return err; + } + + virtual status_t allocateBufferWithBackup( + node_id node, OMX_U32 port_index, const sp<IMemory> ¶ms, + buffer_id *buffer) { + Parcel data, reply; + data.writeInterfaceToken(IOMX::getInterfaceDescriptor()); + data.writeIntPtr((intptr_t)node); + data.writeInt32(port_index); + data.writeStrongBinder(params->asBinder()); + remote()->transact(ALLOC_BUFFER_WITH_BACKUP, data, &reply); + + status_t err = reply.readInt32(); + if (err != OK) { + *buffer = 0; + + return err; + } + + *buffer = (void*)reply.readIntPtr(); + + return err; + } + + virtual status_t freeBuffer( + node_id node, OMX_U32 port_index, buffer_id buffer) { + Parcel data, reply; + data.writeInterfaceToken(IOMX::getInterfaceDescriptor()); + data.writeIntPtr((intptr_t)node); + data.writeInt32(port_index); + data.writeIntPtr((intptr_t)buffer); + remote()->transact(FREE_BUFFER, data, &reply); + + return reply.readInt32(); + } + + virtual 
status_t fillBuffer(node_id node, buffer_id buffer) { + Parcel data, reply; + data.writeInterfaceToken(IOMX::getInterfaceDescriptor()); + data.writeIntPtr((intptr_t)node); + data.writeIntPtr((intptr_t)buffer); + remote()->transact(FILL_BUFFER, data, &reply, IBinder::FLAG_ONEWAY); + + return reply.readInt32(); + } + + virtual status_t emptyBuffer( + node_id node, + buffer_id buffer, + OMX_U32 range_offset, OMX_U32 range_length, + OMX_U32 flags, OMX_TICKS timestamp) { + Parcel data, reply; + data.writeInterfaceToken(IOMX::getInterfaceDescriptor()); + data.writeIntPtr((intptr_t)node); + data.writeIntPtr((intptr_t)buffer); + data.writeInt32(range_offset); + data.writeInt32(range_length); + data.writeInt32(flags); + data.writeInt64(timestamp); + remote()->transact(EMPTY_BUFFER, data, &reply, IBinder::FLAG_ONEWAY); + + return reply.readInt32(); + } + + virtual status_t getExtensionIndex( + node_id node, + const char *parameter_name, + OMX_INDEXTYPE *index) { + Parcel data, reply; + data.writeInterfaceToken(IOMX::getInterfaceDescriptor()); + data.writeIntPtr((intptr_t)node); + data.writeCString(parameter_name); + + remote()->transact(GET_EXTENSION_INDEX, data, &reply); + + status_t err = reply.readInt32(); + if (err == OK) { + *index = static_cast<OMX_INDEXTYPE>(reply.readInt32()); + } else { + *index = OMX_IndexComponentStartUnused; + } + + return err; + } + + virtual sp<IOMXRenderer> createRenderer( + const sp<ISurface> &surface, + const char *componentName, + OMX_COLOR_FORMATTYPE colorFormat, + size_t encodedWidth, size_t encodedHeight, + size_t displayWidth, size_t displayHeight) { + Parcel data, reply; + data.writeInterfaceToken(IOMX::getInterfaceDescriptor()); + + data.writeStrongBinder(surface->asBinder()); + data.writeCString(componentName); + data.writeInt32(colorFormat); + data.writeInt32(encodedWidth); + data.writeInt32(encodedHeight); + data.writeInt32(displayWidth); + data.writeInt32(displayHeight); + + remote()->transact(CREATE_RENDERER, data, &reply); + + 
return interface_cast<IOMXRenderer>(reply.readStrongBinder()); + } +}; + +IMPLEMENT_META_INTERFACE(OMX, "android.hardware.IOMX"); + +//////////////////////////////////////////////////////////////////////////////// + +#define CHECK_INTERFACE(interface, data, reply) \ + do { if (!data.enforceInterface(interface::getInterfaceDescriptor())) { \ + LOGW("Call incorrectly routed to " #interface); \ + return PERMISSION_DENIED; \ + } } while (0) + +status_t BnOMX::onTransact( + uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags) { + switch (code) { + case LIST_NODES: + { + CHECK_INTERFACE(IOMX, data, reply); + + List<String8> list; + listNodes(&list); + + reply->writeInt32(list.size()); + for (List<String8>::iterator it = list.begin(); + it != list.end(); ++it) { + reply->writeString8(*it); + } + + return NO_ERROR; + } + + case ALLOCATE_NODE: + { + CHECK_INTERFACE(IOMX, data, reply); + + const char *name = data.readCString(); + + sp<IOMXObserver> observer = + interface_cast<IOMXObserver>(data.readStrongBinder()); + + node_id node; + + status_t err = allocateNode(name, observer, &node); + reply->writeInt32(err); + if (err == OK) { + reply->writeIntPtr((intptr_t)node); + } + + return NO_ERROR; + } + + case FREE_NODE: + { + CHECK_INTERFACE(IOMX, data, reply); + + node_id node = (void*)data.readIntPtr(); + + reply->writeInt32(freeNode(node)); + + return NO_ERROR; + } + + case SEND_COMMAND: + { + CHECK_INTERFACE(IOMX, data, reply); + + node_id node = (void*)data.readIntPtr(); + + OMX_COMMANDTYPE cmd = + static_cast<OMX_COMMANDTYPE>(data.readInt32()); + + OMX_S32 param = data.readInt32(); + reply->writeInt32(sendCommand(node, cmd, param)); + + return NO_ERROR; + } + + case GET_PARAMETER: + { + CHECK_INTERFACE(IOMX, data, reply); + + node_id node = (void*)data.readIntPtr(); + OMX_INDEXTYPE index = static_cast<OMX_INDEXTYPE>(data.readInt32()); + + size_t size = data.readInt32(); + + // XXX I am not happy with this but Parcel::readInplace didn't work. 
+ void *params = malloc(size); + data.read(params, size); + + status_t err = getParameter(node, index, params, size); + + reply->writeInt32(err); + + if (err == OK) { + reply->write(params, size); + } + + free(params); + params = NULL; + + return NO_ERROR; + } + + case SET_PARAMETER: + { + CHECK_INTERFACE(IOMX, data, reply); + + node_id node = (void*)data.readIntPtr(); + OMX_INDEXTYPE index = static_cast<OMX_INDEXTYPE>(data.readInt32()); + + size_t size = data.readInt32(); + void *params = const_cast<void *>(data.readInplace(size)); + + reply->writeInt32(setParameter(node, index, params, size)); + + return NO_ERROR; + } + + case GET_CONFIG: + { + CHECK_INTERFACE(IOMX, data, reply); + + node_id node = (void*)data.readIntPtr(); + OMX_INDEXTYPE index = static_cast<OMX_INDEXTYPE>(data.readInt32()); + + size_t size = data.readInt32(); + + // XXX I am not happy with this but Parcel::readInplace didn't work. + void *params = malloc(size); + data.read(params, size); + + status_t err = getConfig(node, index, params, size); + + reply->writeInt32(err); + + if (err == OK) { + reply->write(params, size); + } + + free(params); + params = NULL; + + return NO_ERROR; + } + + case SET_CONFIG: + { + CHECK_INTERFACE(IOMX, data, reply); + + node_id node = (void*)data.readIntPtr(); + OMX_INDEXTYPE index = static_cast<OMX_INDEXTYPE>(data.readInt32()); + + size_t size = data.readInt32(); + void *params = const_cast<void *>(data.readInplace(size)); + + reply->writeInt32(setConfig(node, index, params, size)); + + return NO_ERROR; + } + + case USE_BUFFER: + { + CHECK_INTERFACE(IOMX, data, reply); + + node_id node = (void*)data.readIntPtr(); + OMX_U32 port_index = data.readInt32(); + sp<IMemory> params = + interface_cast<IMemory>(data.readStrongBinder()); + + buffer_id buffer; + status_t err = useBuffer(node, port_index, params, &buffer); + reply->writeInt32(err); + + if (err == OK) { + reply->writeIntPtr((intptr_t)buffer); + } + + return NO_ERROR; + } + + case ALLOC_BUFFER: + { + 
CHECK_INTERFACE(IOMX, data, reply); + + node_id node = (void*)data.readIntPtr(); + OMX_U32 port_index = data.readInt32(); + size_t size = data.readInt32(); + + buffer_id buffer; + status_t err = allocateBuffer(node, port_index, size, &buffer); + reply->writeInt32(err); + + if (err == OK) { + reply->writeIntPtr((intptr_t)buffer); + } + + return NO_ERROR; + } + + case ALLOC_BUFFER_WITH_BACKUP: + { + CHECK_INTERFACE(IOMX, data, reply); + + node_id node = (void*)data.readIntPtr(); + OMX_U32 port_index = data.readInt32(); + sp<IMemory> params = + interface_cast<IMemory>(data.readStrongBinder()); + + buffer_id buffer; + status_t err = allocateBufferWithBackup( + node, port_index, params, &buffer); + + reply->writeInt32(err); + + if (err == OK) { + reply->writeIntPtr((intptr_t)buffer); + } + + return NO_ERROR; + } + + case FREE_BUFFER: + { + CHECK_INTERFACE(IOMX, data, reply); + + node_id node = (void*)data.readIntPtr(); + OMX_U32 port_index = data.readInt32(); + buffer_id buffer = (void*)data.readIntPtr(); + reply->writeInt32(freeBuffer(node, port_index, buffer)); + + return NO_ERROR; + } + + case FILL_BUFFER: + { + CHECK_INTERFACE(IOMX, data, reply); + + node_id node = (void*)data.readIntPtr(); + buffer_id buffer = (void*)data.readIntPtr(); + reply->writeInt32(fillBuffer(node, buffer)); + + return NO_ERROR; + } + + case EMPTY_BUFFER: + { + CHECK_INTERFACE(IOMX, data, reply); + + node_id node = (void*)data.readIntPtr(); + buffer_id buffer = (void*)data.readIntPtr(); + OMX_U32 range_offset = data.readInt32(); + OMX_U32 range_length = data.readInt32(); + OMX_U32 flags = data.readInt32(); + OMX_TICKS timestamp = data.readInt64(); + + reply->writeInt32( + emptyBuffer( + node, buffer, range_offset, range_length, + flags, timestamp)); + + return NO_ERROR; + } + + case GET_EXTENSION_INDEX: + { + CHECK_INTERFACE(IOMX, data, reply); + + node_id node = (void*)data.readIntPtr(); + const char *parameter_name = data.readCString(); + + OMX_INDEXTYPE index; + status_t err = 
getExtensionIndex(node, parameter_name, &index); + + reply->writeInt32(err); + + if (err == OK) { + reply->writeInt32(index); + } + + return OK; + } + + case CREATE_RENDERER: + { + CHECK_INTERFACE(IOMX, data, reply); + + sp<ISurface> isurface = + interface_cast<ISurface>(data.readStrongBinder()); + + const char *componentName = data.readCString(); + + OMX_COLOR_FORMATTYPE colorFormat = + static_cast<OMX_COLOR_FORMATTYPE>(data.readInt32()); + + size_t encodedWidth = (size_t)data.readInt32(); + size_t encodedHeight = (size_t)data.readInt32(); + size_t displayWidth = (size_t)data.readInt32(); + size_t displayHeight = (size_t)data.readInt32(); + + sp<IOMXRenderer> renderer = + createRenderer(isurface, componentName, colorFormat, + encodedWidth, encodedHeight, + displayWidth, displayHeight); + + reply->writeStrongBinder(renderer->asBinder()); + + return OK; + } + + default: + return BBinder::onTransact(code, data, reply, flags); + } +} + +//////////////////////////////////////////////////////////////////////////////// + +class BpOMXObserver : public BpInterface<IOMXObserver> { +public: + BpOMXObserver(const sp<IBinder> &impl) + : BpInterface<IOMXObserver>(impl) { + } + + virtual void onMessage(const omx_message &msg) { + Parcel data, reply; + data.writeInterfaceToken(IOMXObserver::getInterfaceDescriptor()); + data.write(&msg, sizeof(msg)); + + remote()->transact(OBSERVER_ON_MSG, data, &reply, IBinder::FLAG_ONEWAY); + } +}; + +IMPLEMENT_META_INTERFACE(OMXObserver, "android.hardware.IOMXObserver"); + +status_t BnOMXObserver::onTransact( + uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags) { + switch (code) { + case OBSERVER_ON_MSG: + { + CHECK_INTERFACE(IOMXObserver, data, reply); + + omx_message msg; + data.read(&msg, sizeof(msg)); + + // XXX Could use readInplace maybe? 
+ onMessage(msg); + + return NO_ERROR; + } + + default: + return BBinder::onTransact(code, data, reply, flags); + } +} + +//////////////////////////////////////////////////////////////////////////////// + +class BpOMXRenderer : public BpInterface<IOMXRenderer> { +public: + BpOMXRenderer(const sp<IBinder> &impl) + : BpInterface<IOMXRenderer>(impl) { + } + + virtual void render(IOMX::buffer_id buffer) { + Parcel data, reply; + data.writeInterfaceToken(IOMXRenderer::getInterfaceDescriptor()); + data.writeIntPtr((intptr_t)buffer); + + // NOTE: Do NOT make this a ONE_WAY call, it must be synchronous + // so that the caller knows when to recycle the buffer. + remote()->transact(RENDERER_RENDER, data, &reply); + } +}; + +IMPLEMENT_META_INTERFACE(OMXRenderer, "android.hardware.IOMXRenderer"); + +status_t BnOMXRenderer::onTransact( + uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags) { + switch (code) { + case RENDERER_RENDER: + { + CHECK_INTERFACE(IOMXRenderer, data, reply); + + IOMX::buffer_id buffer = (void*)data.readIntPtr(); + + render(buffer); + + return NO_ERROR; + } + + default: + return BBinder::onTransact(code, data, reply, flags); + } +} + +} // namespace android diff --git a/media/libmedia/JetPlayer.cpp b/media/libmedia/JetPlayer.cpp index 586aacb61979..ee9e1d874d8f 100644 --- a/media/libmedia/JetPlayer.cpp +++ b/media/libmedia/JetPlayer.cpp @@ -99,7 +99,7 @@ int JetPlayer::init() mAudioTrack->set(AudioSystem::MUSIC, //TODO parametrize this pLibConfig->sampleRate, 1, // format = PCM 16bits per sample, - pLibConfig->numChannels, + (pLibConfig->numChannels == 2) ? 
AudioSystem::CHANNEL_OUT_STEREO : AudioSystem::CHANNEL_OUT_MONO, mTrackBufferSize, 0); diff --git a/media/libmedia/Metadata.cpp b/media/libmedia/Metadata.cpp new file mode 100644 index 000000000000..35ec6b34872f --- /dev/null +++ b/media/libmedia/Metadata.cpp @@ -0,0 +1,168 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "Metadata" +#include <utils/Log.h> + +#include <sys/types.h> +#include <media/Metadata.h> +#include <binder/Parcel.h> +#include <utils/Errors.h> +#include <utils/RefBase.h> + +// This file contains code to serialize Metadata triples (key, type, +// value) into a parcel. The Parcel is destinated to be decoded by the +// Metadata.java class. + +namespace { +// All these constants below must be kept in sync with Metadata.java. 
+enum MetadataId { + FIRST_SYSTEM_ID = 1, + LAST_SYSTEM_ID = 31, + FIRST_CUSTOM_ID = 8192 +}; + +// Types +enum Types { + STRING_VAL = 1, + INTEGER_VAL, + BOOLEAN_VAL, + LONG_VAL, + DOUBLE_VAL, + TIMED_TEXT_VAL, + DATE_VAL, + BYTE_ARRAY_VAL, +}; + +const size_t kRecordHeaderSize = 3 * sizeof(int32_t); +const int32_t kMetaMarker = 0x4d455441; // 'M' 'E' 'T' 'A' + +} // anonymous namespace + +namespace android { +namespace media { + +Metadata::Metadata(Parcel *p) + :mData(p), + mBegin(p->dataPosition()) { } + +Metadata::~Metadata() { } + +void Metadata::resetParcel() +{ + mData->setDataPosition(mBegin); +} + +// Update the 4 bytes int at the beginning of the parcel which holds +// the number of bytes written so far. +void Metadata::updateLength() +{ + const size_t end = mData->dataPosition(); + + mData->setDataPosition(mBegin); + mData->writeInt32(end - mBegin); + mData->setDataPosition(end); +} + +// Write the header. The java layer will look for the marker. +bool Metadata::appendHeader() +{ + bool ok = true; + + // Placeholder for the length of the metadata + ok = ok && mData->writeInt32(-1) == OK; + ok = ok && mData->writeInt32(kMetaMarker) == OK; + return ok; +} + +bool Metadata::appendBool(int key, bool val) +{ + if (!checkKey(key)) { + return false; + } + + const size_t begin = mData->dataPosition(); + bool ok = true; + + // 4 int32s: size, key, type, value. + ok = ok && mData->writeInt32(4 * sizeof(int32_t)) == OK; + ok = ok && mData->writeInt32(key) == OK; + ok = ok && mData->writeInt32(BOOLEAN_VAL) == OK; + ok = ok && mData->writeInt32(val ? 1 : 0) == OK; + if (!ok) { + mData->setDataPosition(begin); + } + return ok; +} + +bool Metadata::appendInt32(int key, int32_t val) +{ + if (!checkKey(key)) { + return false; + } + + const size_t begin = mData->dataPosition(); + bool ok = true; + + // 4 int32s: size, key, type, value. 
+ ok = ok && mData->writeInt32(4 * sizeof(int32_t)) == OK; + ok = ok && mData->writeInt32(key) == OK; + ok = ok && mData->writeInt32(INTEGER_VAL) == OK; + ok = ok && mData->writeInt32(val) == OK; + if (!ok) { + mData->setDataPosition(begin); + } + return ok; +} + +// Check the key (i.e metadata id) is valid if it is a system one. +// Loop over all the exiting ones in the Parcel to check for duplicate +// (not allowed). +bool Metadata::checkKey(int key) +{ + if (key < FIRST_SYSTEM_ID || + (LAST_SYSTEM_ID < key && key < FIRST_CUSTOM_ID)) { + LOGE("Bad key %d", key); + return false; + } + size_t curr = mData->dataPosition(); + // Loop over the keys to check if it has been used already. + mData->setDataPosition(mBegin); + + bool error = false; + size_t left = curr - mBegin; + while (left > 0) { + size_t pos = mData->dataPosition(); + size_t size = mData->readInt32(); + if (size < kRecordHeaderSize || size > left) { + error = true; + break; + } + if (mData->readInt32() == key) { + LOGE("Key exists already %d", key); + error = true; + break; + } + mData->setDataPosition(pos + size); + left -= size; + } + mData->setDataPosition(curr); + return !error; +} + +} // namespace android::media +} // namespace android diff --git a/media/libmedia/ToneGenerator.cpp b/media/libmedia/ToneGenerator.cpp index 5435da74825d..60e3d71171a6 100644 --- a/media/libmedia/ToneGenerator.cpp +++ b/media/libmedia/ToneGenerator.cpp @@ -791,7 +791,6 @@ const unsigned char ToneGenerator::sToneMappingTable[NUM_REGIONS-1][NUM_SUP_TONE // generators, instantiates output audio track. 
// // Input: -// toneType: Type of tone generated (values in enum tone_type) // streamType: Type of stream used for tone playback (enum AudioTrack::stream_type) // volume: volume applied to tone (0.0 to 1.0) // @@ -799,7 +798,7 @@ const unsigned char ToneGenerator::sToneMappingTable[NUM_REGIONS-1][NUM_SUP_TONE // none // //////////////////////////////////////////////////////////////////////////////// -ToneGenerator::ToneGenerator(int streamType, float volume) { +ToneGenerator::ToneGenerator(int streamType, float volume, bool threadCanCallJava) { LOGV("ToneGenerator constructor: streamType=%d, volume=%f\n", streamType, volume); @@ -809,6 +808,7 @@ ToneGenerator::ToneGenerator(int streamType, float volume) { LOGE("Unable to marshal AudioFlinger"); return; } + mThreadCanCallJava = threadCanCallJava; mStreamType = streamType; mVolume = volume; mpAudioTrack = 0; @@ -869,14 +869,18 @@ ToneGenerator::~ToneGenerator() { // Description: Starts tone playback. // // Input: -// none +// toneType: Type of tone generated (values in enum tone_type) +// durationMs: The tone duration in milliseconds. If the tone is limited in time by definition, +// the actual duration will be the minimum of durationMs and the defined tone duration. +// Ommiting or setting durationMs to -1 does not limit tone duration. 
// // Output: // none // //////////////////////////////////////////////////////////////////////////////// -bool ToneGenerator::startTone(int toneType) { +bool ToneGenerator::startTone(int toneType, int durationMs) { bool lResult = false; + status_t lStatus; if ((toneType < 0) || (toneType >= NUM_TONES)) return lResult; @@ -896,6 +900,19 @@ bool ToneGenerator::startTone(int toneType) { toneType = getToneForRegion(toneType); mpNewToneDesc = &sToneDescriptors[toneType]; + mDurationMs = durationMs; + + if (mState == TONE_STOPPED) { + LOGV("Start waiting for previous tone to stop"); + lStatus = mWaitCbkCond.waitRelative(mLock, seconds(3)); + if (lStatus != NO_ERROR) { + LOGE("--- start wait for stop timed out, status %d", lStatus); + mState = TONE_IDLE; + mLock.unlock(); + return lResult; + } + } + if (mState == TONE_INIT) { if (prepareWave()) { LOGV("Immediate start, time %d\n", (unsigned int)(systemTime()/1000000)); @@ -906,7 +923,7 @@ bool ToneGenerator::startTone(int toneType) { mLock.lock(); if (mState == TONE_STARTING) { LOGV("Wait for start callback"); - status_t lStatus = mWaitCbkCond.waitRelative(mLock, seconds(1)); + lStatus = mWaitCbkCond.waitRelative(mLock, seconds(3)); if (lStatus != NO_ERROR) { LOGE("--- Immediate start timed out, status %d", lStatus); mState = TONE_IDLE; @@ -914,13 +931,12 @@ bool ToneGenerator::startTone(int toneType) { } } } else { - mState == TONE_IDLE; + mState = TONE_IDLE; } } else { LOGV("Delayed start\n"); - mState = TONE_RESTARTING; - status_t lStatus = mWaitCbkCond.waitRelative(mLock, seconds(1)); + lStatus = mWaitCbkCond.waitRelative(mLock, seconds(3)); if (lStatus == NO_ERROR) { if (mState != TONE_IDLE) { lResult = true; @@ -959,7 +975,7 @@ void ToneGenerator::stopTone() { if (mState == TONE_PLAYING || mState == TONE_STARTING || mState == TONE_RESTARTING) { mState = TONE_STOPPING; LOGV("waiting cond"); - status_t lStatus = mWaitCbkCond.waitRelative(mLock, seconds(1)); + status_t lStatus = mWaitCbkCond.waitRelative(mLock, 
seconds(3)); if (lStatus == NO_ERROR) { LOGV("track stop complete, time %d", (unsigned int)(systemTime()/1000000)); } else { @@ -1000,15 +1016,25 @@ bool ToneGenerator::initAudioTrack() { } // Open audio track in mono, PCM 16bit, default sampling rate, default buffer size - mpAudioTrack - = new AudioTrack(mStreamType, 0, AudioSystem::PCM_16_BIT, 1, 0, 0, audioCallback, this, 0); - + mpAudioTrack = new AudioTrack(); if (mpAudioTrack == 0) { LOGE("AudioTrack allocation failed"); goto initAudioTrack_exit; } LOGV("Create Track: %p\n", mpAudioTrack); + mpAudioTrack->set(mStreamType, + 0, + AudioSystem::PCM_16_BIT, + AudioSystem::CHANNEL_OUT_MONO, + 0, + 0, + audioCallback, + this, + 0, + 0, + mThreadCanCallJava); + if (mpAudioTrack->initCheck() != NO_ERROR) { LOGE("AudioTrack->initCheck failed"); goto initAudioTrack_exit; @@ -1053,7 +1079,7 @@ void ToneGenerator::audioCallback(int event, void* user, void *info) { if (event != AudioTrack::EVENT_MORE_DATA) return; - const AudioTrack::Buffer *buffer = static_cast<const AudioTrack::Buffer *>(info); + AudioTrack::Buffer *buffer = static_cast<AudioTrack::Buffer *>(info); ToneGenerator *lpToneGen = static_cast<ToneGenerator *>(user); short *lpOut = buffer->i16; unsigned int lNumSmp = buffer->size/sizeof(short); @@ -1073,6 +1099,7 @@ void ToneGenerator::audioCallback(int event, void* user, void *info) { lpToneGen->mLock.lock(); + // Update pcm frame count and end time (current time at the end of this process) lpToneGen->mTotalSmp += lReqSmp; @@ -1093,20 +1120,26 @@ void ToneGenerator::audioCallback(int event, void* user, void *info) { lWaveCmd = WaveGenerator::WAVEGEN_STOP; lpToneGen->mNextSegSmp = TONEGEN_INF; // forced to skip state machine management below break; + case TONE_STOPPED: + LOGV("Stopped Cbk"); + goto audioCallback_EndLoop; default: LOGV("Extra Cbk"); - // Force loop exit - lNumSmp = 0; goto audioCallback_EndLoop; } - // Exit if tone sequence is over - if (lpToneDesc->segments[lpToneGen->mCurSegment].duration == 
0) { + if (lpToneDesc->segments[lpToneGen->mCurSegment].duration == 0 || + lpToneGen->mTotalSmp > lpToneGen->mMaxSmp) { if (lpToneGen->mState == TONE_PLAYING) { lpToneGen->mState = TONE_STOPPING; } - goto audioCallback_EndLoop; + if (lpToneDesc->segments[lpToneGen->mCurSegment].duration == 0) { + goto audioCallback_EndLoop; + } + // fade out before stopping if maximum duration reached + lWaveCmd = WaveGenerator::WAVEGEN_STOP; + lpToneGen->mNextSegSmp = TONEGEN_INF; // forced to skip state machine management below } if (lpToneGen->mTotalSmp > lpToneGen->mNextSegSmp) { @@ -1237,20 +1270,31 @@ audioCallback_EndLoop: lSignal = true; break; case TONE_STOPPING: + LOGV("Cbk Stopping\n"); + lpToneGen->mState = TONE_STOPPED; + // Force loop exit + lNumSmp = 0; + break; + case TONE_STOPPED: lpToneGen->mState = TONE_INIT; - LOGV("Cbk Stopping track\n"); - lSignal = true; + LOGV("Cbk Stopped track\n"); lpToneGen->mpAudioTrack->stop(); - // Force loop exit lNumSmp = 0; + buffer->size = 0; + lSignal = true; break; case TONE_STARTING: LOGV("Cbk starting track\n"); lpToneGen->mState = TONE_PLAYING; lSignal = true; break; + case TONE_PLAYING: + break; default: + // Force loop exit + lNumSmp = 0; + buffer->size = 0; break; } @@ -1286,6 +1330,17 @@ bool ToneGenerator::prepareWave() { mpToneDesc = mpNewToneDesc; + if (mDurationMs == -1) { + mMaxSmp = TONEGEN_INF; + } else { + if (mDurationMs > (int)(TONEGEN_INF / mSamplingRate)) { + mMaxSmp = (mDurationMs / 1000) * mSamplingRate; + } else { + mMaxSmp = (mDurationMs * mSamplingRate) / 1000; + } + LOGV("prepareWave, duration limited to %d ms", mDurationMs); + } + while (mpToneDesc->segments[segmentIdx].duration) { // Get total number of sine waves: needed to adapt sine wave gain. 
unsigned int lNumWaves = numWaves(segmentIdx); diff --git a/media/libmedia/mediametadataretriever.cpp b/media/libmedia/mediametadataretriever.cpp index 09afc6c8798c..d34a8ed95b0c 100644 --- a/media/libmedia/mediametadataretriever.cpp +++ b/media/libmedia/mediametadataretriever.cpp @@ -18,8 +18,8 @@ //#define LOG_NDEBUG 0 #define LOG_TAG "MediaMetadataRetriever" -#include <utils/IServiceManager.h> -#include <utils/IPCThreadState.h> +#include <binder/IServiceManager.h> +#include <binder/IPCThreadState.h> #include <media/mediametadataretriever.h> #include <media/IMediaPlayerService.h> #include <utils/Log.h> diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp index 24e3e6f23432..040366be6814 100644 --- a/media/libmedia/mediaplayer.cpp +++ b/media/libmedia/mediaplayer.cpp @@ -24,13 +24,13 @@ #include <unistd.h> #include <fcntl.h> -#include <utils/IServiceManager.h> -#include <utils/IPCThreadState.h> +#include <binder/IServiceManager.h> +#include <binder/IPCThreadState.h> #include <media/mediaplayer.h> #include <media/AudioTrack.h> -#include <utils/MemoryBase.h> +#include <binder/MemoryBase.h> namespace android { @@ -196,12 +196,47 @@ status_t MediaPlayer::setDataSource(int fd, int64_t offset, int64_t length) return err; } +status_t MediaPlayer::invoke(const Parcel& request, Parcel *reply) +{ + Mutex::Autolock _l(mLock); + if ((mPlayer != NULL) && ( mCurrentState & MEDIA_PLAYER_INITIALIZED )) + { + LOGV("invoke %d", request.dataSize()); + return mPlayer->invoke(request, reply); + } + LOGE("invoke failed: wrong state %X", mCurrentState); + return INVALID_OPERATION; +} + +status_t MediaPlayer::setMetadataFilter(const Parcel& filter) +{ + LOGD("setMetadataFilter"); + Mutex::Autolock lock(mLock); + if (mPlayer == NULL) { + return NO_INIT; + } + return mPlayer->setMetadataFilter(filter); +} + +status_t MediaPlayer::getMetadata(bool update_only, bool apply_filter, Parcel *metadata) +{ + LOGD("getMetadata"); + Mutex::Autolock lock(mLock); + if (mPlayer 
== NULL) { + return NO_INIT; + } + return mPlayer->getMetadata(update_only, apply_filter, metadata); +} + status_t MediaPlayer::setVideoSurface(const sp<Surface>& surface) { LOGV("setVideoSurface"); Mutex::Autolock _l(mLock); if (mPlayer == 0) return NO_INIT; - return mPlayer->setVideoSurface(surface->getISurface()); + if (surface != NULL) + return mPlayer->setVideoSurface(surface->getISurface()); + else + return mPlayer->setVideoSurface(NULL); } // must call with lock held @@ -655,4 +690,61 @@ MediaPlayer::DeathNotifier::~DeathNotifier() } +extern "C" { +#define FLOATING_POINT 1 +#include "fftwrap.h" +} + +static void *ffttable = NULL; + +// peeks at the audio data and fills 'data' with the requested kind +// (currently kind=0 returns mono 16 bit PCM data, and kind=1 returns +// 256 point FFT data). Return value is number of samples returned, +// which may be 0. +/*static*/ int MediaPlayer::snoop(short* data, int len, int kind) { + + sp<IMemory> p; + const sp<IMediaPlayerService>& service = getMediaPlayerService(); + if (service != 0) { + // Take a peek at the waveform. The returned data consists of 16 bit mono PCM data. 
+ p = service->snoop(); + + if (p == NULL) { + return 0; + } + + if (kind == 0) { // return waveform data + int plen = p->size(); + len *= 2; // number of shorts -> number of bytes + short *src = (short*) p->pointer(); + if (plen > len) { + plen = len; + } + memcpy(data, src, plen); + return plen / sizeof(short); // return number of samples + } else if (kind == 1) { + // TODO: use a more efficient FFT + // Right now this uses the speex library, which is compiled to do a float FFT + if (!ffttable) ffttable = spx_fft_init(512); + short *usrc = (short*) p->pointer(); + float fsrc[512]; + for (int i=0;i<512;i++) + fsrc[i] = usrc[i]; + float fdst[512]; + spx_fft_float(ffttable, fsrc, fdst); + if (len > 512) { + len = 512; + } + len /= 2; // only half the output data is valid + for (int i=0; i < len; i++) + data[i] = fdst[i]; + return len; + } + + } else { + LOGE("Unable to locate media service"); + } + return 0; +} + }; // namespace android diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp index 5093f0e81ef5..6b6393164fe2 100644 --- a/media/libmedia/mediarecorder.cpp +++ b/media/libmedia/mediarecorder.cpp @@ -20,7 +20,7 @@ #include <utils/Log.h> #include <ui/Surface.h> #include <media/mediarecorder.h> -#include <utils/IServiceManager.h> +#include <binder/IServiceManager.h> #include <utils/String8.h> #include <media/IMediaPlayerService.h> #include <media/IMediaRecorder.h> diff --git a/media/libmediaplayerservice/Android.mk b/media/libmediaplayerservice/Android.mk index 99f222bcd2c3..fb569dab0590 100644 --- a/media/libmediaplayerservice/Android.mk +++ b/media/libmediaplayerservice/Android.mk @@ -7,38 +7,50 @@ LOCAL_PATH:= $(call my-dir) include $(CLEAR_VARS) LOCAL_SRC_FILES:= \ - MediaRecorderClient.cpp \ - MediaPlayerService.cpp \ + MediaRecorderClient.cpp \ + MediaPlayerService.cpp \ MetadataRetrieverClient.cpp \ - VorbisPlayer.cpp \ + TestPlayerStub.cpp \ + VorbisPlayer.cpp \ + VorbisMetadataRetriever.cpp \ + MidiMetadataRetriever.cpp \ 
MidiFile.cpp +ifeq ($(BUILD_WITH_FULL_STAGEFRIGHT),true) + +LOCAL_SRC_FILES += \ + StagefrightPlayer.cpp + +LOCAL_CFLAGS += -DBUILD_WITH_FULL_STAGEFRIGHT=1 + +endif + ifeq ($(TARGET_OS)-$(TARGET_SIMULATOR),linux-true) LOCAL_LDLIBS += -ldl -lpthread endif -LOCAL_SHARED_LIBRARIES := \ - libcutils \ - libutils \ - libvorbisidec \ - libsonivox \ - libopencore_player \ - libopencore_author \ - libmedia \ - libandroid_runtime - -ifneq ($(BUILD_WITHOUT_PV),true) -LOCAL_SHARED_LIBRARIES += \ - libopencore_player \ - libopencore_author +LOCAL_SHARED_LIBRARIES := \ + libcutils \ + libutils \ + libbinder \ + libvorbisidec \ + libsonivox \ + libopencore_player \ + libopencore_author \ + libmedia \ + libandroid_runtime \ + libstagefright \ + libstagefright_omx + +ifneq ($(TARGET_SIMULATOR),true) +LOCAL_SHARED_LIBRARIES += libdl endif -LOCAL_C_INCLUDES := external/tremor/Tremor \ - $(call include-path-for, graphics corecg) - -ifeq ($(BUILD_WITHOUT_PV),true) -LOCAL_CFLAGS := -DNO_OPENCORE -endif +LOCAL_C_INCLUDES := external/tremor/Tremor \ + $(JNI_H_INCLUDE) \ + $(call include-path-for, graphics corecg) \ + $(TOP)/external/opencore/extern_libs_v2/khronos/openmax/include \ + $(TOP)/frameworks/base/media/libstagefright/include LOCAL_MODULE:= libmediaplayerservice diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp index 8ef0dc6c42c4..0a6c365df5ad 100644 --- a/media/libmediaplayerservice/MediaPlayerService.cpp +++ b/media/libmediaplayerservice/MediaPlayerService.cpp @@ -27,18 +27,28 @@ #include <unistd.h> #include <string.h> + #include <cutils/atomic.h> +#include <cutils/properties.h> // for property_get + +#include <utils/misc.h> #include <android_runtime/ActivityManager.h> -#include <utils/IPCThreadState.h> -#include <utils/IServiceManager.h> -#include <utils/MemoryHeapBase.h> -#include <utils/MemoryBase.h> + +#include <binder/IPCThreadState.h> +#include <binder/IServiceManager.h> +#include <binder/MemoryHeapBase.h> 
+#include <binder/MemoryBase.h> +#include <utils/Errors.h> // for status_t +#include <utils/String8.h> +#include <utils/SystemClock.h> +#include <utils/Vector.h> #include <cutils/properties.h> #include <media/MediaPlayerInterface.h> #include <media/mediarecorder.h> #include <media/MediaMetadataRetrieverInterface.h> +#include <media/Metadata.h> #include <media/AudioTrack.h> #include "MediaRecorderClient.h" @@ -48,6 +58,10 @@ #include "MidiFile.h" #include "VorbisPlayer.h" #include <media/PVPlayer.h> +#include "TestPlayerStub.h" +#include "StagefrightPlayer.h" + +#include <OMX.h> /* desktop Linux needs a little help with gettid() */ #if defined(HAVE_GETTID) && !defined(HAVE_ANDROID_OS) @@ -61,6 +75,111 @@ pid_t gettid() { return syscall(__NR_gettid);} #undef __KERNEL__ #endif +namespace { +using android::media::Metadata; +using android::status_t; +using android::OK; +using android::BAD_VALUE; +using android::NOT_ENOUGH_DATA; +using android::Parcel; + +// Max number of entries in the filter. +const int kMaxFilterSize = 64; // I pulled that out of thin air. + +// FIXME: Move all the metadata related function in the Metadata.cpp + + +// Unmarshall a filter from a Parcel. +// Filter format in a parcel: +// +// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | number of entries (n) | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | metadata type 1 | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | metadata type 2 | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// .... +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// | metadata type n | +// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +// +// @param p Parcel that should start with a filter. +// @param[out] filter On exit contains the list of metadata type to be +// filtered. 
+// @param[out] status On exit contains the status code to be returned. +// @return true if the parcel starts with a valid filter. +bool unmarshallFilter(const Parcel& p, + Metadata::Filter *filter, + status_t *status) +{ + int32_t val; + if (p.readInt32(&val) != OK) + { + LOGE("Failed to read filter's length"); + *status = NOT_ENOUGH_DATA; + return false; + } + + if( val > kMaxFilterSize || val < 0) + { + LOGE("Invalid filter len %d", val); + *status = BAD_VALUE; + return false; + } + + const size_t num = val; + + filter->clear(); + filter->setCapacity(num); + + size_t size = num * sizeof(Metadata::Type); + + + if (p.dataAvail() < size) + { + LOGE("Filter too short expected %d but got %d", size, p.dataAvail()); + *status = NOT_ENOUGH_DATA; + return false; + } + + const Metadata::Type *data = + static_cast<const Metadata::Type*>(p.readInplace(size)); + + if (NULL == data) + { + LOGE("Filter had no data"); + *status = BAD_VALUE; + return false; + } + + // TODO: The stl impl of vector would be more efficient here + // because it degenerates into a memcpy on pod types. Try to + // replace later or use stl::set. + for (size_t i = 0; i < num; ++i) + { + filter->add(*data); + ++data; + } + *status = OK; + return true; +} + +// @param filter Of metadata type. +// @param val To be searched. +// @return true if a match was found. 
+bool findMetadata(const Metadata::Filter& filter, const int32_t val) +{ + // Deal with empty and ANY right away + if (filter.isEmpty()) return false; + if (filter[0] == Metadata::kAny) return true; + + return filter.indexOf(val) >= 0; +} + +} // anonymous namespace + namespace android { @@ -83,7 +202,7 @@ extmap FILE_EXTS [] = { }; // TODO: Find real cause of Audio/Video delay in PV framework and remove this workaround -/* static */ const uint32_t MediaPlayerService::AudioOutput::kAudioVideoDelayMs = 96; +/* static */ const uint32_t MediaPlayerService::AudioOutput::kAudioVideoDelayMs = 0; /* static */ int MediaPlayerService::AudioOutput::mMinBufferCount = 4; /* static */ bool MediaPlayerService::AudioOutput::mIsOnEmulator = false; @@ -106,7 +225,10 @@ MediaPlayerService::~MediaPlayerService() sp<IMediaRecorder> MediaPlayerService::createMediaRecorder(pid_t pid) { #ifndef NO_OPENCORE - sp<MediaRecorderClient> recorder = new MediaRecorderClient(pid); + sp<MediaRecorderClient> recorder = new MediaRecorderClient(this, pid); + wp<MediaRecorderClient> w = recorder; + Mutex::Autolock lock(mLock); + mMediaRecorderClients.add(w); #else sp<MediaRecorderClient> recorder = NULL; #endif @@ -114,6 +236,13 @@ sp<IMediaRecorder> MediaPlayerService::createMediaRecorder(pid_t pid) return recorder; } +void MediaPlayerService::removeMediaRecorderClient(wp<MediaRecorderClient> client) +{ + Mutex::Autolock lock(mLock); + mMediaRecorderClients.remove(client); + LOGV("Delete media recorder client"); +} + sp<IMediaMetadataRetriever> MediaPlayerService::createMetadataRetriever(pid_t pid) { sp<MetadataRetrieverClient> retriever = new MetadataRetrieverClient(pid); @@ -155,6 +284,16 @@ sp<IMediaPlayer> MediaPlayerService::create(pid_t pid, const sp<IMediaPlayerClie return c; } +sp<IOMX> MediaPlayerService::getOMX() { + Mutex::Autolock autoLock(mLock); + + if (mOMX.get() == NULL) { + mOMX = new OMX; + } + + return mOMX; +} + status_t MediaPlayerService::AudioCache::dump(int fd, const 
Vector<String16>& args) const { const size_t SIZE = 256; @@ -337,6 +476,13 @@ status_t MediaPlayerService::dump(int fd, const Vector<String16>& args) sp<Client> c = mClients[i].promote(); if (c != 0) c->dump(fd, args); } + for (int i = 0, n = mMediaRecorderClients.size(); i < n; ++i) { + result.append(" MediaRecorderClient\n"); + sp<MediaRecorderClient> c = mMediaRecorderClients[i].promote(); + snprintf(buffer, 255, " pid(%d)\n\n", c->mPid); + result.append(buffer); + } + result.append(" Files opened and/or mapped:\n"); snprintf(buffer, SIZE, "/proc/%d/maps", myTid()); FILE *f = fopen(buffer, "r"); @@ -461,6 +607,7 @@ void MediaPlayerService::Client::disconnect() p = mPlayer; } mClient.clear(); + mPlayer.clear(); // clear the notification to prevent callbacks to dead client @@ -478,7 +625,19 @@ void MediaPlayerService::Client::disconnect() IPCThreadState::self()->flushCommands(); } -static player_type getPlayerType(int fd, int64_t offset, int64_t length) +static player_type getDefaultPlayerType() { +#if BUILD_WITH_FULL_STAGEFRIGHT + char value[PROPERTY_VALUE_MAX]; + if (property_get("media.stagefright.enable-player", value, NULL) + && (!strcmp(value, "1") || !strcasecmp(value, "true"))) { + return STAGEFRIGHT_PLAYER; + } +#endif + + return PV_PLAYER; +} + +player_type getPlayerType(int fd, int64_t offset, int64_t length) { char buf[20]; lseek(fd, offset, SEEK_SET); @@ -508,12 +667,14 @@ static player_type getPlayerType(int fd, int64_t offset, int64_t length) EAS_Shutdown(easdata); } - // Fall through to PV - return PV_PLAYER; + return getDefaultPlayerType(); } -static player_type getPlayerType(const char* url) +player_type getPlayerType(const char* url) { + if (TestPlayerStub::canBeUsed(url)) { + return TEST_PLAYER; + } // use MidiFile for MIDI extensions int lenURL = strlen(url); @@ -527,8 +688,7 @@ static player_type getPlayerType(const char* url) } } - // Fall through to PV - return PV_PLAYER; + return getDefaultPlayerType(); } static sp<MediaPlayerBase> 
createPlayer(player_type playerType, void* cookie, @@ -550,6 +710,16 @@ static sp<MediaPlayerBase> createPlayer(player_type playerType, void* cookie, LOGV(" create VorbisPlayer"); p = new VorbisPlayer(); break; +#if BUILD_WITH_FULL_STAGEFRIGHT + case STAGEFRIGHT_PLAYER: + LOGV(" create StagefrightPlayer"); + p = new StagefrightPlayer; + break; +#endif + case TEST_PLAYER: + LOGV("Create Test Player stub"); + p = new TestPlayerStub(); + break; } if (p != NULL) { if (p->initCheck() == NO_ERROR) { @@ -614,7 +784,11 @@ status_t MediaPlayerService::Client::setDataSource(const char *url) // now set data source LOGV(" setDataSource"); mStatus = p->setDataSource(url); - if (mStatus == NO_ERROR) mPlayer = p; + if (mStatus == NO_ERROR) { + mPlayer = p; + } else { + LOGE(" error: %d", mStatus); + } return mStatus; } } @@ -671,6 +845,73 @@ status_t MediaPlayerService::Client::setVideoSurface(const sp<ISurface>& surface return p->setVideoSurface(surface); } +status_t MediaPlayerService::Client::invoke(const Parcel& request, + Parcel *reply) +{ + sp<MediaPlayerBase> p = getPlayer(); + if (p == NULL) return UNKNOWN_ERROR; + return p->invoke(request, reply); +} + +// This call doesn't need to access the native player. +status_t MediaPlayerService::Client::setMetadataFilter(const Parcel& filter) +{ + status_t status; + media::Metadata::Filter allow, drop; + + if (unmarshallFilter(filter, &allow, &status) && + unmarshallFilter(filter, &drop, &status)) { + Mutex::Autolock lock(mLock); + + mMetadataAllow = allow; + mMetadataDrop = drop; + } + return status; +} + +status_t MediaPlayerService::Client::getMetadata( + bool update_only, bool apply_filter, Parcel *reply) +{ + sp<MediaPlayerBase> player = getPlayer(); + if (player == 0) return UNKNOWN_ERROR; + + status_t status; + // Placeholder for the return code, updated by the caller. + reply->writeInt32(-1); + + media::Metadata::Filter ids; + + // We don't block notifications while we fetch the data. 
We clear + // mMetadataUpdated first so we don't lose notifications happening + // during the rest of this call. + { + Mutex::Autolock lock(mLock); + if (update_only) { + ids = mMetadataUpdated; + } + mMetadataUpdated.clear(); + } + + media::Metadata metadata(reply); + + metadata.appendHeader(); + status = player->getMetadata(ids, reply); + + if (status != OK) { + metadata.resetParcel(); + LOGE("getMetadata failed %d", status); + return status; + } + + // FIXME: Implement filtering on the result. Not critical since + // filtering takes place on the update notifications already. This + // would be when all the metadata are fetch and a filter is set. + + // Everything is fine, update the metadata length. + metadata.updateLength(); + return OK; +} + status_t MediaPlayerService::Client::prepareAsync() { LOGV("[%d] prepareAsync", mConnId); @@ -790,13 +1031,51 @@ status_t MediaPlayerService::Client::setVolume(float leftVolume, float rightVolu return NO_ERROR; } + void MediaPlayerService::Client::notify(void* cookie, int msg, int ext1, int ext2) { Client* client = static_cast<Client*>(cookie); + + if (MEDIA_INFO == msg && + MEDIA_INFO_METADATA_UPDATE == ext1) { + const media::Metadata::Type metadata_type = ext2; + + if(client->shouldDropMetadata(metadata_type)) { + return; + } + + // Update the list of metadata that have changed. getMetadata + // also access mMetadataUpdated and clears it. 
+ client->addNewMetadataUpdate(metadata_type); + } LOGV("[%d] notify (%p, %d, %d, %d)", client->mConnId, cookie, msg, ext1, ext2); client->mClient->notify(msg, ext1, ext2); } + +bool MediaPlayerService::Client::shouldDropMetadata(media::Metadata::Type code) const +{ + Mutex::Autolock lock(mLock); + + if (findMetadata(mMetadataDrop, code)) { + return true; + } + + if (mMetadataAllow.isEmpty() || findMetadata(mMetadataAllow, code)) { + return false; + } else { + return true; + } +} + + +void MediaPlayerService::Client::addNewMetadataUpdate(media::Metadata::Type metadata_type) { + Mutex::Autolock lock(mLock); + if (mMetadataUpdated.indexOf(metadata_type) < 0) { + mMetadataUpdated.add(metadata_type); + } +} + #if CALLBACK_ANTAGONIZER const int Antagonizer::interval = 10000; // 10 msecs @@ -930,16 +1209,129 @@ Exit: return mem; } +/* + * Avert your eyes, ugly hack ahead. + * The following is to support music visualizations. + */ + +static const int NUMVIZBUF = 32; +static const int VIZBUFFRAMES = 1024; +static const int TOTALBUFTIMEMSEC = NUMVIZBUF * VIZBUFFRAMES * 1000 / 44100; + +static bool gotMem = false; +static sp<MemoryBase> mem[NUMVIZBUF]; +static uint64_t timeStamp[NUMVIZBUF]; +static uint64_t lastReadTime; +static uint64_t lastWriteTime; +static int writeIdx = 0; + +static void allocVizBufs() { + if (!gotMem) { + for (int i=0;i<NUMVIZBUF;i++) { + sp<MemoryHeapBase> heap = new MemoryHeapBase(VIZBUFFRAMES*2, 0, "snooper"); + mem[i] = new MemoryBase(heap, 0, heap->getSize()); + timeStamp[i] = 0; + } + gotMem = true; + } +} + + +/* + * Get a buffer of audio data that is about to be played. + * We don't synchronize this because in practice the writer + * is ahead of the reader, and even if we did happen to catch + * a buffer while it's being written, it's just a visualization, + * so no harm done. 
+ */ +static sp<MemoryBase> getVizBuffer() { + + allocVizBufs(); + + lastReadTime = uptimeMillis() + 100; // account for renderer delay (we shouldn't be doing this here) + + // if there is no recent buffer (yet), just return empty handed + if (lastWriteTime + TOTALBUFTIMEMSEC < lastReadTime) { + //LOGI("@@@@ no audio data to look at yet"); + return NULL; + } + + char buf[200]; + + int closestIdx = -1; + uint32_t closestTime = 0x7ffffff; + + for (int i = 0; i < NUMVIZBUF; i++) { + uint64_t tsi = timeStamp[i]; + uint64_t diff = tsi > lastReadTime ? tsi - lastReadTime : lastReadTime - tsi; + if (diff < closestTime) { + closestIdx = i; + closestTime = diff; + } + } + + + if (closestIdx >= 0) { + //LOGI("@@@ return buffer %d, %d/%d", closestIdx, uint32_t(lastReadTime), uint32_t(timeStamp[closestIdx])); + return mem[closestIdx]; + } + + // we won't get here, since we either bailed out early, or got a buffer + LOGD("Didn't expect to be here"); + return NULL; +} + +static void storeVizBuf(const void *data, int len, uint64_t time) { + // Copy the data in to the visualizer buffer + // Assume a 16 bit stereo source for now. + short *viz = (short*)mem[writeIdx]->pointer(); + short *src = (short*)data; + for (int i = 0; i < VIZBUFFRAMES; i++) { + // Degrade quality by mixing to mono and clearing the lowest 3 bits. 
+ // This should still be good enough for a visualization + *viz++ = ((int(src[0]) + int(src[1])) >> 1) & ~0x7; + src += 2; + } + timeStamp[writeIdx++] = time; + if (writeIdx >= NUMVIZBUF) { + writeIdx = 0; + } +} + +static void makeVizBuffers(const char *data, int len, uint64_t time) { + + allocVizBufs(); + + uint64_t startTime = time; + const int frameSize = 4; // 16 bit stereo sample is 4 bytes + while (len >= VIZBUFFRAMES * frameSize) { + storeVizBuf(data, len, time); + data += VIZBUFFRAMES * frameSize; + len -= VIZBUFFRAMES * frameSize; + time += 1000 * VIZBUFFRAMES / 44100; + } + //LOGI("@@@ stored buffers from %d to %d", uint32_t(startTime), uint32_t(time)); +} + +sp<IMemory> MediaPlayerService::snoop() +{ + sp<MemoryBase> mem = getVizBuffer(); + return mem; +} + + #undef LOG_TAG #define LOG_TAG "AudioSink" MediaPlayerService::AudioOutput::AudioOutput() -{ + : mCallback(NULL), + mCallbackCookie(NULL) { mTrack = 0; mStreamType = AudioSystem::MUSIC; mLeftVolume = 1.0; mRightVolume = 1.0; mLatency = 0; mMsecsPerFrame = 0; + mNumFramesWritten = 0; setMinBufferCount(); } @@ -1003,8 +1395,13 @@ float MediaPlayerService::AudioOutput::msecsPerFrame() const return mMsecsPerFrame; } -status_t MediaPlayerService::AudioOutput::open(uint32_t sampleRate, int channelCount, int format, int bufferCount) +status_t MediaPlayerService::AudioOutput::open( + uint32_t sampleRate, int channelCount, int format, int bufferCount, + AudioCallback cb, void *cookie) { + mCallback = cb; + mCallbackCookie = cookie; + // Check argument "bufferCount" against the mininum buffer count if (bufferCount < mMinBufferCount) { LOGD("bufferCount (%d) is too small and increased to %d", bufferCount, mMinBufferCount); @@ -1025,7 +1422,27 @@ status_t MediaPlayerService::AudioOutput::open(uint32_t sampleRate, int channelC } frameCount = (sampleRate*afFrameCount*bufferCount)/afSampleRate; - AudioTrack *t = new AudioTrack(mStreamType, sampleRate, format, channelCount, frameCount); + + AudioTrack *t; + if 
(mCallback != NULL) { + t = new AudioTrack( + mStreamType, + sampleRate, + format, + (channelCount == 2) ? AudioSystem::CHANNEL_OUT_STEREO : AudioSystem::CHANNEL_OUT_MONO, + frameCount, + 0 /* flags */, + CallbackWrapper, + this); + } else { + t = new AudioTrack( + mStreamType, + sampleRate, + format, + (channelCount == 2) ? AudioSystem::CHANNEL_OUT_STEREO : AudioSystem::CHANNEL_OUT_MONO, + frameCount); + } + if ((t == 0) || (t->initCheck() != NO_ERROR)) { LOGE("Unable to create audio track"); delete t; @@ -1046,13 +1463,38 @@ void MediaPlayerService::AudioOutput::start() if (mTrack) { mTrack->setVolume(mLeftVolume, mRightVolume); mTrack->start(); + mTrack->getPosition(&mNumFramesWritten); } } ssize_t MediaPlayerService::AudioOutput::write(const void* buffer, size_t size) { + LOG_FATAL_IF(mCallback != NULL, "Don't call write if supplying a callback."); + //LOGV("write(%p, %u)", buffer, size); - if (mTrack) return mTrack->write(buffer, size); + if (mTrack) { + // Only make visualization buffers if anyone recently requested visualization data + uint64_t now = uptimeMillis(); + if (lastReadTime + TOTALBUFTIMEMSEC >= now) { + // Based on the current play counter, the number of frames written and + // the current real time we can calculate the approximate real start + // time of the buffer we're about to write. 
+ uint32_t pos; + mTrack->getPosition(&pos); + + // we're writing ahead by this many frames: + int ahead = mNumFramesWritten - pos; + //LOGI("@@@ written: %d, playpos: %d, latency: %d", mNumFramesWritten, pos, mTrack->latency()); + // which is this many milliseconds, assuming 44100 Hz: + ahead /= 44; + + makeVizBuffers((const char*)buffer, size, now + ahead + mTrack->latency()); + lastWriteTime = now; + } + ssize_t ret = mTrack->write(buffer, size); + mNumFramesWritten += ret / 4; // assume 16 bit stereo + return ret; + } return NO_INIT; } @@ -1060,6 +1502,7 @@ void MediaPlayerService::AudioOutput::stop() { LOGV("stop"); if (mTrack) mTrack->stop(); + lastWriteTime = 0; } void MediaPlayerService::AudioOutput::flush() @@ -1072,6 +1515,7 @@ void MediaPlayerService::AudioOutput::pause() { LOGV("pause"); if (mTrack) mTrack->pause(); + lastWriteTime = 0; } void MediaPlayerService::AudioOutput::close() @@ -1091,6 +1535,20 @@ void MediaPlayerService::AudioOutput::setVolume(float left, float right) } } +// static +void MediaPlayerService::AudioOutput::CallbackWrapper( + int event, void *cookie, void *info) { + if (event != AudioTrack::EVENT_MORE_DATA) { + return; + } + + AudioOutput *me = (AudioOutput *)cookie; + AudioTrack::Buffer *buffer = (AudioTrack::Buffer *)info; + + (*me->mCallback)( + me, buffer->raw, buffer->size, me->mCallbackCookie); +} + #undef LOG_TAG #define LOG_TAG "AudioCache" MediaPlayerService::AudioCache::AudioCache(const char* name) : @@ -1111,8 +1569,14 @@ float MediaPlayerService::AudioCache::msecsPerFrame() const return mMsecsPerFrame; } -status_t MediaPlayerService::AudioCache::open(uint32_t sampleRate, int channelCount, int format, int bufferCount) +status_t MediaPlayerService::AudioCache::open( + uint32_t sampleRate, int channelCount, int format, int bufferCount, + AudioCallback cb, void *cookie) { + if (cb != NULL) { + return UNKNOWN_ERROR; // TODO: implement this. 
+ } + LOGV("open(%u, %d, %d, %d)", sampleRate, channelCount, format, bufferCount); if (mHeap->getHeapID() < 0) return NO_INIT; mSampleRate = sampleRate; @@ -1177,4 +1641,4 @@ void MediaPlayerService::AudioCache::notify(void* cookie, int msg, int ext1, int p->mSignal.signal(); } -}; // namespace android +} // namespace android diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h index f138886814b4..b00f5b747785 100644 --- a/media/libmediaplayerservice/MediaPlayerService.h +++ b/media/libmediaplayerservice/MediaPlayerService.h @@ -18,17 +18,24 @@ #ifndef ANDROID_MEDIAPLAYERSERVICE_H #define ANDROID_MEDIAPLAYERSERVICE_H -#include <utils.h> +#include <utils/Log.h> +#include <utils/threads.h> +#include <utils/List.h> +#include <utils/Errors.h> #include <utils/KeyedVector.h> +#include <utils/Vector.h> #include <ui/SurfaceComposerClient.h> #include <media/IMediaPlayerService.h> #include <media/MediaPlayerInterface.h> +#include <media/Metadata.h> namespace android { class IMediaRecorder; class IMediaMetadataRetriever; +class IOMX; +class MediaRecorderClient; #define CALLBACK_ANTAGONIZER 0 #if CALLBACK_ANTAGONIZER @@ -69,7 +76,12 @@ class MediaPlayerService : public BnMediaPlayerService virtual ssize_t frameSize() const; virtual uint32_t latency() const; virtual float msecsPerFrame() const; - virtual status_t open(uint32_t sampleRate, int channelCount, int format, int bufferCount=4); + + virtual status_t open( + uint32_t sampleRate, int channelCount, + int format, int bufferCount, + AudioCallback cb, void *cookie); + virtual void start(); virtual ssize_t write(const void* buffer, size_t size); virtual void stop(); @@ -84,8 +96,12 @@ class MediaPlayerService : public BnMediaPlayerService static int getMinBufferCount(); private: static void setMinBufferCount(); + static void CallbackWrapper( + int event, void *me, void *info); AudioTrack* mTrack; + AudioCallback mCallback; + void * mCallbackCookie; int mStreamType; 
float mLeftVolume; float mRightVolume; @@ -97,6 +113,8 @@ class MediaPlayerService : public BnMediaPlayerService static bool mIsOnEmulator; static int mMinBufferCount; // 12 for emulator; otherwise 4 + public: // visualization hack support + uint32_t mNumFramesWritten; }; class AudioCache : public MediaPlayerBase::AudioSink @@ -113,7 +131,12 @@ class MediaPlayerService : public BnMediaPlayerService virtual ssize_t frameSize() const { return ssize_t(mChannelCount * ((mFormat == AudioSystem::PCM_16_BIT)?sizeof(int16_t):sizeof(u_int8_t))); } virtual uint32_t latency() const; virtual float msecsPerFrame() const; - virtual status_t open(uint32_t sampleRate, int channelCount, int format, int bufferCount=1); + + virtual status_t open( + uint32_t sampleRate, int channelCount, int format, + int bufferCount = 1, + AudioCallback cb = NULL, void *cookie = NULL); + virtual void start() {} virtual ssize_t write(const void* buffer, size_t size); virtual void stop() {} @@ -140,7 +163,7 @@ class MediaPlayerService : public BnMediaPlayerService sp<MemoryHeapBase> mHeap; float mMsecsPerFrame; uint16_t mChannelCount; - uint16_t mFormat; + uint16_t mFormat; ssize_t mFrameCount; uint32_t mSampleRate; uint32_t mSize; @@ -153,6 +176,7 @@ public: // IMediaPlayerService interface virtual sp<IMediaRecorder> createMediaRecorder(pid_t pid); + void removeMediaRecorderClient(wp<MediaRecorderClient> client); virtual sp<IMediaMetadataRetriever> createMetadataRetriever(pid_t pid); // House keeping for media player clients @@ -160,11 +184,14 @@ public: virtual sp<IMediaPlayer> create(pid_t pid, const sp<IMediaPlayerClient>& client, int fd, int64_t offset, int64_t length); virtual sp<IMemory> decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, int* pFormat); virtual sp<IMemory> decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, int* pFormat); + virtual sp<IMemory> snoop(); + virtual sp<IOMX> getOMX(); virtual status_t dump(int fd, const 
Vector<String16>& args); void removeClient(wp<Client> client); + private: class Client : public BnMediaPlayer { @@ -184,6 +211,11 @@ private: virtual status_t setAudioStreamType(int type); virtual status_t setLooping(int loop); virtual status_t setVolume(float leftVolume, float rightVolume); + virtual status_t invoke(const Parcel& request, Parcel *reply); + virtual status_t setMetadataFilter(const Parcel& filter); + virtual status_t getMetadata(bool update_only, + bool apply_filter, + Parcel *reply); sp<MediaPlayerBase> createPlayer(player_type playerType); status_t setDataSource(const char *url); @@ -206,6 +238,18 @@ private: sp<MediaPlayerBase> getPlayer() const { Mutex::Autolock lock(mLock); return mPlayer; } + + + // @param type Of the metadata to be tested. + // @return true if the metadata should be dropped according to + // the filters. + bool shouldDropMetadata(media::Metadata::Type type) const; + + // Add a new element to the set of metadata updated. Noop if + // the element exists already. + // @param type Of the metadata to be recorded. + void addNewMetadataUpdate(media::Metadata::Type type); + mutable Mutex mLock; sp<MediaPlayerBase> mPlayer; sp<MediaPlayerService> mService; @@ -215,6 +259,17 @@ private: status_t mStatus; bool mLoop; int32_t mConnId; + + // Metadata filters. + media::Metadata::Filter mMetadataAllow; // protected by mLock + media::Metadata::Filter mMetadataDrop; // protected by mLock + + // Metadata updated. For each MEDIA_INFO_METADATA_UPDATE + // notification we try to update mMetadataUpdated which is a + // set: no duplicate. + // getMetadata clears this set. 
+ media::Metadata::Filter mMetadataUpdated; // protected by mLock + #if CALLBACK_ANTAGONIZER Antagonizer* mAntagonizer; #endif @@ -227,7 +282,9 @@ private: mutable Mutex mLock; SortedVector< wp<Client> > mClients; + SortedVector< wp<MediaRecorderClient> > mMediaRecorderClients; int32_t mNextConnId; + sp<IOMX> mOMX; }; // ---------------------------------------------------------------------------- @@ -235,4 +292,3 @@ private: }; // namespace android #endif // ANDROID_MEDIAPLAYERSERVICE_H - diff --git a/media/libmediaplayerservice/MediaRecorderClient.cpp b/media/libmediaplayerservice/MediaRecorderClient.cpp index 8bc410cbbe91..95ee3e46218a 100644 --- a/media/libmediaplayerservice/MediaRecorderClient.cpp +++ b/media/libmediaplayerservice/MediaRecorderClient.cpp @@ -25,14 +25,17 @@ #include <string.h> #include <cutils/atomic.h> #include <android_runtime/ActivityManager.h> -#include <utils/IPCThreadState.h> -#include <utils/IServiceManager.h> -#include <utils/MemoryHeapBase.h> -#include <utils/MemoryBase.h> +#include <binder/IPCThreadState.h> +#include <binder/IServiceManager.h> +#include <binder/MemoryHeapBase.h> +#include <binder/MemoryBase.h> #include <media/PVMediaRecorder.h> #include <utils/String16.h> +#include <media/AudioTrack.h> + #include "MediaRecorderClient.h" +#include "MediaPlayerService.h" namespace android { @@ -80,6 +83,7 @@ status_t MediaRecorderClient::setVideoSource(int vs) Mutex::Autolock lock(mLock); if (mRecorder == NULL) { LOGE("recorder is not initialized"); + return NO_INIT; } return mRecorder->setVideoSource((video_source)vs); } @@ -93,6 +97,7 @@ status_t MediaRecorderClient::setAudioSource(int as) Mutex::Autolock lock(mLock); if (mRecorder == NULL) { LOGE("recorder is not initialized"); + return NO_INIT; } return mRecorder->setAudioSource((audio_source)as); } @@ -271,15 +276,18 @@ status_t MediaRecorderClient::release() if (mRecorder != NULL) { delete mRecorder; mRecorder = NULL; + wp<MediaRecorderClient> client(this); + 
mMediaPlayerService->removeMediaRecorderClient(client); } return NO_ERROR; } -MediaRecorderClient::MediaRecorderClient(pid_t pid) +MediaRecorderClient::MediaRecorderClient(const sp<MediaPlayerService>& service, pid_t pid) { LOGV("Client constructor"); mPid = pid; mRecorder = new PVMediaRecorder(); + mMediaPlayerService = service; } MediaRecorderClient::~MediaRecorderClient() diff --git a/media/libmediaplayerservice/MediaRecorderClient.h b/media/libmediaplayerservice/MediaRecorderClient.h index 6a1c2d5c4636..6260441a8825 100644 --- a/media/libmediaplayerservice/MediaRecorderClient.h +++ b/media/libmediaplayerservice/MediaRecorderClient.h @@ -24,6 +24,7 @@ namespace android { class PVMediaRecorder; class ISurface; +class MediaPlayerService; class MediaRecorderClient : public BnMediaRecorder { @@ -53,12 +54,13 @@ public: private: friend class MediaPlayerService; // for accessing private constructor - MediaRecorderClient(pid_t pid); + MediaRecorderClient(const sp<MediaPlayerService>& service, pid_t pid); virtual ~MediaRecorderClient(); pid_t mPid; Mutex mLock; PVMediaRecorder *mRecorder; + sp<MediaPlayerService> mMediaPlayerService; }; }; // namespace android diff --git a/media/libmediaplayerservice/MetadataRetrieverClient.cpp b/media/libmediaplayerservice/MetadataRetrieverClient.cpp index 6cb4a34ead9f..2cdc351b0f87 100644 --- a/media/libmediaplayerservice/MetadataRetrieverClient.cpp +++ b/media/libmediaplayerservice/MetadataRetrieverClient.cpp @@ -21,25 +21,41 @@ #include <sys/types.h> #include <sys/stat.h> +#include <sys/resource.h> #include <dirent.h> #include <unistd.h> #include <string.h> #include <cutils/atomic.h> -#include <utils/MemoryDealer.h> +#include <binder/MemoryDealer.h> #include <android_runtime/ActivityManager.h> -#include <utils/IPCThreadState.h> -#include <utils/IServiceManager.h> +#include <binder/IPCThreadState.h> +#include <binder/IServiceManager.h> #include <media/MediaMetadataRetrieverInterface.h> #include <media/MediaPlayerInterface.h> #include 
<media/PVMetadataRetriever.h> #include <private/media/VideoFrame.h> - +#include "VorbisMetadataRetriever.h" +#include "MidiMetadataRetriever.h" #include "MetadataRetrieverClient.h" +/* desktop Linux needs a little help with gettid() */ +#if defined(HAVE_GETTID) && !defined(HAVE_ANDROID_OS) +#define __KERNEL__ +# include <linux/unistd.h> +#ifdef _syscall0 +_syscall0(pid_t,gettid) +#else +pid_t gettid() { return syscall(__NR_gettid);} +#endif +#undef __KERNEL__ +#endif namespace android { +extern player_type getPlayerType(const char* url); +extern player_type getPlayerType(int fd, int64_t offset, int64_t length); + MetadataRetrieverClient::MetadataRetrieverClient(pid_t pid) { LOGV("MetadataRetrieverClient constructor pid(%d)", pid); @@ -48,15 +64,8 @@ MetadataRetrieverClient::MetadataRetrieverClient(pid_t pid) mAlbumArtDealer = NULL; mThumbnail = NULL; mAlbumArt = NULL; - -#ifndef NO_OPENCORE - mRetriever = new PVMetadataRetriever(); -#else mRetriever = NULL; -#endif - if (mRetriever == NULL) { - LOGE("failed to initialize the retriever"); - } + mMode = METADATA_MODE_FRAME_CAPTURE_AND_METADATA_RETRIEVAL; } MetadataRetrieverClient::~MetadataRetrieverClient() @@ -71,7 +80,7 @@ status_t MetadataRetrieverClient::dump(int fd, const Vector<String16>& args) con char buffer[SIZE]; String8 result; result.append(" MetadataRetrieverClient\n"); - snprintf(buffer, 255, " pid(%d)\n", mPid); + snprintf(buffer, 255, " pid(%d) mode(%d)\n", mPid, mMode); result.append(buffer); write(fd, result.string(), result.size()); write(fd, "\n", 1); @@ -87,9 +96,40 @@ void MetadataRetrieverClient::disconnect() mAlbumArtDealer.clear(); mThumbnail.clear(); mAlbumArt.clear(); + mMode = METADATA_MODE_FRAME_CAPTURE_AND_METADATA_RETRIEVAL; IPCThreadState::self()->flushCommands(); } +static sp<MediaMetadataRetrieverBase> createRetriever(player_type playerType) +{ + sp<MediaMetadataRetrieverBase> p; + switch (playerType) { +#ifndef NO_OPENCORE + case PV_PLAYER: + LOGV("create pv metadata retriever"); + 
p = new PVMetadataRetriever(); + break; +#endif + case VORBIS_PLAYER: + LOGV("create vorbis metadata retriever"); + p = new VorbisMetadataRetriever(); + break; + case SONIVOX_PLAYER: + LOGV("create midi metadata retriever"); + p = new MidiMetadataRetriever(); + break; + default: + // TODO: + // support for STAGEFRIGHT_PLAYER and TEST_PLAYER + LOGE("player type %d is not supported", playerType); + break; + } + if (p == NULL) { + LOGE("failed to create a retriever object"); + } + return p; +} + status_t MetadataRetrieverClient::setDataSource(const char *url) { LOGV("setDataSource(%s)", url); @@ -97,28 +137,33 @@ status_t MetadataRetrieverClient::setDataSource(const char *url) if (url == NULL) { return UNKNOWN_ERROR; } - if (mRetriever == NULL) { - LOGE("retriever is not initialized"); - return NO_INIT; + player_type playerType = getPlayerType(url); +#if !defined(NO_OPENCORE) && defined(BUILD_WITH_FULL_STAGEFRIGHT) + if (playerType == STAGEFRIGHT_PLAYER) { + // Stagefright doesn't support metadata in this branch yet. 
+ playerType = PV_PLAYER; + } +#endif + LOGV("player type = %d", playerType); + sp<MediaMetadataRetrieverBase> p = createRetriever(playerType); + if (p == NULL) return NO_INIT; + status_t ret = p->setMode(mMode); + if (ret == NO_ERROR) { + ret = p->setDataSource(url); } - return mRetriever->setDataSource(url); + if (ret == NO_ERROR) mRetriever = p; + return ret; } status_t MetadataRetrieverClient::setDataSource(int fd, int64_t offset, int64_t length) { LOGV("setDataSource fd=%d, offset=%lld, length=%lld", fd, offset, length); Mutex::Autolock lock(mLock); - if (mRetriever == NULL) { - LOGE("retriever is not initialized"); - ::close(fd); - return NO_INIT; - } - struct stat sb; int ret = fstat(fd, &sb); if (ret != 0) { LOGE("fstat(%d) failed: %d, %s", fd, ret, strerror(errno)); - return UNKNOWN_ERROR; + return BAD_VALUE; } LOGV("st_dev = %llu", sb.st_dev); LOGV("st_mode = %u", sb.st_mode); @@ -129,13 +174,31 @@ status_t MetadataRetrieverClient::setDataSource(int fd, int64_t offset, int64_t if (offset >= sb.st_size) { LOGE("offset (%lld) bigger than file size (%llu)", offset, sb.st_size); ::close(fd); - return UNKNOWN_ERROR; + return BAD_VALUE; } if (offset + length > sb.st_size) { length = sb.st_size - offset; - LOGE("calculated length = %lld", length); + LOGV("calculated length = %lld", length); + } + + player_type playerType = getPlayerType(fd, offset, length); +#if !defined(NO_OPENCORE) && defined(BUILD_WITH_FULL_STAGEFRIGHT) + if (playerType == STAGEFRIGHT_PLAYER) { + // Stagefright doesn't support metadata in this branch yet. 
+ playerType = PV_PLAYER; } - status_t status = mRetriever->setDataSource(fd, offset, length); +#endif + LOGV("player type = %d", playerType); + sp<MediaMetadataRetrieverBase> p = createRetriever(playerType); + if (p == NULL) { + ::close(fd); + return NO_INIT; + } + status_t status = p->setMode(mMode); + if (status == NO_ERROR) { + p->setDataSource(fd, offset, length); + } + if (status == NO_ERROR) mRetriever = p; ::close(fd); return status; } @@ -144,28 +207,37 @@ status_t MetadataRetrieverClient::setMode(int mode) { LOGV("setMode"); Mutex::Autolock lock(mLock); - if (mRetriever == NULL) { - LOGE("retriever is not initialized"); - return NO_INIT; + if (mode < METADATA_MODE_NOOP || + mode > METADATA_MODE_FRAME_CAPTURE_AND_METADATA_RETRIEVAL) { + LOGE("invalid mode %d", mode); + return BAD_VALUE; } - return mRetriever->setMode(mode); + mMode = mode; + return NO_ERROR; } status_t MetadataRetrieverClient::getMode(int* mode) const { LOGV("getMode"); Mutex::Autolock lock(mLock); + + // TODO: + // This may not be necessary. 
+ // If setDataSource() has not been called, return the cached value + // otherwise, return the value retrieved from the retriever if (mRetriever == NULL) { - LOGE("retriever is not initialized"); - return NO_INIT; + *mode = mMode; + } else { + mRetriever->getMode(mode); } - return mRetriever->getMode(mode); + return NO_ERROR; } sp<IMemory> MetadataRetrieverClient::captureFrame() { LOGV("captureFrame"); Mutex::Autolock lock(mLock); + Priority priority(ANDROID_PRIORITY_BACKGROUND); mThumbnail.clear(); mThumbnailDealer.clear(); if (mRetriever == NULL) { @@ -207,6 +279,7 @@ sp<IMemory> MetadataRetrieverClient::extractAlbumArt() { LOGV("extractAlbumArt"); Mutex::Autolock lock(mLock); + Priority priority(ANDROID_PRIORITY_BACKGROUND); mAlbumArt.clear(); mAlbumArtDealer.clear(); if (mRetriever == NULL) { @@ -248,7 +321,19 @@ const char* MetadataRetrieverClient::extractMetadata(int keyCode) LOGE("retriever is not initialized"); return NULL; } + Priority priority(ANDROID_PRIORITY_BACKGROUND); return mRetriever->extractMetadata(keyCode); } +MetadataRetrieverClient::Priority::Priority(int newPriority) +{ + mOldPriority = getpriority(PRIO_PROCESS, 0); + setpriority(PRIO_PROCESS, 0, newPriority); +} + +MetadataRetrieverClient::Priority::~Priority() +{ + setpriority(PRIO_PROCESS, 0, mOldPriority); +} + }; // namespace android diff --git a/media/libmediaplayerservice/MetadataRetrieverClient.h b/media/libmediaplayerservice/MetadataRetrieverClient.h index ce29c98791b4..852d73497fd4 100644 --- a/media/libmediaplayerservice/MetadataRetrieverClient.h +++ b/media/libmediaplayerservice/MetadataRetrieverClient.h @@ -18,9 +18,12 @@ #ifndef ANDROID_MEDIAMETADATARETRIEVERSERVICE_H #define ANDROID_MEDIAMETADATARETRIEVERSERVICE_H -#include <utils.h> +#include <utils/Log.h> +#include <utils/threads.h> +#include <utils/List.h> +#include <utils/Errors.h> #include <utils/KeyedVector.h> -#include <utils/IMemory.h> +#include <binder/IMemory.h> #include <media/MediaMetadataRetrieverInterface.h> @@ 
-51,12 +54,23 @@ public: private: friend class MediaPlayerService; + class Priority + { + public: + Priority(int newPriority); + ~Priority(); + private: + Priority(); + int mOldPriority; + }; + explicit MetadataRetrieverClient(pid_t pid); virtual ~MetadataRetrieverClient(); mutable Mutex mLock; sp<MediaMetadataRetrieverBase> mRetriever; pid_t mPid; + int mMode; // Keep the shared memory copy of album art and capture frame (for thumbnail) sp<MemoryDealer> mAlbumArtDealer; diff --git a/media/libmediaplayerservice/MidiFile.h b/media/libmediaplayerservice/MidiFile.h index 302f1cf2f8e4..25d4a1b624ba 100644 --- a/media/libmediaplayerservice/MidiFile.h +++ b/media/libmediaplayerservice/MidiFile.h @@ -46,6 +46,9 @@ public: virtual status_t reset(); virtual status_t setLooping(int loop); virtual player_type playerType() { return SONIVOX_PLAYER; } + virtual status_t invoke(const Parcel& request, Parcel *reply) { + return INVALID_OPERATION; + } private: status_t createOutputTrack(); @@ -74,4 +77,3 @@ private: }; // namespace android #endif // ANDROID_MIDIFILE_H - diff --git a/media/libmediaplayerservice/MidiMetadataRetriever.cpp b/media/libmediaplayerservice/MidiMetadataRetriever.cpp new file mode 100644 index 000000000000..3795b7b3f7e5 --- /dev/null +++ b/media/libmediaplayerservice/MidiMetadataRetriever.cpp @@ -0,0 +1,91 @@ +/* +** +** Copyright 2009, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "MidiMetadataRetriever" +#include <utils/Log.h> + +#include "MidiMetadataRetriever.h" +#include <media/mediametadataretriever.h> + +namespace android { + +static status_t ERROR_NOT_OPEN = -1; +static status_t ERROR_OPEN_FAILED = -2; +static status_t ERROR_EAS_FAILURE = -3; +static status_t ERROR_ALLOCATE_FAILED = -4; + +void MidiMetadataRetriever::clearMetadataValues() +{ + LOGV("clearMetadataValues"); + mMetadataValues[0][0] = '\0'; +} + +status_t MidiMetadataRetriever::setDataSource(const char *url) +{ + LOGV("setDataSource: %s", url? url: "NULL pointer"); + Mutex::Autolock lock(mLock); + clearMetadataValues(); + if (mMidiPlayer == 0) { + mMidiPlayer = new MidiFile(); + } + return mMidiPlayer->setDataSource(url); +} + +status_t MidiMetadataRetriever::setDataSource(int fd, int64_t offset, int64_t length) +{ + LOGV("setDataSource: fd(%d), offset(%lld), and length(%lld)", fd, offset, length); + Mutex::Autolock lock(mLock); + clearMetadataValues(); + if (mMidiPlayer == 0) { + mMidiPlayer = new MidiFile(); + } + return mMidiPlayer->setDataSource(fd, offset, length);; +} + +const char* MidiMetadataRetriever::extractMetadata(int keyCode) +{ + LOGV("extractMetdata: key(%d)", keyCode); + Mutex::Autolock lock(mLock); + if (mMidiPlayer == 0 || mMidiPlayer->initCheck() != NO_ERROR) { + LOGE("Midi player is not initialized yet"); + return NULL; + } + switch (keyCode) { + case METADATA_KEY_DURATION: + { + if (mMetadataValues[0][0] == '\0') { + int duration = -1; + if (mMidiPlayer->getDuration(&duration) != NO_ERROR) { + LOGE("failed to get duration"); + return NULL; + } + snprintf(mMetadataValues[0], MAX_METADATA_STRING_LENGTH, "%d", duration); + } + + LOGV("duration: %s ms", mMetadataValues[0]); + return mMetadataValues[0]; + } + default: + LOGE("Unsupported key code (%d)", keyCode); + return NULL; + } + return NULL; +} + +}; + diff --git a/media/libmediaplayerservice/MidiMetadataRetriever.h 
b/media/libmediaplayerservice/MidiMetadataRetriever.h new file mode 100644 index 000000000000..73ff3475733c --- /dev/null +++ b/media/libmediaplayerservice/MidiMetadataRetriever.h @@ -0,0 +1,49 @@ +/* +** +** Copyright 2009, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef ANDROID_MIDIMETADATARETRIEVER_H +#define ANDROID_MIDIMETADATARETRIEVER_H + +#include <utils/threads.h> +#include <utils/Errors.h> +#include <media/MediaMetadataRetrieverInterface.h> + +#include "MidiFile.h" + +namespace android { + +class MidiMetadataRetriever : public MediaMetadataRetrieverInterface { +public: + MidiMetadataRetriever() {} + ~MidiMetadataRetriever() {} + + virtual status_t setDataSource(const char *url); + virtual status_t setDataSource(int fd, int64_t offset, int64_t length); + virtual const char* extractMetadata(int keyCode); + +private: + static const uint32_t MAX_METADATA_STRING_LENGTH = 128; + void clearMetadataValues(); + + Mutex mLock; + sp<MidiFile> mMidiPlayer; + char mMetadataValues[1][MAX_METADATA_STRING_LENGTH]; +}; + +}; // namespace android + +#endif // ANDROID_MIDIMETADATARETRIEVER_H diff --git a/media/libmediaplayerservice/StagefrightPlayer.cpp b/media/libmediaplayerservice/StagefrightPlayer.cpp new file mode 100644 index 000000000000..dbee451b01e1 --- /dev/null +++ b/media/libmediaplayerservice/StagefrightPlayer.cpp @@ -0,0 +1,210 @@ +//#define LOG_NDEBUG 0 +#define LOG_TAG "StagefrightPlayer" +#include 
<utils/Log.h> + +#include "StagefrightPlayer.h" +#include <media/stagefright/MediaPlayerImpl.h> + +namespace android { + +StagefrightPlayer::StagefrightPlayer() + : mPlayer(NULL) { + LOGV("StagefrightPlayer"); +} + +StagefrightPlayer::~StagefrightPlayer() { + LOGV("~StagefrightPlayer"); + reset(); + LOGV("~StagefrightPlayer done."); +} + +status_t StagefrightPlayer::initCheck() { + LOGV("initCheck"); + return OK; +} + +status_t StagefrightPlayer::setDataSource(const char *url) { + LOGV("setDataSource('%s')", url); + + reset(); + mPlayer = new MediaPlayerImpl(url); + + status_t err = mPlayer->initCheck(); + if (err != OK) { + delete mPlayer; + mPlayer = NULL; + } else { + mPlayer->setAudioSink(mAudioSink); + } + + return err; +} + +// Warning: The filedescriptor passed into this method will only be valid until +// the method returns, if you want to keep it, dup it! +status_t StagefrightPlayer::setDataSource(int fd, int64_t offset, int64_t length) { + LOGV("setDataSource(%d, %lld, %lld)", fd, offset, length); + + reset(); + mPlayer = new MediaPlayerImpl(dup(fd), offset, length); + + status_t err = mPlayer->initCheck(); + if (err != OK) { + delete mPlayer; + mPlayer = NULL; + } else { + mPlayer->setAudioSink(mAudioSink); + } + + return err; +} + +status_t StagefrightPlayer::setVideoSurface(const sp<ISurface> &surface) { + LOGV("setVideoSurface"); + + if (mPlayer == NULL) { + return NO_INIT; + } + + mPlayer->setISurface(surface); + + return OK; +} + +status_t StagefrightPlayer::prepare() { + LOGV("prepare"); + + if (mPlayer == NULL) { + return NO_INIT; + } + + sendEvent( + MEDIA_SET_VIDEO_SIZE, + mPlayer->getWidth(), mPlayer->getHeight()); + + return OK; +} + +status_t StagefrightPlayer::prepareAsync() { + LOGV("prepareAsync"); + + status_t err = prepare(); + + if (err != OK) { + return err; + } + + sendEvent(MEDIA_PREPARED); + + return OK; +} + +status_t StagefrightPlayer::start() { + LOGV("start"); + + if (mPlayer == NULL) { + return NO_INIT; + } + + mPlayer->play(); 
+ + return OK; +} + +status_t StagefrightPlayer::stop() { + LOGV("stop"); + + if (mPlayer == NULL) { + return NO_INIT; + } + + reset(); + + return OK; +} + +status_t StagefrightPlayer::pause() { + LOGV("pause"); + + if (mPlayer == NULL) { + return NO_INIT; + } + + mPlayer->pause(); + + return OK; +} + +bool StagefrightPlayer::isPlaying() { + LOGV("isPlaying"); + return mPlayer != NULL && mPlayer->isPlaying(); +} + +status_t StagefrightPlayer::seekTo(int msec) { + LOGV("seekTo"); + + if (mPlayer == NULL) { + return NO_INIT; + } + + status_t err = mPlayer->seekTo((int64_t)msec * 1000); + + sendEvent(MEDIA_SEEK_COMPLETE); + + return err; +} + +status_t StagefrightPlayer::getCurrentPosition(int *msec) { + LOGV("getCurrentPosition"); + + if (mPlayer == NULL) { + return NO_INIT; + } + + *msec = mPlayer->getPosition() / 1000; + return OK; +} + +status_t StagefrightPlayer::getDuration(int *msec) { + LOGV("getDuration"); + + if (mPlayer == NULL) { + return NO_INIT; + } + + *msec = mPlayer->getDuration() / 1000; + return OK; +} + +status_t StagefrightPlayer::reset() { + LOGV("reset"); + + delete mPlayer; + mPlayer = NULL; + + return OK; +} + +status_t StagefrightPlayer::setLooping(int loop) { + LOGV("setLooping"); + return UNKNOWN_ERROR; +} + +player_type StagefrightPlayer::playerType() { + LOGV("playerType"); + return STAGEFRIGHT_PLAYER; +} + +status_t StagefrightPlayer::invoke(const Parcel &request, Parcel *reply) { + return INVALID_OPERATION; +} + +void StagefrightPlayer::setAudioSink(const sp<AudioSink> &audioSink) { + MediaPlayerInterface::setAudioSink(audioSink); + + if (mPlayer != NULL) { + mPlayer->setAudioSink(audioSink); + } +} + +} // namespace android diff --git a/media/libmediaplayerservice/StagefrightPlayer.h b/media/libmediaplayerservice/StagefrightPlayer.h new file mode 100644 index 000000000000..f214872c7c64 --- /dev/null +++ b/media/libmediaplayerservice/StagefrightPlayer.h @@ -0,0 +1,60 @@ +/* +** +** Copyright 2009, The Android Open Source Project +** +** 
Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef ANDROID_STAGEFRIGHTPLAYER_H +#define ANDROID_STAGEFRIGHTPLAYER_H + +#include <media/MediaPlayerInterface.h> + +namespace android { + +class MediaPlayerImpl; + +class StagefrightPlayer : public MediaPlayerInterface { +public: + StagefrightPlayer(); + virtual ~StagefrightPlayer(); + + virtual status_t initCheck(); + virtual status_t setDataSource(const char *url); + virtual status_t setDataSource(int fd, int64_t offset, int64_t length); + virtual status_t setVideoSurface(const sp<ISurface> &surface); + virtual status_t prepare(); + virtual status_t prepareAsync(); + virtual status_t start(); + virtual status_t stop(); + virtual status_t pause(); + virtual bool isPlaying(); + virtual status_t seekTo(int msec); + virtual status_t getCurrentPosition(int *msec); + virtual status_t getDuration(int *msec); + virtual status_t reset(); + virtual status_t setLooping(int loop); + virtual player_type playerType(); + virtual status_t invoke(const Parcel &request, Parcel *reply); + virtual void setAudioSink(const sp<AudioSink> &audioSink); + +private: + MediaPlayerImpl *mPlayer; + + StagefrightPlayer(const StagefrightPlayer &); + StagefrightPlayer &operator=(const StagefrightPlayer &); +}; + +} // namespace android + +#endif // ANDROID_STAGEFRIGHTPLAYER_H diff --git a/media/libmediaplayerservice/TestPlayerStub.cpp b/media/libmediaplayerservice/TestPlayerStub.cpp new file mode 100644 index 000000000000..862770864565 
--- /dev/null +++ b/media/libmediaplayerservice/TestPlayerStub.cpp @@ -0,0 +1,196 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "TestPlayerStub" +#include "utils/Log.h" + +#include "TestPlayerStub.h" + +#include <dlfcn.h> // for dlopen/dlclose +#include <stdlib.h> +#include <string.h> +#include <cutils/properties.h> +#include <utils/Errors.h> // for status_t + +#include "media/MediaPlayerInterface.h" + + +namespace { +using android::status_t; +using android::MediaPlayerBase; + +const char *kTestUrlScheme = "test:"; +const char *kUrlParam = "url="; + +const char *kBuildTypePropName = "ro.build.type"; +const char *kEngBuild = "eng"; +const char *kTestBuild = "test"; + +// @return true if the current build is 'eng' or 'test'. 
+bool isTestBuild() +{ + char prop[PROPERTY_VALUE_MAX] = { '\0', }; + + property_get(kBuildTypePropName, prop, '\0'); + return strcmp(prop, kEngBuild) == 0 || strcmp(prop, kTestBuild) == 0; +} + +// @return true if the url scheme is 'test:' +bool isTestUrl(const char *url) +{ + return url && strncmp(url, kTestUrlScheme, strlen(kTestUrlScheme)) == 0; +} + +} // anonymous namespace + +namespace android { + +TestPlayerStub::TestPlayerStub() + :mUrl(NULL), mFilename(NULL), mContentUrl(NULL), + mHandle(NULL), mNewPlayer(NULL), mDeletePlayer(NULL), + mPlayer(NULL) { } + +TestPlayerStub::~TestPlayerStub() +{ + resetInternal(); +} + +status_t TestPlayerStub::initCheck() +{ + return isTestBuild() ? OK : INVALID_OPERATION; +} + +// Parse mUrl to get: +// * The library to be dlopened. +// * The url to be passed to the real setDataSource impl. +// +// mUrl is expected to be in following format: +// +// test:<name of the .so>?url=<url for setDataSource> +// +// The value of the url parameter is treated as a string (no +// unescaping of illegal charaters). +status_t TestPlayerStub::parseUrl() +{ + if (strlen(mUrl) < strlen(kTestUrlScheme)) { + resetInternal(); + return BAD_VALUE; + } + + char *i = mUrl + strlen(kTestUrlScheme); + + mFilename = i; + + while (*i != '\0' && *i != '?') { + ++i; + } + + if (*i == '\0' || strncmp(i + 1, kUrlParam, strlen(kUrlParam)) != 0) { + resetInternal(); + return BAD_VALUE; + } + *i = '\0'; // replace '?' to nul-terminate mFilename + + mContentUrl = i + 1 + strlen(kUrlParam); + return OK; +} + +// Load the dynamic library. +// Create the test player. +// Call setDataSource on the test player with the url in param. +status_t TestPlayerStub::setDataSource(const char *url) +{ + if (!isTestUrl(url) || NULL != mHandle) { + return INVALID_OPERATION; + } + + mUrl = strdup(url); + + status_t status = parseUrl(); + + if (OK != status) { + resetInternal(); + return status; + } + + ::dlerror(); // Clears any pending error. 
+ + // Load the test player from the url. dlopen will fail if the lib + // is not there. dls are under /system/lib + // None of the entry points should be NULL. + mHandle = ::dlopen(mFilename, RTLD_NOW | RTLD_GLOBAL); + if (!mHandle) { + LOGE("dlopen failed: %s", ::dlerror()); + resetInternal(); + return UNKNOWN_ERROR; + } + + // Load the 2 entry points to create and delete instances. + const char *err; + mNewPlayer = reinterpret_cast<NEW_PLAYER>(dlsym(mHandle, + "newPlayer")); + err = ::dlerror(); + if (err || mNewPlayer == NULL) { + // if err is NULL the string <null> is inserted in the logs => + // mNewPlayer was NULL. + LOGE("dlsym for newPlayer failed %s", err); + resetInternal(); + return UNKNOWN_ERROR; + } + + mDeletePlayer = reinterpret_cast<DELETE_PLAYER>(dlsym(mHandle, + "deletePlayer")); + err = ::dlerror(); + if (err || mDeletePlayer == NULL) { + LOGE("dlsym for deletePlayer failed %s", err); + resetInternal(); + return UNKNOWN_ERROR; + } + + mPlayer = (*mNewPlayer)(); + return mPlayer->setDataSource(mContentUrl); +} + +// Internal cleanup. +status_t TestPlayerStub::resetInternal() +{ + if(mUrl) { + free(mUrl); + mUrl = NULL; + } + mFilename = NULL; + mContentUrl = NULL; + + if (mPlayer) { + LOG_ASSERT(mDeletePlayer != NULL); + (*mDeletePlayer)(mPlayer); + mPlayer = NULL; + } + + if (mHandle) { + ::dlclose(mHandle); + mHandle = NULL; + } + return OK; +} + +/* static */ bool TestPlayerStub::canBeUsed(const char *url) +{ + return isTestBuild() && isTestUrl(url); +} + +} // namespace android diff --git a/media/libmediaplayerservice/TestPlayerStub.h b/media/libmediaplayerservice/TestPlayerStub.h new file mode 100644 index 000000000000..80d53a8b95d8 --- /dev/null +++ b/media/libmediaplayerservice/TestPlayerStub.h @@ -0,0 +1,119 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_FRAMEWORKS_BASE_MEDIA_LIBMEDIAPLAYERSERVICE_TESTPLAYERSTUB_H__ +#define ANDROID_FRAMEWORKS_BASE_MEDIA_LIBMEDIAPLAYERSERVICE_TESTPLAYERSTUB_H__ + +#include <media/MediaPlayerInterface.h> +#include <utils/Errors.h> + +namespace android { +class MediaPlayerBase; // in media/MediaPlayerInterface.h + +// Wrapper around a test media player that gets dynamically loaded. +// +// The URL passed to setDataSource has this format: +// +// test:<name of the .so>?url=<url for the real setDataSource impl.> +// +// e.g: +// test:invoke_test_media_player.so?url=http://youtube.com/ +// test:invoke_test_media_player.so?url=speedtest +// +// TestPlayerStub::setDataSource loads the library in the test url. 2 +// entry points with C linkage are expected. One to create the test +// player and one to destroy it. +// +// extern "C" android::MediaPlayerBase* newPlayer(); +// extern "C" android::status_t deletePlayer(android::MediaPlayerBase *p); +// +// Once the test player has been loaded, its setDataSource +// implementation is called with the value of the 'url' parameter. +// +// typical usage in a java test: +// ============================ +// +// MediaPlayer p = new MediaPlayer(); +// p.setDataSource("test:invoke_mock_media_player.so?url=http://youtube.com"); +// p.prepare(); +// ... 
+// p.release(); + +class TestPlayerStub : public MediaPlayerInterface { + public: + typedef MediaPlayerBase* (*NEW_PLAYER)(); + typedef status_t (*DELETE_PLAYER)(MediaPlayerBase *); + + TestPlayerStub(); + virtual ~TestPlayerStub(); + + // Called right after the constructor. Check if the current build + // allows test players. + virtual status_t initCheck(); + + // @param url Should be a test url. See class comment. + virtual status_t setDataSource(const char* url); + + // Test player for a file descriptor source is not supported. + virtual status_t setDataSource(int, int64_t, int64_t) { + return INVALID_OPERATION; + } + + + // All the methods below wrap the mPlayer instance. + virtual status_t setVideoSurface(const android::sp<android::ISurface>& s) { + return mPlayer->setVideoSurface(s); + } + virtual status_t prepare() {return mPlayer->prepare();} + virtual status_t prepareAsync() {return mPlayer->prepareAsync();} + virtual status_t start() {return mPlayer->start();} + virtual status_t stop() {return mPlayer->stop();} + virtual status_t pause() {return mPlayer->pause();} + virtual bool isPlaying() {return mPlayer->isPlaying();} + virtual status_t seekTo(int msec) {return mPlayer->seekTo(msec);} + virtual status_t getCurrentPosition(int *p) { + return mPlayer->getCurrentPosition(p); + } + virtual status_t getDuration(int *d) {return mPlayer->getDuration(d);} + virtual status_t reset() {return mPlayer->reset();} + virtual status_t setLooping(int b) {return mPlayer->setLooping(b);} + virtual player_type playerType() {return mPlayer->playerType();} + virtual status_t invoke(const android::Parcel& in, android::Parcel *out) { + return mPlayer->invoke(in, out); + } + + + // @return true if the current build is 'eng' or 'test' and the + // url's scheme is 'test:' + static bool canBeUsed(const char *url); + + private: + // Release the player, dlclose the library. 
+ status_t resetInternal(); + status_t parseUrl(); + + char *mUrl; // test:foo.so?url=http://bar + char *mFilename; // foo.so + char *mContentUrl; // http://bar + void *mHandle; // returned by dlopen + NEW_PLAYER mNewPlayer; + DELETE_PLAYER mDeletePlayer; + MediaPlayerBase *mPlayer; // wrapped player +}; + +} // namespace android + +#endif diff --git a/media/libmediaplayerservice/VorbisMetadataRetriever.cpp b/media/libmediaplayerservice/VorbisMetadataRetriever.cpp new file mode 100644 index 000000000000..e98167807708 --- /dev/null +++ b/media/libmediaplayerservice/VorbisMetadataRetriever.cpp @@ -0,0 +1,86 @@ +/* +** +** Copyright 2009, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "VorbisMetadataRetriever" +#include <utils/Log.h> + +#include "VorbisMetadataRetriever.h" +#include <media/mediametadataretriever.h> +# + +namespace android { + +void VorbisMetadataRetriever::clearMetadataValues() +{ + LOGV("cleearMetadataValues"); + mMetadataValues[0][0] = '\0'; +} + +status_t VorbisMetadataRetriever::setDataSource(const char *url) +{ + LOGV("setDataSource: url(%s)", url? 
url: "NULL pointer"); + Mutex::Autolock lock(mLock); + clearMetadataValues(); + if (mVorbisPlayer == 0) { + mVorbisPlayer = new VorbisPlayer(); + } + return mVorbisPlayer->setDataSource(url); +} + +status_t VorbisMetadataRetriever::setDataSource(int fd, int64_t offset, int64_t length) +{ + LOGV("setDataSource: fd(%d), offset(%lld), and length(%lld)", fd, offset, length); + Mutex::Autolock lock(mLock); + clearMetadataValues(); + if (mVorbisPlayer == 0) { + mVorbisPlayer = new VorbisPlayer(); + } + return mVorbisPlayer->setDataSource(fd, offset, length); +} + +const char* VorbisMetadataRetriever::extractMetadata(int keyCode) +{ + LOGV("extractMetadata: key(%d)", keyCode); + Mutex::Autolock lock(mLock); + if (mVorbisPlayer == 0 || mVorbisPlayer->initCheck() != NO_ERROR) { + LOGE("no vorbis player is initialized yet"); + return NULL; + } + switch (keyCode) { + case METADATA_KEY_DURATION: + { + if (mMetadataValues[0][0] == '\0') { + int duration = -1; + if (mVorbisPlayer->getDuration(&duration) != NO_ERROR) { + LOGE("failed to get duration"); + return NULL; + } + snprintf(mMetadataValues[0], MAX_METADATA_STRING_LENGTH, "%d", duration); + } + LOGV("duration: %s ms", mMetadataValues[0]); + return mMetadataValues[0]; + } + default: + LOGE("Unsupported key code (%d)", keyCode); + return NULL; + } + return NULL; +} + +}; + diff --git a/media/libmediaplayerservice/VorbisMetadataRetriever.h b/media/libmediaplayerservice/VorbisMetadataRetriever.h new file mode 100644 index 000000000000..1c57fe3f42c3 --- /dev/null +++ b/media/libmediaplayerservice/VorbisMetadataRetriever.h @@ -0,0 +1,49 @@ +/* +** +** Copyright 2009, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. 
+** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef ANDROID_VORBISMETADATARETRIEVER_H +#define ANDROID_VORBISMETADATARETRIEVER_H + +#include <utils/threads.h> +#include <utils/Errors.h> +#include <media/MediaMetadataRetrieverInterface.h> + +#include "VorbisPlayer.h" + +namespace android { + +class VorbisMetadataRetriever : public MediaMetadataRetrieverInterface { +public: + VorbisMetadataRetriever() {} + ~VorbisMetadataRetriever() {} + + virtual status_t setDataSource(const char *url); + virtual status_t setDataSource(int fd, int64_t offset, int64_t length); + virtual const char* extractMetadata(int keyCode); + +private: + static const uint32_t MAX_METADATA_STRING_LENGTH = 128; + void clearMetadataValues(); + + Mutex mLock; + sp<VorbisPlayer> mVorbisPlayer; + char mMetadataValues[1][MAX_METADATA_STRING_LENGTH]; +}; + +}; // namespace android + +#endif // ANDROID_VORBISMETADATARETRIEVER_H diff --git a/media/libmediaplayerservice/VorbisPlayer.h b/media/libmediaplayerservice/VorbisPlayer.h index c30dc1b72dd2..40246543d4ce 100644 --- a/media/libmediaplayerservice/VorbisPlayer.h +++ b/media/libmediaplayerservice/VorbisPlayer.h @@ -53,6 +53,7 @@ public: virtual status_t reset(); virtual status_t setLooping(int loop); virtual player_type playerType() { return VORBIS_PLAYER; } + virtual status_t invoke(const Parcel& request, Parcel *reply) {return INVALID_OPERATION;} private: status_t setdatasource(const char *path, int fd, int64_t offset, int64_t length); @@ -88,4 +89,3 @@ private: }; // namespace android #endif // ANDROID_VORBISPLAYER_H - diff --git a/media/libstagefright/AMRExtractor.cpp 
b/media/libstagefright/AMRExtractor.cpp new file mode 100644 index 000000000000..8d85ce2c72ff --- /dev/null +++ b/media/libstagefright/AMRExtractor.cpp @@ -0,0 +1,242 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "AMRExtractor" +#include <utils/Log.h> + +#include <media/stagefright/AMRExtractor.h> +#include <media/stagefright/DataSource.h> +#include <media/stagefright/MediaBufferGroup.h> +#include <media/stagefright/MediaDebug.h> +#include <media/stagefright/MediaDefs.h> +#include <media/stagefright/MediaErrors.h> +#include <media/stagefright/MediaSource.h> +#include <media/stagefright/MetaData.h> +#include <utils/String8.h> + +namespace android { + +class AMRSource : public MediaSource { +public: + AMRSource(const sp<DataSource> &source, bool isWide); + + virtual status_t start(MetaData *params = NULL); + virtual status_t stop(); + + virtual sp<MetaData> getFormat(); + + virtual status_t read( + MediaBuffer **buffer, const ReadOptions *options = NULL); + +protected: + virtual ~AMRSource(); + +private: + sp<DataSource> mDataSource; + bool mIsWide; + + off_t mOffset; + int64_t mCurrentTimeUs; + bool mStarted; + MediaBufferGroup *mGroup; + + AMRSource(const AMRSource &); + AMRSource &operator=(const AMRSource &); +}; + +//////////////////////////////////////////////////////////////////////////////// + +AMRExtractor::AMRExtractor(const sp<DataSource> 
&source) + : mDataSource(source), + mInitCheck(NO_INIT) { + String8 mimeType; + float confidence; + if (SniffAMR(mDataSource, &mimeType, &confidence)) { + mInitCheck = OK; + mIsWide = (mimeType == MEDIA_MIMETYPE_AUDIO_AMR_WB); + } +} + +AMRExtractor::~AMRExtractor() { +} + +size_t AMRExtractor::countTracks() { + return mInitCheck == OK ? 1 : 0; +} + +sp<MediaSource> AMRExtractor::getTrack(size_t index) { + if (mInitCheck != OK || index != 0) { + return NULL; + } + + return new AMRSource(mDataSource, mIsWide); +} + +sp<MetaData> AMRExtractor::getTrackMetaData(size_t index) { + if (mInitCheck != OK || index != 0) { + return NULL; + } + + return makeAMRFormat(mIsWide); +} + +// static +sp<MetaData> AMRExtractor::makeAMRFormat(bool isWide) { + sp<MetaData> meta = new MetaData; + meta->setCString( + kKeyMIMEType, isWide ? MEDIA_MIMETYPE_AUDIO_AMR_WB + : MEDIA_MIMETYPE_AUDIO_AMR_NB); + + meta->setInt32(kKeyChannelCount, 1); + meta->setInt32(kKeySampleRate, isWide ? 16000 : 8000); + + return meta; +} + +//////////////////////////////////////////////////////////////////////////////// + +AMRSource::AMRSource(const sp<DataSource> &source, bool isWide) + : mDataSource(source), + mIsWide(isWide), + mOffset(mIsWide ? 9 : 6), + mCurrentTimeUs(0), + mStarted(false), + mGroup(NULL) { +} + +AMRSource::~AMRSource() { + if (mStarted) { + stop(); + } +} + +status_t AMRSource::start(MetaData *params) { + CHECK(!mStarted); + + mOffset = mIsWide ? 
9 : 6; + mCurrentTimeUs = 0; + mGroup = new MediaBufferGroup; + mGroup->add_buffer(new MediaBuffer(128)); + mStarted = true; + + return OK; +} + +status_t AMRSource::stop() { + CHECK(mStarted); + + delete mGroup; + mGroup = NULL; + + mStarted = false; + return OK; +} + +sp<MetaData> AMRSource::getFormat() { + return AMRExtractor::makeAMRFormat(mIsWide); +} + +status_t AMRSource::read( + MediaBuffer **out, const ReadOptions *options) { + *out = NULL; + + uint8_t header; + ssize_t n = mDataSource->read_at(mOffset, &header, 1); + + if (n < 1) { + return ERROR_IO; + } + + MediaBuffer *buffer; + status_t err = mGroup->acquire_buffer(&buffer); + if (err != OK) { + return err; + } + + if (header & 0x83) { + // Padding bits must be 0. + + return ERROR_MALFORMED; + } + + unsigned FT = (header >> 3) & 0x0f; + + if (FT > 8 || (!mIsWide && FT > 7)) { + return ERROR_MALFORMED; + } + + static const size_t kFrameSizeNB[8] = { + 95, 103, 118, 134, 148, 159, 204, 244 + }; + static const size_t kFrameSizeWB[9] = { + 132, 177, 253, 285, 317, 365, 397, 461, 477 + }; + + size_t frameSize = mIsWide ? kFrameSizeWB[FT] : kFrameSizeNB[FT]; + + // Round up bits to bytes and add 1 for the header byte. 
+ frameSize = (frameSize + 7) / 8 + 1; + + n = mDataSource->read_at(mOffset, buffer->data(), frameSize); + + if (n != (ssize_t)frameSize) { + buffer->release(); + buffer = NULL; + + return ERROR_IO; + } + + buffer->set_range(0, frameSize); + buffer->meta_data()->setInt32( + kKeyTimeUnits, (mCurrentTimeUs + 500) / 1000); + buffer->meta_data()->setInt32( + kKeyTimeScale, 1000); + + mOffset += frameSize; + mCurrentTimeUs += 20000; // Each frame is 20ms + + *out = buffer; + + return OK; +} + +//////////////////////////////////////////////////////////////////////////////// + +bool SniffAMR( + const sp<DataSource> &source, String8 *mimeType, float *confidence) { + char header[9]; + + if (source->read_at(0, header, sizeof(header)) != sizeof(header)) { + return false; + } + + if (!memcmp(header, "#!AMR\n", 6)) { + *mimeType = MEDIA_MIMETYPE_AUDIO_AMR_NB; + *confidence = 0.5; + + return true; + } else if (!memcmp(header, "#!AMR-WB\n", 9)) { + *mimeType = MEDIA_MIMETYPE_AUDIO_AMR_WB; + *confidence = 0.5; + + return true; + } + + return false; +} + +} // namespace android diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk new file mode 100644 index 000000000000..9f71dae9d36d --- /dev/null +++ b/media/libstagefright/Android.mk @@ -0,0 +1,64 @@ +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_SRC_FILES:= \ + ESDS.cpp \ + MediaBuffer.cpp \ + MediaBufferGroup.cpp \ + MediaDefs.cpp \ + MediaSource.cpp \ + MetaData.cpp \ + OMXCodec.cpp \ + Utils.cpp \ + OMXClient.cpp + +ifeq ($(BUILD_WITH_FULL_STAGEFRIGHT),true) + +LOCAL_SRC_FILES += \ + AMRExtractor.cpp \ + CachingDataSource.cpp \ + DataSource.cpp \ + FileSource.cpp \ + HTTPDataSource.cpp \ + HTTPStream.cpp \ + JPEGSource.cpp \ + MediaExtractor.cpp \ + MP3Extractor.cpp \ + MPEG4Extractor.cpp \ + MPEG4Writer.cpp \ + MediaPlayerImpl.cpp \ + MmapSource.cpp \ + SampleTable.cpp \ + ShoutcastSource.cpp \ + TimeSource.cpp \ + TimedEventQueue.cpp \ + AudioPlayer.cpp \ + stagefright_string.cpp + 
+endif + +LOCAL_C_INCLUDES:= \ + $(JNI_H_INCLUDE) \ + $(TOP)/external/opencore/extern_libs_v2/khronos/openmax/include \ + $(TOP)/external/opencore/android + +LOCAL_SHARED_LIBRARIES := \ + libbinder \ + libmedia \ + libutils \ + libcutils \ + libui + +ifeq ($(TARGET_OS)-$(TARGET_SIMULATOR),linux-true) + LOCAL_LDLIBS += -lpthread +endif + +LOCAL_CFLAGS += -Wno-multichar + +LOCAL_PRELINK_MODULE:= false + +LOCAL_MODULE:= libstagefright + +include $(BUILD_SHARED_LIBRARY) + +include $(call all-makefiles-under,$(LOCAL_PATH)) diff --git a/media/libstagefright/AudioPlayer.cpp b/media/libstagefright/AudioPlayer.cpp new file mode 100644 index 000000000000..538facba9c94 --- /dev/null +++ b/media/libstagefright/AudioPlayer.cpp @@ -0,0 +1,294 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "AudioPlayer" +#include <utils/Log.h> + +#include <media/AudioTrack.h> +#include <media/stagefright/AudioPlayer.h> +#include <media/stagefright/MediaDebug.h> +#include <media/stagefright/MediaDefs.h> +#include <media/stagefright/MediaSource.h> +#include <media/stagefright/MetaData.h> + +namespace android { + +AudioPlayer::AudioPlayer(const sp<MediaPlayerBase::AudioSink> &audioSink) + : mAudioTrack(NULL), + mInputBuffer(NULL), + mSampleRate(0), + mLatencyUs(0), + mFrameSize(0), + mNumFramesPlayed(0), + mPositionTimeMediaUs(-1), + mPositionTimeRealUs(-1), + mSeeking(false), + mStarted(false), + mAudioSink(audioSink) { +} + +AudioPlayer::~AudioPlayer() { + if (mStarted) { + stop(); + } +} + +void AudioPlayer::setSource(const sp<MediaSource> &source) { + CHECK_EQ(mSource, NULL); + mSource = source; +} + +void AudioPlayer::start() { + CHECK(!mStarted); + CHECK(mSource != NULL); + + status_t err = mSource->start(); + CHECK_EQ(err, OK); + + sp<MetaData> format = mSource->getFormat(); + const char *mime; + bool success = format->findCString(kKeyMIMEType, &mime); + CHECK(success); + CHECK(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)); + + success = format->findInt32(kKeySampleRate, &mSampleRate); + CHECK(success); + + int32_t numChannels; + success = format->findInt32(kKeyChannelCount, &numChannels); + CHECK(success); + + if (mAudioSink.get() != NULL) { + status_t err = mAudioSink->open( + mSampleRate, numChannels, AudioSystem::PCM_16_BIT, + DEFAULT_AUDIOSINK_BUFFERCOUNT, + &AudioPlayer::AudioSinkCallback, this); + CHECK_EQ(err, OK); + + mLatencyUs = (int64_t)mAudioSink->latency() * 1000; + mFrameSize = mAudioSink->frameSize(); + + mAudioSink->start(); + } else { + mAudioTrack = new AudioTrack( + AudioSystem::MUSIC, mSampleRate, AudioSystem::PCM_16_BIT, + (numChannels == 2) + ? 
AudioSystem::CHANNEL_OUT_STEREO + : AudioSystem::CHANNEL_OUT_MONO, + 8192, 0, &AudioCallback, this, 0); + + CHECK_EQ(mAudioTrack->initCheck(), OK); + + mLatencyUs = (int64_t)mAudioTrack->latency() * 1000; + mFrameSize = mAudioTrack->frameSize(); + + mAudioTrack->start(); + } + + mStarted = true; +} + +void AudioPlayer::pause() { + CHECK(mStarted); + + if (mAudioSink.get() != NULL) { + mAudioSink->pause(); + } else { + mAudioTrack->stop(); + } +} + +void AudioPlayer::resume() { + CHECK(mStarted); + + if (mAudioSink.get() != NULL) { + mAudioSink->start(); + } else { + mAudioTrack->start(); + } +} + +void AudioPlayer::stop() { + CHECK(mStarted); + + if (mAudioSink.get() != NULL) { + mAudioSink->stop(); + } else { + mAudioTrack->stop(); + + delete mAudioTrack; + mAudioTrack = NULL; + } + + // Make sure to release any buffer we hold onto so that the + // source is able to stop(). + if (mInputBuffer != NULL) { + LOGV("AudioPlayer releasing input buffer."); + + mInputBuffer->release(); + mInputBuffer = NULL; + } + + mSource->stop(); + + mNumFramesPlayed = 0; + mPositionTimeMediaUs = -1; + mPositionTimeRealUs = -1; + mSeeking = false; + mStarted = false; +} + +// static +void AudioPlayer::AudioCallback(int event, void *user, void *info) { + static_cast<AudioPlayer *>(user)->AudioCallback(event, info); +} + +// static +void AudioPlayer::AudioSinkCallback( + MediaPlayerBase::AudioSink *audioSink, + void *buffer, size_t size, void *cookie) { + AudioPlayer *me = (AudioPlayer *)cookie; + + me->fillBuffer(buffer, size); +} + +void AudioPlayer::AudioCallback(int event, void *info) { + if (event != AudioTrack::EVENT_MORE_DATA) { + return; + } + + AudioTrack::Buffer *buffer = (AudioTrack::Buffer *)info; + fillBuffer(buffer->raw, buffer->size); +} + +void AudioPlayer::fillBuffer(void *data, size_t size) { + if (mNumFramesPlayed == 0) { + LOGV("AudioCallback"); + } + + size_t size_done = 0; + size_t size_remaining = size; + while (size_remaining > 0) { + MediaSource::ReadOptions 
options; + + { + Mutex::Autolock autoLock(mLock); + + if (mSeeking) { + options.setSeekTo(mSeekTimeUs); + + if (mInputBuffer != NULL) { + mInputBuffer->release(); + mInputBuffer = NULL; + } + mSeeking = false; + } + } + + if (mInputBuffer == NULL) { + status_t err = mSource->read(&mInputBuffer, &options); + + CHECK((err == OK && mInputBuffer != NULL) + || (err != OK && mInputBuffer == NULL)); + + if (err != OK) { + memset((char *)data + size_done, 0, size_remaining); + break; + } + + int32_t units, scale; + bool success = + mInputBuffer->meta_data()->findInt32(kKeyTimeUnits, &units); + success = success && + mInputBuffer->meta_data()->findInt32(kKeyTimeScale, &scale); + CHECK(success); + + Mutex::Autolock autoLock(mLock); + mPositionTimeMediaUs = (int64_t)units * 1000000 / scale; + + mPositionTimeRealUs = + ((mNumFramesPlayed + size_done / mFrameSize) * 1000000) + / mSampleRate; + + LOGV("buffer->size() = %d, " + "mPositionTimeMediaUs=%.2f mPositionTimeRealUs=%.2f", + mInputBuffer->range_length(), + mPositionTimeMediaUs / 1E6, mPositionTimeRealUs / 1E6); + } + + if (mInputBuffer->range_length() == 0) { + mInputBuffer->release(); + mInputBuffer = NULL; + + continue; + } + + size_t copy = size_remaining; + if (copy > mInputBuffer->range_length()) { + copy = mInputBuffer->range_length(); + } + + memcpy((char *)data + size_done, + (const char *)mInputBuffer->data() + mInputBuffer->range_offset(), + copy); + + mInputBuffer->set_range(mInputBuffer->range_offset() + copy, + mInputBuffer->range_length() - copy); + + size_done += copy; + size_remaining -= copy; + } + + Mutex::Autolock autoLock(mLock); + mNumFramesPlayed += size / mFrameSize; +} + +int64_t AudioPlayer::getRealTimeUs() { + Mutex::Autolock autoLock(mLock); + return getRealTimeUsLocked(); +} + +int64_t AudioPlayer::getRealTimeUsLocked() const { + return -mLatencyUs + (mNumFramesPlayed * 1000000) / mSampleRate; +} + +int64_t AudioPlayer::getMediaTimeUs() { + Mutex::Autolock autoLock(mLock); + + return 
mPositionTimeMediaUs + (getRealTimeUsLocked() - mPositionTimeRealUs); +} + +bool AudioPlayer::getMediaTimeMapping( + int64_t *realtime_us, int64_t *mediatime_us) { + Mutex::Autolock autoLock(mLock); + + *realtime_us = mPositionTimeRealUs; + *mediatime_us = mPositionTimeMediaUs; + + return mPositionTimeRealUs != -1 || mPositionTimeMediaUs != -1; +} + +status_t AudioPlayer::seekTo(int64_t time_us) { + Mutex::Autolock autoLock(mLock); + + mSeeking = true; + mSeekTimeUs = time_us; + + return OK; +} + +} diff --git a/media/libstagefright/CachingDataSource.cpp b/media/libstagefright/CachingDataSource.cpp new file mode 100644 index 000000000000..fd005764a6ca --- /dev/null +++ b/media/libstagefright/CachingDataSource.cpp @@ -0,0 +1,157 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include <stdlib.h> +#include <string.h> + +#include <media/stagefright/CachingDataSource.h> +#include <media/stagefright/MediaDebug.h> + +namespace android { + +CachingDataSource::CachingDataSource( + const sp<DataSource> &source, size_t pageSize, int numPages) + : mSource(source), + mData(malloc(pageSize * numPages)), + mPageSize(pageSize), + mFirst(NULL), + mLast(NULL) { + for (int i = 0; i < numPages; ++i) { + Page *page = new Page; + page->mPrev = mLast; + page->mNext = NULL; + + if (mLast == NULL) { + mFirst = page; + } else { + mLast->mNext = page; + } + + mLast = page; + + page->mOffset = -1; + page->mLength = 0; + page->mData = (char *)mData + mPageSize * i; + } +} + +CachingDataSource::~CachingDataSource() { + Page *page = mFirst; + while (page != NULL) { + Page *next = page->mNext; + delete page; + page = next; + } + mFirst = mLast = NULL; + + free(mData); + mData = NULL; +} + +status_t CachingDataSource::InitCheck() const { + return OK; +} + +ssize_t CachingDataSource::read_at(off_t offset, void *data, size_t size) { + Mutex::Autolock autoLock(mLock); + + size_t total = 0; + while (size > 0) { + Page *page = mFirst; + while (page != NULL) { + if (page->mOffset >= 0 && offset >= page->mOffset + && offset < page->mOffset + (off_t)page->mLength) { + break; + } + page = page->mNext; + } + + if (page == NULL) { + page = allocate_page(); + page->mOffset = offset - offset % mPageSize; + ssize_t n = mSource->read_at(page->mOffset, page->mData, mPageSize); + if (n < 0) { + page->mLength = 0; + } else { + page->mLength = (size_t)n; + } + mFirst->mPrev = page; + page->mNext = mFirst; + page->mPrev = NULL; + mFirst = page; + + if (n < 0) { + return n; + } + + if (offset >= page->mOffset + (off_t)page->mLength) { + break; + } + } else { + // Move "page" to the front in LRU order. 
+ if (page->mNext != NULL) { + page->mNext->mPrev = page->mPrev; + } else { + mLast = page->mPrev; + } + + if (page->mPrev != NULL) { + page->mPrev->mNext = page->mNext; + } else { + mFirst = page->mNext; + } + + mFirst->mPrev = page; + page->mNext = mFirst; + page->mPrev = NULL; + mFirst = page; + } + + size_t copy = page->mLength - (offset - page->mOffset); + if (copy > size) { + copy = size; + } + memcpy(data,(const char *)page->mData + (offset - page->mOffset), + copy); + + total += copy; + + if (page->mLength < mPageSize) { + // This was the final page. There is no more data beyond it. + break; + } + + offset += copy; + size -= copy; + data = (char *)data + copy; + } + + return total; +} + +CachingDataSource::Page *CachingDataSource::allocate_page() { + // The last page is the least recently used, i.e. oldest. + + Page *page = mLast; + + page->mPrev->mNext = NULL; + mLast = page->mPrev; + page->mPrev = NULL; + + return page; +} + +} // namespace android diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp new file mode 100644 index 000000000000..596ab67d91c3 --- /dev/null +++ b/media/libstagefright/CameraSource.cpp @@ -0,0 +1,224 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include <sys/time.h> + +#include <OMX_Component.h> + +#include <binder/IServiceManager.h> +#include <media/stagefright/CameraSource.h> +#include <media/stagefright/MediaDebug.h> +#include <media/stagefright/MediaErrors.h> +#include <media/stagefright/MetaData.h> +#include <ui/ICameraClient.h> +#include <ui/ICameraService.h> +#include <ui/Overlay.h> +#include <utils/String16.h> + +namespace android { + +class CameraBuffer : public MediaBuffer { +public: + CameraBuffer(const sp<IMemory> &frame) + : MediaBuffer(frame->pointer(), frame->size()), + mFrame(frame) { + } + + sp<IMemory> releaseFrame() { + sp<IMemory> frame = mFrame; + mFrame.clear(); + return frame; + } + +private: + sp<IMemory> mFrame; +}; + +class CameraSourceClient : public BnCameraClient { +public: + CameraSourceClient() + : mSource(NULL) { + } + + virtual void notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2) { + CHECK(mSource != NULL); + mSource->notifyCallback(msgType, ext1, ext2); + } + + virtual void dataCallback(int32_t msgType, const sp<IMemory> &data) { + CHECK(mSource != NULL); + mSource->dataCallback(msgType, data); + } + + void setCameraSource(CameraSource *source) { + mSource = source; + } + +private: + CameraSource *mSource; +}; + +class DummySurface : public BnSurface { +public: + DummySurface() {} + + virtual status_t registerBuffers(const BufferHeap &buffers) { + return OK; + } + + virtual void postBuffer(ssize_t offset) { + } + + virtual void unregisterBuffers() { + } + + virtual sp<OverlayRef> createOverlay( + uint32_t w, uint32_t h, int32_t format) { + return NULL; + } +}; + +// static +CameraSource *CameraSource::Create() { + sp<IServiceManager> sm = defaultServiceManager(); + + sp<ICameraService> service = + interface_cast<ICameraService>( + sm->getService(String16("media.camera"))); + + sp<CameraSourceClient> client = new CameraSourceClient; + sp<ICamera> camera = service->connect(client); + + CameraSource *source = new CameraSource(camera, client); + 
client->setCameraSource(source); + + return source; +} + +CameraSource::CameraSource( + const sp<ICamera> &camera, const sp<ICameraClient> &client) + : mCamera(camera), + mCameraClient(client), + mNumFrames(0), + mStarted(false) { + printf("params: \"%s\"\n", mCamera->getParameters().string()); +} + +CameraSource::~CameraSource() { + if (mStarted) { + stop(); + } + + mCamera->disconnect(); +} + +status_t CameraSource::start(MetaData *) { + CHECK(!mStarted); + + status_t err = mCamera->lock(); + CHECK_EQ(err, OK); + + err = mCamera->setPreviewDisplay(new DummySurface); + CHECK_EQ(err, OK); + mCamera->setPreviewCallbackFlag(1); + mCamera->startPreview(); + CHECK_EQ(err, OK); + + mStarted = true; + + return OK; +} + +status_t CameraSource::stop() { + CHECK(mStarted); + + mCamera->stopPreview(); + mCamera->unlock(); + + mStarted = false; + + return OK; +} + +sp<MetaData> CameraSource::getFormat() { + sp<MetaData> meta = new MetaData; + meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW); + meta->setInt32(kKeyColorFormat, OMX_COLOR_FormatYUV420SemiPlanar); + meta->setInt32(kKeyWidth, 480); + meta->setInt32(kKeyHeight, 320); + + return meta; +} + +status_t CameraSource::read( + MediaBuffer **buffer, const ReadOptions *options) { + CHECK(mStarted); + + *buffer = NULL; + + int64_t seekTimeUs; + if (options && options->getSeekTo(&seekTimeUs)) { + return ERROR_UNSUPPORTED; + } + + sp<IMemory> frame; + + { + Mutex::Autolock autoLock(mLock); + while (mFrames.empty()) { + mFrameAvailableCondition.wait(mLock); + } + + frame = *mFrames.begin(); + mFrames.erase(mFrames.begin()); + } + + int count = mNumFrames++; + + *buffer = new CameraBuffer(frame); + + (*buffer)->meta_data()->clear(); + (*buffer)->meta_data()->setInt32(kKeyTimeScale, 15); + (*buffer)->meta_data()->setInt32(kKeyTimeUnits, count); + + (*buffer)->add_ref(); + (*buffer)->setObserver(this); + + return OK; +} + +void CameraSource::notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2) { + 
printf("notifyCallback %d, %d, %d\n", msgType, ext1, ext2); +} + +void CameraSource::dataCallback(int32_t msgType, const sp<IMemory> &data) { + Mutex::Autolock autoLock(mLock); + + mFrames.push_back(data); + mFrameAvailableCondition.signal(); +} + +void CameraSource::signalBufferReturned(MediaBuffer *_buffer) { + CameraBuffer *buffer = static_cast<CameraBuffer *>(_buffer); + + mCamera->releaseRecordingFrame(buffer->releaseFrame()); + + buffer->setObserver(NULL); + buffer->release(); + buffer = NULL; +} + +} // namespace android diff --git a/media/libstagefright/DataSource.cpp b/media/libstagefright/DataSource.cpp new file mode 100644 index 000000000000..daac539bfa74 --- /dev/null +++ b/media/libstagefright/DataSource.cpp @@ -0,0 +1,91 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include <media/stagefright/AMRExtractor.h> +#include <media/stagefright/DataSource.h> +#include <media/stagefright/MediaErrors.h> +#include <media/stagefright/MP3Extractor.h> +#include <media/stagefright/MPEG4Extractor.h> +#include <utils/String8.h> + +namespace android { + +bool DataSource::getUInt16(off_t offset, uint16_t *x) { + *x = 0; + + uint8_t byte[2]; + if (read_at(offset, byte, 2) != 2) { + return false; + } + + *x = (byte[0] << 8) | byte[1]; + + return true; +} + +status_t DataSource::getSize(off_t *size) { + *size = 0; + + return ERROR_UNSUPPORTED; +} + +//////////////////////////////////////////////////////////////////////////////// + +Mutex DataSource::gSnifferMutex; +List<DataSource::SnifferFunc> DataSource::gSniffers; + +bool DataSource::sniff(String8 *mimeType, float *confidence) { + *mimeType = ""; + *confidence = 0.0f; + + Mutex::Autolock autoLock(gSnifferMutex); + for (List<SnifferFunc>::iterator it = gSniffers.begin(); + it != gSniffers.end(); ++it) { + String8 newMimeType; + float newConfidence; + if ((*it)(this, &newMimeType, &newConfidence)) { + if (newConfidence > *confidence) { + *mimeType = newMimeType; + *confidence = newConfidence; + } + } + } + + return *confidence > 0.0; +} + +// static +void DataSource::RegisterSniffer(SnifferFunc func) { + Mutex::Autolock autoLock(gSnifferMutex); + + for (List<SnifferFunc>::iterator it = gSniffers.begin(); + it != gSniffers.end(); ++it) { + if (*it == func) { + return; + } + } + + gSniffers.push_back(func); +} + +// static +void DataSource::RegisterDefaultSniffers() { + RegisterSniffer(SniffMP3); + RegisterSniffer(SniffMPEG4); + RegisterSniffer(SniffAMR); +} + +} // namespace android diff --git a/media/libstagefright/ESDS.cpp b/media/libstagefright/ESDS.cpp new file mode 100644 index 000000000000..53b92a07df40 --- /dev/null +++ b/media/libstagefright/ESDS.cpp @@ -0,0 +1,196 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 
(the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include <media/stagefright/ESDS.h> + +#include <string.h> + +namespace android { + +ESDS::ESDS(const void *data, size_t size) + : mData(new uint8_t[size]), + mSize(size), + mInitCheck(NO_INIT), + mDecoderSpecificOffset(0), + mDecoderSpecificLength(0) { + memcpy(mData, data, size); + + mInitCheck = parse(); +} + +ESDS::~ESDS() { + delete[] mData; + mData = NULL; +} + +status_t ESDS::InitCheck() const { + return mInitCheck; +} + +status_t ESDS::getCodecSpecificInfo(const void **data, size_t *size) const { + if (mInitCheck != OK) { + return mInitCheck; + } + + *data = &mData[mDecoderSpecificOffset]; + *size = mDecoderSpecificLength; + + return OK; +} + +status_t ESDS::skipDescriptorHeader( + size_t offset, size_t size, + uint8_t *tag, size_t *data_offset, size_t *data_size) const { + if (size == 0) { + return ERROR_MALFORMED; + } + + *tag = mData[offset++]; + --size; + + *data_size = 0; + bool more; + do { + if (size == 0) { + return ERROR_MALFORMED; + } + + uint8_t x = mData[offset++]; + --size; + + *data_size = (*data_size << 7) | (x & 0x7f); + more = (x & 0x80) != 0; + } + while (more); + + if (*data_size > size) { + return ERROR_MALFORMED; + } + + *data_offset = offset; + + return OK; +} + +status_t ESDS::parse() { + uint8_t tag; + size_t data_offset; + size_t data_size; + status_t err = + skipDescriptorHeader(0, mSize, &tag, &data_offset, &data_size); + + if (err != OK) { + return err; + } + + if (tag != kTag_ESDescriptor) { + return ERROR_MALFORMED; + 
} + + return parseESDescriptor(data_offset, data_size); +} + +status_t ESDS::parseESDescriptor(size_t offset, size_t size) { + if (size < 3) { + return ERROR_MALFORMED; + } + + offset += 2; // skip ES_ID + size -= 2; + + unsigned streamDependenceFlag = mData[offset] & 0x80; + unsigned URL_Flag = mData[offset] & 0x40; + unsigned OCRstreamFlag = mData[offset] & 0x20; + + ++offset; + --size; + + if (streamDependenceFlag) { + offset += 2; + size -= 2; + } + + if (URL_Flag) { + if (offset >= size) { + return ERROR_MALFORMED; + } + unsigned URLlength = mData[offset]; + offset += URLlength + 1; + size -= URLlength + 1; + } + + if (OCRstreamFlag) { + offset += 2; + size -= 2; + } + + if (offset >= size) { + return ERROR_MALFORMED; + } + + uint8_t tag; + size_t sub_offset, sub_size; + status_t err = skipDescriptorHeader( + offset, size, &tag, &sub_offset, &sub_size); + + if (err != OK) { + return err; + } + + if (tag != kTag_DecoderConfigDescriptor) { + return ERROR_MALFORMED; + } + + err = parseDecoderConfigDescriptor(sub_offset, sub_size); + + return err; +} + +status_t ESDS::parseDecoderConfigDescriptor(size_t offset, size_t size) { + if (size < 13) { + return ERROR_MALFORMED; + } + + offset += 13; + size -= 13; + + if (size == 0) { + mDecoderSpecificOffset = 0; + mDecoderSpecificLength = 0; + return OK; + } + + uint8_t tag; + size_t sub_offset, sub_size; + status_t err = skipDescriptorHeader( + offset, size, &tag, &sub_offset, &sub_size); + + if (err != OK) { + return err; + } + + if (tag != kTag_DecoderSpecificInfo) { + return ERROR_MALFORMED; + } + + mDecoderSpecificOffset = sub_offset; + mDecoderSpecificLength = sub_size; + + return OK; +} + +} // namespace android + diff --git a/media/libstagefright/FileSource.cpp b/media/libstagefright/FileSource.cpp new file mode 100644 index 000000000000..f6b90b25ad5e --- /dev/null +++ b/media/libstagefright/FileSource.cpp @@ -0,0 +1,48 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the 
Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include <media/stagefright/FileSource.h> +#include <media/stagefright/MediaDebug.h> + +namespace android { + +FileSource::FileSource(const char *filename) + : mFile(fopen(filename, "rb")) { +} + +FileSource::~FileSource() { + if (mFile != NULL) { + fclose(mFile); + mFile = NULL; + } +} + +status_t FileSource::InitCheck() const { + return mFile != NULL ? OK : NO_INIT; +} + +ssize_t FileSource::read_at(off_t offset, void *data, size_t size) { + Mutex::Autolock autoLock(mLock); + + int err = fseeko(mFile, offset, SEEK_SET); + CHECK(err != -1); + + ssize_t result = fread(data, 1, size, mFile); + + return result; +} + +} // namespace android diff --git a/media/libstagefright/HTTPDataSource.cpp b/media/libstagefright/HTTPDataSource.cpp new file mode 100644 index 000000000000..4dedebd43360 --- /dev/null +++ b/media/libstagefright/HTTPDataSource.cpp @@ -0,0 +1,172 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include <stdlib.h> + +#include <media/stagefright/HTTPDataSource.h> +#include <media/stagefright/HTTPStream.h> +#include <media/stagefright/MediaDebug.h> +#include <media/stagefright/stagefright_string.h> + +namespace android { + +HTTPDataSource::HTTPDataSource(const char *uri) + : mHost(NULL), + mPort(0), + mPath(NULL), + mBuffer(malloc(kBufferSize)), + mBufferLength(0), + mBufferOffset(0) { + CHECK(!strncasecmp("http://", uri, 7)); + + string host; + string path; + int port; + + char *slash = strchr(uri + 7, '/'); + if (slash == NULL) { + host = uri + 7; + path = "/"; + } else { + host = string(uri + 7, slash - (uri + 7)); + path = slash; + } + + char *colon = strchr(host.c_str(), ':'); + if (colon == NULL) { + port = 80; + } else { + char *end; + long tmp = strtol(colon + 1, &end, 10); + CHECK(end > colon + 1); + CHECK(tmp > 0 && tmp < 65536); + port = tmp; + + host = string(host, 0, colon - host.c_str()); + } + + LOGI("Connecting to host '%s', port %d, path '%s'", + host.c_str(), port, path.c_str()); + + mHost = strdup(host.c_str()); + mPort = port; + mPath = strdup(path.c_str()); + + status_t err = mHttp.connect(mHost, mPort); + CHECK_EQ(err, OK); +} + +HTTPDataSource::HTTPDataSource(const char *host, int port, const char *path) + : mHost(strdup(host)), + mPort(port), + mPath(strdup(path)), + mBuffer(malloc(kBufferSize)), + mBufferLength(0), + mBufferOffset(0) { + status_t err = mHttp.connect(mHost, mPort); + CHECK_EQ(err, OK); +} + +HTTPDataSource::~HTTPDataSource() { + mHttp.disconnect(); + + free(mBuffer); + mBuffer = NULL; + + free(mPath); + mPath = NULL; +} + +ssize_t HTTPDataSource::read_at(off_t offset, void *data, size_t size) { + if (offset >= mBufferOffset && offset < mBufferOffset + mBufferLength) { + size_t num_bytes_available = mBufferLength - (offset - mBufferOffset); + + size_t copy = num_bytes_available; + if (copy > size) { + 
copy = size; + } + + memcpy(data, (const char *)mBuffer + (offset - mBufferOffset), copy); + + return copy; + } + + mBufferOffset = offset; + mBufferLength = 0; + + char host[128]; + sprintf(host, "Host: %s\r\n", mHost); + + char range[128]; + sprintf(range, "Range: bytes=%ld-%ld\r\n\r\n", + mBufferOffset, mBufferOffset + kBufferSize - 1); + + int http_status; + + status_t err; + int attempt = 1; + for (;;) { + if ((err = mHttp.send("GET ")) != OK + || (err = mHttp.send(mPath)) != OK + || (err = mHttp.send(" HTTP/1.1\r\n")) != OK + || (err = mHttp.send(host)) != OK + || (err = mHttp.send(range)) != OK + || (err = mHttp.send("\r\n")) != OK + || (err = mHttp.receive_header(&http_status)) != OK) { + + if (attempt == 3) { + return err; + } + + mHttp.connect(mHost, mPort); + ++attempt; + } else { + break; + } + } + + if ((http_status / 100) != 2) { + return UNKNOWN_ERROR; + } + + string value; + if (!mHttp.find_header_value("Content-Length", &value)) { + return UNKNOWN_ERROR; + } + + char *end; + unsigned long contentLength = strtoul(value.c_str(), &end, 10); + + ssize_t num_bytes_received = mHttp.receive(mBuffer, contentLength); + + if (num_bytes_received <= 0) { + return num_bytes_received; + } + + mBufferLength = (size_t)num_bytes_received; + + size_t copy = mBufferLength; + if (copy > size) { + copy = size; + } + + memcpy(data, mBuffer, copy); + + return copy; +} + +} // namespace android + diff --git a/media/libstagefright/HTTPStream.cpp b/media/libstagefright/HTTPStream.cpp new file mode 100644 index 000000000000..6af7df9559aa --- /dev/null +++ b/media/libstagefright/HTTPStream.cpp @@ -0,0 +1,285 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include <sys/socket.h> + +#include <arpa/inet.h> +#include <ctype.h> +#include <errno.h> +#include <netdb.h> +#include <stdio.h> +#include <stdlib.h> +#include <string.h> +#include <unistd.h> + +#include <media/stagefright/HTTPStream.h> +#include <media/stagefright/MediaDebug.h> + +namespace android { + +// static +const char *HTTPStream::kStatusKey = ":status:"; + +HTTPStream::HTTPStream() + : mState(READY), + mSocket(-1) { +} + +HTTPStream::~HTTPStream() { + disconnect(); +} + +status_t HTTPStream::connect(const char *server, int port) { + status_t err = OK; + + if (mState == CONNECTED) { + return ERROR_ALREADY_CONNECTED; + } + + CHECK_EQ(mSocket, -1); + mSocket = socket(AF_INET, SOCK_STREAM, 0); + + if (mSocket < 0) { + return UNKNOWN_ERROR; + } + + struct hostent *ent = gethostbyname(server); + if (ent == NULL) { + err = ERROR_UNKNOWN_HOST; + goto exit1; + } + + struct sockaddr_in addr; + addr.sin_family = AF_INET; + addr.sin_port = htons(port); + addr.sin_addr.s_addr = *(in_addr_t *)ent->h_addr; + memset(addr.sin_zero, 0, sizeof(addr.sin_zero)); + + if (::connect(mSocket, (const struct sockaddr *)&addr, sizeof(addr)) < 0) { + err = ERROR_CANNOT_CONNECT; + goto exit1; + } + + mState = CONNECTED; + + return OK; + +exit1: + close(mSocket); + mSocket = -1; + + return err; +} + +status_t HTTPStream::disconnect() { + if (mState != CONNECTED) { + return ERROR_NOT_CONNECTED; + } + + CHECK(mSocket >= 0); + close(mSocket); + mSocket = -1; + + mState = READY; + + return OK; +} + +status_t HTTPStream::send(const char *data, size_t size) { + if 
(mState != CONNECTED) { + return ERROR_NOT_CONNECTED; + } + + while (size > 0) { + ssize_t n = ::send(mSocket, data, size, 0); + + if (n < 0) { + if (errno == EINTR) { + continue; + } + + disconnect(); + + return ERROR_IO; + } else if (n == 0) { + disconnect(); + + return ERROR_CONNECTION_LOST; + } + + size -= (size_t)n; + data += (size_t)n; + } + + return OK; +} + +status_t HTTPStream::send(const char *data) { + return send(data, strlen(data)); +} + +status_t HTTPStream::receive_line(char *line, size_t size) { + if (mState != CONNECTED) { + return ERROR_NOT_CONNECTED; + } + + bool saw_CR = false; + size_t length = 0; + + for (;;) { + char c; + ssize_t n = recv(mSocket, &c, 1, 0); + if (n < 0) { + if (errno == EINTR) { + continue; + } + + disconnect(); + + return ERROR_IO; + } else if (n == 0) { + disconnect(); + + return ERROR_CONNECTION_LOST; + } + + if (saw_CR && c == '\n') { + // We have a complete line. + + line[length - 1] = '\0'; + return OK; + } + + saw_CR = (c == '\r'); + + CHECK(length + 1 < size); + line[length++] = c; + } +} + +status_t HTTPStream::receive_header(int *http_status) { + *http_status = -1; + mHeaders.clear(); + + char line[1024]; + status_t err = receive_line(line, sizeof(line)); + if (err != OK) { + return err; + } + + mHeaders.add(string(kStatusKey), string(line)); + + char *spacePos = strchr(line, ' '); + if (spacePos == NULL) { + // Malformed response? + return UNKNOWN_ERROR; + } + + char *status_start = spacePos + 1; + char *status_end = status_start; + while (isdigit(*status_end)) { + ++status_end; + } + + if (status_end == status_start) { + // Malformed response, status missing? 
+ return UNKNOWN_ERROR; + } + + memmove(line, status_start, status_end - status_start); + line[status_end - status_start] = '\0'; + + long tmp = strtol(line, NULL, 10); + if (tmp < 0 || tmp > 999) { + return UNKNOWN_ERROR; + } + + *http_status = (int)tmp; + + for (;;) { + err = receive_line(line, sizeof(line)); + if (err != OK) { + return err; + } + + if (*line == '\0') { + // Empty line signals the end of the header. + break; + } + + // puts(line); + + char *colonPos = strchr(line, ':'); + if (colonPos == NULL) { + mHeaders.add(string(line), string()); + } else { + char *end_of_key = colonPos; + while (end_of_key > line && isspace(end_of_key[-1])) { + --end_of_key; + } + + char *start_of_value = colonPos + 1; + while (isspace(*start_of_value)) { + ++start_of_value; + } + + *end_of_key = '\0'; + + mHeaders.add(string(line), string(start_of_value)); + } + } + + return OK; +} + +ssize_t HTTPStream::receive(void *data, size_t size) { + size_t total = 0; + while (total < size) { + ssize_t n = recv(mSocket, (char *)data + total, size - total, 0); + + if (n < 0) { + if (errno == EINTR) { + continue; + } + + disconnect(); + return ERROR_IO; + } else if (n == 0) { + disconnect(); + + return ERROR_CONNECTION_LOST; + } + + total += (size_t)n; + } + + return (ssize_t)total; +} + +bool HTTPStream::find_header_value(const string &key, string *value) const { + ssize_t index = mHeaders.indexOfKey(key); + if (index < 0) { + value->clear(); + return false; + } + + *value = mHeaders.valueAt(index); + + return true; +} + +} // namespace android + diff --git a/media/libstagefright/JPEGSource.cpp b/media/libstagefright/JPEGSource.cpp new file mode 100644 index 000000000000..d1dfd83836b0 --- /dev/null +++ b/media/libstagefright/JPEGSource.cpp @@ -0,0 +1,233 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// #define LOG_NDEBUG 0 +#define LOG_TAG "JPEGSource" +#include <utils/Log.h> + +#include <media/stagefright/DataSource.h> +#include <media/stagefright/JPEGSource.h> +#include <media/stagefright/MediaBufferGroup.h> +#include <media/stagefright/MediaDebug.h> +#include <media/stagefright/MediaDefs.h> +#include <media/stagefright/MediaErrors.h> +#include <media/stagefright/MetaData.h> + +#define JPEG_SOF0 0xC0 /* nStart Of Frame N*/ +#define JPEG_SOF1 0xC1 /* N indicates which compression process*/ +#define JPEG_SOF2 0xC2 /* Only SOF0-SOF2 are now in common use*/ +#define JPEG_SOF3 0xC3 +#define JPEG_SOF5 0xC5 /* NB: codes C4 and CC are NOT SOF markers*/ +#define JPEG_SOF6 0xC6 +#define JPEG_SOF7 0xC7 +#define JPEG_SOF9 0xC9 +#define JPEG_SOF10 0xCA +#define JPEG_SOF11 0xCB +#define JPEG_SOF13 0xCD +#define JPEG_SOF14 0xCE +#define JPEG_SOF15 0xCF +#define JPEG_SOI 0xD8 /* nStart Of Image (beginning of datastream)*/ +#define JPEG_EOI 0xD9 /* End Of Image (end of datastream)*/ +#define JPEG_SOS 0xDA /* nStart Of Scan (begins compressed data)*/ +#define JPEG_JFIF 0xE0 /* Jfif marker*/ +#define JPEG_EXIF 0xE1 /* Exif marker*/ +#define JPEG_COM 0xFE /* COMment */ +#define JPEG_DQT 0xDB +#define JPEG_DHT 0xC4 +#define JPEG_DRI 0xDD + +namespace android { + +JPEGSource::JPEGSource(const sp<DataSource> &source) + : mSource(source), + mGroup(NULL), + mStarted(false), + mSize(0), + mWidth(0), + mHeight(0), + mOffset(0) { + CHECK_EQ(parseJPEG(), OK); + CHECK(mSource->getSize(&mSize) == OK); +} + +JPEGSource::~JPEGSource() { + if (mStarted) { + stop(); + 
} +} + +status_t JPEGSource::start(MetaData *) { + if (mStarted) { + return UNKNOWN_ERROR; + } + + mGroup = new MediaBufferGroup; + mGroup->add_buffer(new MediaBuffer(mSize)); + + mOffset = 0; + + mStarted = true; + + return OK; +} + +status_t JPEGSource::stop() { + if (!mStarted) { + return UNKNOWN_ERROR; + } + + delete mGroup; + mGroup = NULL; + + mStarted = false; + + return OK; +} + +sp<MetaData> JPEGSource::getFormat() { + sp<MetaData> meta = new MetaData; + meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_IMAGE_JPEG); + meta->setInt32(kKeyWidth, mWidth); + meta->setInt32(kKeyHeight, mHeight); + meta->setInt32(kKeyMaxInputSize, mSize); + + return meta; +} + +status_t JPEGSource::read( + MediaBuffer **out, const ReadOptions *options) { + *out = NULL; + + int64_t seekTimeUs; + if (options != NULL && options->getSeekTo(&seekTimeUs)) { + return UNKNOWN_ERROR; + } + + MediaBuffer *buffer; + mGroup->acquire_buffer(&buffer); + + ssize_t n = mSource->read_at(mOffset, buffer->data(), mSize - mOffset); + + if (n <= 0) { + buffer->release(); + buffer = NULL; + + return UNKNOWN_ERROR; + } + + buffer->set_range(0, n); + + mOffset += n; + + *out = buffer; + + return OK; +} + +status_t JPEGSource::parseJPEG() { + mWidth = 0; + mHeight = 0; + + off_t i = 0; + + uint16_t soi; + if (!mSource->getUInt16(i, &soi)) { + return ERROR_IO; + } + + i += 2; + + if (soi != 0xffd8) { + return UNKNOWN_ERROR; + } + + for (;;) { + uint8_t marker; + if (mSource->read_at(i++, &marker, 1) != 1) { + return ERROR_IO; + } + + CHECK_EQ(marker, 0xff); + + if (mSource->read_at(i++, &marker, 1) != 1) { + return ERROR_IO; + } + + CHECK(marker != 0xff); + + uint16_t chunkSize; + if (!mSource->getUInt16(i, &chunkSize)) { + return ERROR_IO; + } + + i += 2; + + if (chunkSize < 2) { + return UNKNOWN_ERROR; + } + + switch (marker) { + case JPEG_SOS: + { + return (mWidth > 0 && mHeight > 0) ? 
OK : UNKNOWN_ERROR; + } + + case JPEG_EOI: + { + return UNKNOWN_ERROR; + } + + case JPEG_SOF0: + case JPEG_SOF1: + case JPEG_SOF3: + case JPEG_SOF5: + case JPEG_SOF6: + case JPEG_SOF7: + case JPEG_SOF9: + case JPEG_SOF10: + case JPEG_SOF11: + case JPEG_SOF13: + case JPEG_SOF14: + case JPEG_SOF15: + { + uint16_t width, height; + if (!mSource->getUInt16(i + 1, &height) + || !mSource->getUInt16(i + 3, &width)) { + return ERROR_IO; + } + + mWidth = width; + mHeight = height; + + i += chunkSize - 2; + break; + } + + default: + { + // Skip chunk + + i += chunkSize - 2; + + break; + } + } + } + + return OK; +} + +} // namespace android diff --git a/media/libstagefright/MP3Extractor.cpp b/media/libstagefright/MP3Extractor.cpp new file mode 100644 index 000000000000..7fd699fe22bf --- /dev/null +++ b/media/libstagefright/MP3Extractor.cpp @@ -0,0 +1,520 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "MP3Extractor" +#include <utils/Log.h> + +#include <media/stagefright/DataSource.h> +#include <media/stagefright/MP3Extractor.h> +#include <media/stagefright/MediaBuffer.h> +#include <media/stagefright/MediaBufferGroup.h> +#include <media/stagefright/MediaDebug.h> +#include <media/stagefright/MediaDefs.h> +#include <media/stagefright/MediaErrors.h> +#include <media/stagefright/MediaSource.h> +#include <media/stagefright/MetaData.h> +#include <media/stagefright/Utils.h> +#include <utils/String8.h> + +namespace android { + +static bool get_mp3_frame_size( + uint32_t header, size_t *frame_size, + int *out_sampling_rate = NULL, int *out_channels = NULL, + int *out_bitrate = NULL) { + *frame_size = 0; + + if (out_sampling_rate) { + *out_sampling_rate = 0; + } + + if (out_channels) { + *out_channels = 0; + } + + if (out_bitrate) { + *out_bitrate = 0; + } + + if ((header & 0xffe00000) != 0xffe00000) { + return false; + } + + unsigned version = (header >> 19) & 3; + + if (version == 0x01) { + return false; + } + + unsigned layer = (header >> 17) & 3; + + if (layer == 0x00) { + return false; + } + + unsigned protection = (header >> 16) & 1; + + unsigned bitrate_index = (header >> 12) & 0x0f; + + if (bitrate_index == 0 || bitrate_index == 0x0f) { + // Disallow "free" bitrate. 
+ return false; + } + + unsigned sampling_rate_index = (header >> 10) & 3; + + if (sampling_rate_index == 3) { + return false; + } + + static const int kSamplingRateV1[] = { 44100, 48000, 32000 }; + int sampling_rate = kSamplingRateV1[sampling_rate_index]; + if (version == 2 /* V2 */) { + sampling_rate /= 2; + } else if (version == 0 /* V2.5 */) { + sampling_rate /= 4; + } + + unsigned padding = (header >> 9) & 1; + + if (layer == 3) { + // layer I + + static const int kBitrateV1[] = { + 32, 64, 96, 128, 160, 192, 224, 256, + 288, 320, 352, 384, 416, 448 + }; + + static const int kBitrateV2[] = { + 32, 48, 56, 64, 80, 96, 112, 128, + 144, 160, 176, 192, 224, 256 + }; + + int bitrate = + (version == 3 /* V1 */) + ? kBitrateV1[bitrate_index - 1] + : kBitrateV2[bitrate_index - 1]; + + if (out_bitrate) { + *out_bitrate = bitrate; + } + + *frame_size = (12000 * bitrate / sampling_rate + padding) * 4; + } else { + // layer II or III + + static const int kBitrateV1L2[] = { + 32, 48, 56, 64, 80, 96, 112, 128, + 160, 192, 224, 256, 320, 384 + }; + + static const int kBitrateV1L3[] = { + 32, 40, 48, 56, 64, 80, 96, 112, + 128, 160, 192, 224, 256, 320 + }; + + static const int kBitrateV2[] = { + 8, 16, 24, 32, 40, 48, 56, 64, + 80, 96, 112, 128, 144, 160 + }; + + int bitrate; + if (version == 3 /* V1 */) { + bitrate = (layer == 2 /* L2 */) + ? kBitrateV1L2[bitrate_index - 1] + : kBitrateV1L3[bitrate_index - 1]; + } else { + // V2 (or 2.5) + + bitrate = kBitrateV2[bitrate_index - 1]; + } + + if (out_bitrate) { + *out_bitrate = bitrate; + } + + *frame_size = 144000 * bitrate / sampling_rate + padding; + } + + if (out_sampling_rate) { + *out_sampling_rate = sampling_rate; + } + + if (out_channels) { + int channel_mode = (header >> 6) & 3; + + *out_channels = (channel_mode == 3) ? 
1 : 2; + } + + return true; +} + +static bool Resync( + const sp<DataSource> &source, uint32_t match_header, + off_t *inout_pos, uint32_t *out_header) { + // Everything must match except for + // protection, bitrate, padding, private bits and mode extension. + const uint32_t kMask = 0xfffe0ccf; + + const size_t kMaxFrameSize = 4096; + uint8_t *buffer = new uint8_t[kMaxFrameSize]; + + off_t pos = *inout_pos - kMaxFrameSize; + size_t buffer_offset = kMaxFrameSize; + size_t buffer_length = kMaxFrameSize; + bool valid = false; + do { + if (buffer_offset + 3 >= buffer_length) { + if (buffer_length < kMaxFrameSize) { + break; + } + + pos += buffer_offset; + + if (pos >= *inout_pos + 128 * 1024) { + // Don't scan forever. + LOGV("giving up at offset %ld", pos); + break; + } + + memmove(buffer, &buffer[buffer_offset], buffer_length - buffer_offset); + buffer_length = buffer_length - buffer_offset; + buffer_offset = 0; + + ssize_t n = source->read_at( + pos, &buffer[buffer_length], kMaxFrameSize - buffer_length); + + if (n <= 0) { + break; + } + + buffer_length += (size_t)n; + + continue; + } + + uint32_t header = U32_AT(&buffer[buffer_offset]); + + if (match_header != 0 && (header & kMask) != (match_header & kMask)) { + ++buffer_offset; + continue; + } + + size_t frame_size; + int sample_rate, num_channels, bitrate; + if (!get_mp3_frame_size(header, &frame_size, + &sample_rate, &num_channels, &bitrate)) { + ++buffer_offset; + continue; + } + + LOGV("found possible 1st frame at %ld", pos + buffer_offset); + + // We found what looks like a valid frame, + // now find its successors. 
+ + off_t test_pos = pos + buffer_offset + frame_size; + + valid = true; + for (int j = 0; j < 3; ++j) { + uint8_t tmp[4]; + if (source->read_at(test_pos, tmp, 4) < 4) { + valid = false; + break; + } + + uint32_t test_header = U32_AT(tmp); + + LOGV("subsequent header is %08x", test_header); + + if ((test_header & kMask) != (header & kMask)) { + valid = false; + break; + } + + size_t test_frame_size; + if (!get_mp3_frame_size(test_header, &test_frame_size)) { + valid = false; + break; + } + + LOGV("found subsequent frame #%d at %ld", j + 2, test_pos); + + test_pos += test_frame_size; + } + + if (valid) { + *inout_pos = pos + buffer_offset; + + if (out_header != NULL) { + *out_header = header; + } + } else { + LOGV("no dice, no valid sequence of frames found."); + } + + ++buffer_offset; + + } while (!valid); + + delete[] buffer; + buffer = NULL; + + return valid; +} + +class MP3Source : public MediaSource { +public: + MP3Source( + const sp<MetaData> &meta, const sp<DataSource> &source, + off_t first_frame_pos, uint32_t fixed_header); + + virtual status_t start(MetaData *params = NULL); + virtual status_t stop(); + + virtual sp<MetaData> getFormat(); + + virtual status_t read( + MediaBuffer **buffer, const ReadOptions *options = NULL); + +protected: + virtual ~MP3Source(); + +private: + sp<MetaData> mMeta; + sp<DataSource> mDataSource; + off_t mFirstFramePos; + uint32_t mFixedHeader; + off_t mCurrentPos; + int64_t mCurrentTimeUs; + bool mStarted; + + MediaBufferGroup *mGroup; + + MP3Source(const MP3Source &); + MP3Source &operator=(const MP3Source &); +}; + +MP3Extractor::MP3Extractor(const sp<DataSource> &source) + : mDataSource(source), + mFirstFramePos(-1), + mFixedHeader(0) { + off_t pos = 0; + uint32_t header; + bool success = Resync(mDataSource, 0, &pos, &header); + CHECK(success); + + if (success) { + mFirstFramePos = pos; + mFixedHeader = header; + + size_t frame_size; + int sample_rate; + int num_channels; + int bitrate; + get_mp3_frame_size( + header, 
&frame_size, &sample_rate, &num_channels, &bitrate); + + mMeta = new MetaData; + + mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_MPEG); + mMeta->setInt32(kKeySampleRate, sample_rate); + mMeta->setInt32(kKeyBitRate, bitrate); + mMeta->setInt32(kKeyChannelCount, num_channels); + + off_t fileSize; + if (mDataSource->getSize(&fileSize) == OK) { + mMeta->setInt32( + kKeyDuration, + 8 * (fileSize - mFirstFramePos) / bitrate); + mMeta->setInt32(kKeyTimeScale, 1000); + } + } +} + +MP3Extractor::~MP3Extractor() { +} + +size_t MP3Extractor::countTracks() { + return (mFirstFramePos < 0) ? 0 : 1; +} + +sp<MediaSource> MP3Extractor::getTrack(size_t index) { + if (mFirstFramePos < 0 || index != 0) { + return NULL; + } + + return new MP3Source( + mMeta, mDataSource, mFirstFramePos, mFixedHeader); +} + +sp<MetaData> MP3Extractor::getTrackMetaData(size_t index) { + if (mFirstFramePos < 0 || index != 0) { + return NULL; + } + + return mMeta; +} + +//////////////////////////////////////////////////////////////////////////////// + +MP3Source::MP3Source( + const sp<MetaData> &meta, const sp<DataSource> &source, + off_t first_frame_pos, uint32_t fixed_header) + : mMeta(meta), + mDataSource(source), + mFirstFramePos(first_frame_pos), + mFixedHeader(fixed_header), + mCurrentPos(0), + mCurrentTimeUs(0), + mStarted(false), + mGroup(NULL) { +} + +MP3Source::~MP3Source() { + if (mStarted) { + stop(); + } +} + +status_t MP3Source::start(MetaData *) { + CHECK(!mStarted); + + mGroup = new MediaBufferGroup; + + const size_t kMaxFrameSize = 32768; + mGroup->add_buffer(new MediaBuffer(kMaxFrameSize)); + + mCurrentPos = mFirstFramePos; + mCurrentTimeUs = 0; + + mStarted = true; + + return OK; +} + +status_t MP3Source::stop() { + CHECK(mStarted); + + delete mGroup; + mGroup = NULL; + + mStarted = false; + + return OK; +} + +sp<MetaData> MP3Source::getFormat() { + return mMeta; +} + +status_t MP3Source::read( + MediaBuffer **out, const ReadOptions *options) { + *out = NULL; + + int64_t 
seekTimeUs; + if (options != NULL && options->getSeekTo(&seekTimeUs)) { + int32_t bitrate; + if (!mMeta->findInt32(kKeyBitRate, &bitrate)) { + // bitrate is in kbits/sec. + LOGI("no bitrate"); + + return ERROR_UNSUPPORTED; + } + + mCurrentTimeUs = seekTimeUs; + mCurrentPos = mFirstFramePos + seekTimeUs * bitrate / 1000000 * 125; + } + + MediaBuffer *buffer; + status_t err = mGroup->acquire_buffer(&buffer); + if (err != OK) { + return err; + } + + size_t frame_size; + for (;;) { + ssize_t n = mDataSource->read_at(mCurrentPos, buffer->data(), 4); + if (n < 4) { + buffer->release(); + buffer = NULL; + + return ERROR_END_OF_STREAM; + } + + uint32_t header = U32_AT((const uint8_t *)buffer->data()); + + if (get_mp3_frame_size(header, &frame_size)) { + break; + } + + // Lost sync. + LOGW("lost sync!\n"); + + off_t pos = mCurrentPos; + if (!Resync(mDataSource, mFixedHeader, &pos, NULL)) { + LOGE("Unable to resync. Signalling end of stream."); + + buffer->release(); + buffer = NULL; + + return ERROR_END_OF_STREAM; + } + + mCurrentPos = pos; + + // Try again with the new position. 
+ } + + CHECK(frame_size <= buffer->size()); + + ssize_t n = mDataSource->read_at(mCurrentPos, buffer->data(), frame_size); + if (n < (ssize_t)frame_size) { + buffer->release(); + buffer = NULL; + + return ERROR_END_OF_STREAM; + } + + buffer->set_range(0, frame_size); + + buffer->meta_data()->setInt32(kKeyTimeUnits, mCurrentTimeUs / 1000); + buffer->meta_data()->setInt32(kKeyTimeScale, 1000); + + mCurrentPos += frame_size; + mCurrentTimeUs += 1152 * 1000000 / 44100; + + *out = buffer; + + return OK; +} + +bool SniffMP3( + const sp<DataSource> &source, String8 *mimeType, float *confidence) { + off_t pos = 0; + uint32_t header; + if (!Resync(source, 0, &pos, &header)) { + return false; + } + + *mimeType = MEDIA_MIMETYPE_AUDIO_MPEG; + *confidence = 0.3f; + + return true; +} + +} // namespace android diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp new file mode 100644 index 000000000000..9174d19a8b77 --- /dev/null +++ b/media/libstagefright/MPEG4Extractor.cpp @@ -0,0 +1,994 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "MPEG4Extractor" +#include <utils/Log.h> + +#include <arpa/inet.h> + +#include <ctype.h> +#include <stdint.h> +#include <stdlib.h> +#include <string.h> + +#include <media/stagefright/DataSource.h> +#include <media/stagefright/MPEG4Extractor.h> +#include <media/stagefright/MediaBuffer.h> +#include <media/stagefright/MediaBufferGroup.h> +#include <media/stagefright/MediaDebug.h> +#include <media/stagefright/MediaDefs.h> +#include <media/stagefright/MediaSource.h> +#include <media/stagefright/MetaData.h> +#include <media/stagefright/SampleTable.h> +#include <media/stagefright/Utils.h> +#include <utils/String8.h> + +namespace android { + +class MPEG4Source : public MediaSource { +public: + // Caller retains ownership of both "dataSource" and "sampleTable". + MPEG4Source(const sp<MetaData> &format, + const sp<DataSource> &dataSource, + const sp<SampleTable> &sampleTable); + + virtual status_t start(MetaData *params = NULL); + virtual status_t stop(); + + virtual sp<MetaData> getFormat(); + + virtual status_t read( + MediaBuffer **buffer, const ReadOptions *options = NULL); + +protected: + virtual ~MPEG4Source(); + +private: + sp<MetaData> mFormat; + sp<DataSource> mDataSource; + int32_t mTimescale; + sp<SampleTable> mSampleTable; + uint32_t mCurrentSampleIndex; + + bool mIsAVC; + bool mStarted; + + MediaBufferGroup *mGroup; + + MediaBuffer *mBuffer; + + bool mWantsNALFragments; + + uint8_t *mSrcBuffer; + + MPEG4Source(const MPEG4Source &); + MPEG4Source &operator=(const MPEG4Source &); +}; + +static void hexdump(const void *_data, size_t size) { + const uint8_t *data = (const uint8_t *)_data; + size_t offset = 0; + while (offset < size) { + printf("0x%04x ", offset); + + size_t n = size - offset; + if (n > 16) { + n = 16; + } + + for (size_t i = 0; i < 16; ++i) { + if (i == 8) { + printf(" "); + } + + if (offset + i < size) { + printf("%02x ", data[offset + i]); + } else { + printf(" "); + } + } + + printf(" "); + + for (size_t i = 0; i < n; ++i) 
{ + if (isprint(data[offset + i])) { + printf("%c", data[offset + i]); + } else { + printf("."); + } + } + + printf("\n"); + + offset += 16; + } +} + +static const char *FourCC2MIME(uint32_t fourcc) { + switch (fourcc) { + case FOURCC('m', 'p', '4', 'a'): + return MEDIA_MIMETYPE_AUDIO_AAC; + + case FOURCC('s', 'a', 'm', 'r'): + return MEDIA_MIMETYPE_AUDIO_AMR_NB; + + case FOURCC('s', 'a', 'w', 'b'): + return MEDIA_MIMETYPE_AUDIO_AMR_WB; + + case FOURCC('m', 'p', '4', 'v'): + return MEDIA_MIMETYPE_VIDEO_MPEG4; + + case FOURCC('s', '2', '6', '3'): + return MEDIA_MIMETYPE_VIDEO_H263; + + case FOURCC('a', 'v', 'c', '1'): + return MEDIA_MIMETYPE_VIDEO_AVC; + + default: + CHECK(!"should not be here."); + return NULL; + } +} + +MPEG4Extractor::MPEG4Extractor(const sp<DataSource> &source) + : mDataSource(source), + mHaveMetadata(false), + mFirstTrack(NULL), + mLastTrack(NULL) { +} + +MPEG4Extractor::~MPEG4Extractor() { + Track *track = mFirstTrack; + while (track) { + Track *next = track->next; + + delete track; + track = next; + } + mFirstTrack = mLastTrack = NULL; +} + +size_t MPEG4Extractor::countTracks() { + status_t err; + if ((err = readMetaData()) != OK) { + return 0; + } + + size_t n = 0; + Track *track = mFirstTrack; + while (track) { + ++n; + track = track->next; + } + + return n; +} + +sp<MetaData> MPEG4Extractor::getTrackMetaData(size_t index) { + status_t err; + if ((err = readMetaData()) != OK) { + return NULL; + } + + Track *track = mFirstTrack; + while (index > 0) { + if (track == NULL) { + return NULL; + } + + track = track->next; + --index; + } + + if (track == NULL) { + return NULL; + } + + return track->meta; +} + +status_t MPEG4Extractor::readMetaData() { + if (mHaveMetadata) { + return OK; + } + + off_t offset = 0; + status_t err; + while ((err = parseChunk(&offset, 0)) == OK) { + } + + if (mHaveMetadata) { + return OK; + } + + return err; +} + +static void MakeFourCCString(uint32_t x, char *s) { + s[0] = x >> 24; + s[1] = (x >> 16) & 0xff; + s[2] = 
(x >> 8) & 0xff; + s[3] = x & 0xff; + s[4] = '\0'; +} + +status_t MPEG4Extractor::parseChunk(off_t *offset, int depth) { + uint32_t hdr[2]; + if (mDataSource->read_at(*offset, hdr, 8) < 8) { + return ERROR_IO; + } + uint64_t chunk_size = ntohl(hdr[0]); + uint32_t chunk_type = ntohl(hdr[1]); + off_t data_offset = *offset + 8; + + if (chunk_size == 1) { + if (mDataSource->read_at(*offset + 8, &chunk_size, 8) < 8) { + return ERROR_IO; + } + chunk_size = ntoh64(chunk_size); + data_offset += 8; + } + + char chunk[5]; + MakeFourCCString(chunk_type, chunk); + +#if 0 + static const char kWhitespace[] = " "; + const char *indent = &kWhitespace[sizeof(kWhitespace) - 1 - 2 * depth]; + printf("%sfound chunk '%s' of size %lld\n", indent, chunk, chunk_size); + + char buffer[256]; + if (chunk_size <= sizeof(buffer)) { + if (mDataSource->read_at(*offset, buffer, chunk_size) < chunk_size) { + return ERROR_IO; + } + + hexdump(buffer, chunk_size); + } +#endif + + off_t chunk_data_size = *offset + chunk_size - data_offset; + + switch(chunk_type) { + case FOURCC('m', 'o', 'o', 'v'): + case FOURCC('t', 'r', 'a', 'k'): + case FOURCC('m', 'd', 'i', 'a'): + case FOURCC('m', 'i', 'n', 'f'): + case FOURCC('d', 'i', 'n', 'f'): + case FOURCC('s', 't', 'b', 'l'): + case FOURCC('m', 'v', 'e', 'x'): + case FOURCC('m', 'o', 'o', 'f'): + case FOURCC('t', 'r', 'a', 'f'): + case FOURCC('m', 'f', 'r', 'a'): + case FOURCC('s', 'k', 'i' ,'p'): + { + off_t stop_offset = *offset + chunk_size; + *offset = data_offset; + while (*offset < stop_offset) { + status_t err = parseChunk(offset, depth + 1); + if (err != OK) { + return err; + } + } + CHECK_EQ(*offset, stop_offset); + + if (chunk_type == FOURCC('m', 'o', 'o', 'v')) { + mHaveMetadata = true; + + return UNKNOWN_ERROR; // Return a dummy error. 
+ } + break; + } + + case FOURCC('t', 'k', 'h', 'd'): + { + CHECK(chunk_data_size >= 4); + + uint8_t version; + if (mDataSource->read_at(data_offset, &version, 1) < 1) { + return ERROR_IO; + } + + uint64_t ctime, mtime, duration; + int32_t id; + uint32_t width, height; + + if (version == 1) { + if (chunk_data_size != 36 + 60) { + return ERROR_MALFORMED; + } + + uint8_t buffer[36 + 60]; + if (mDataSource->read_at( + data_offset, buffer, sizeof(buffer)) < (ssize_t)sizeof(buffer)) { + return ERROR_IO; + } + + ctime = U64_AT(&buffer[4]); + mtime = U64_AT(&buffer[12]); + id = U32_AT(&buffer[20]); + duration = U64_AT(&buffer[28]); + width = U32_AT(&buffer[88]); + height = U32_AT(&buffer[92]); + } else if (version == 0) { + if (chunk_data_size != 24 + 60) { + return ERROR_MALFORMED; + } + + uint8_t buffer[24 + 60]; + if (mDataSource->read_at( + data_offset, buffer, sizeof(buffer)) < (ssize_t)sizeof(buffer)) { + return ERROR_IO; + } + ctime = U32_AT(&buffer[4]); + mtime = U32_AT(&buffer[8]); + id = U32_AT(&buffer[12]); + duration = U32_AT(&buffer[20]); + width = U32_AT(&buffer[76]); + height = U32_AT(&buffer[80]); + } + + Track *track = new Track; + track->next = NULL; + if (mLastTrack) { + mLastTrack->next = track; + } else { + mFirstTrack = track; + } + mLastTrack = track; + + track->meta = new MetaData; + track->timescale = 0; + track->sampleTable = new SampleTable(mDataSource); + track->meta->setCString(kKeyMIMEType, "application/octet-stream"); + + *offset += chunk_size; + break; + } + + case FOURCC('m', 'd', 'h', 'd'): + { + if (chunk_data_size < 4) { + return ERROR_MALFORMED; + } + + uint8_t version; + if (mDataSource->read_at( + data_offset, &version, sizeof(version)) + < (ssize_t)sizeof(version)) { + return ERROR_IO; + } + + off_t timescale_offset; + + if (version == 1) { + timescale_offset = data_offset + 4 + 16; + } else if (version == 0) { + timescale_offset = data_offset + 4 + 8; + } else { + return ERROR_IO; + } + + uint32_t timescale; + if 
(mDataSource->read_at( + timescale_offset, &timescale, sizeof(timescale)) + < (ssize_t)sizeof(timescale)) { + return ERROR_IO; + } + + mLastTrack->timescale = ntohl(timescale); + mLastTrack->meta->setInt32(kKeyTimeScale, mLastTrack->timescale); + + int64_t duration; + if (version == 1) { + if (mDataSource->read_at( + timescale_offset + 4, &duration, sizeof(duration)) + < (ssize_t)sizeof(duration)) { + return ERROR_IO; + } + duration = ntoh64(duration); + } else { + int32_t duration32; + if (mDataSource->read_at( + timescale_offset + 4, &duration32, sizeof(duration32)) + < (ssize_t)sizeof(duration32)) { + return ERROR_IO; + } + duration = ntohl(duration32); + } + mLastTrack->meta->setInt32(kKeyDuration, duration); + + *offset += chunk_size; + break; + } + + case FOURCC('h', 'd', 'l', 'r'): + { + if (chunk_data_size < 25) { + return ERROR_MALFORMED; + } + + uint8_t buffer[24]; + if (mDataSource->read_at(data_offset, buffer, 24) < 24) { + return ERROR_IO; + } + + if (U32_AT(buffer) != 0) { + // Should be version 0, flags 0. + return ERROR_MALFORMED; + } + + if (U32_AT(&buffer[4]) != 0) { + // pre_defined should be 0. + return ERROR_MALFORMED; + } + + mHandlerType = U32_AT(&buffer[8]); + *offset += chunk_size; + break; + } + + case FOURCC('s', 't', 's', 'd'): + { + if (chunk_data_size < 8) { + return ERROR_MALFORMED; + } + + uint8_t buffer[8]; + CHECK(chunk_data_size >= (off_t)sizeof(buffer)); + if (mDataSource->read_at( + data_offset, buffer, 8) < 8) { + return ERROR_IO; + } + + if (U32_AT(buffer) != 0) { + // Should be version 0, flags 0. + return ERROR_MALFORMED; + } + + uint32_t entry_count = U32_AT(&buffer[4]); + + if (entry_count > 1) { + // For now we only support a single type of media per track. 
+ return ERROR_UNSUPPORTED; + } + + off_t stop_offset = *offset + chunk_size; + *offset = data_offset + 8; + for (uint32_t i = 0; i < entry_count; ++i) { + status_t err = parseChunk(offset, depth + 1); + if (err != OK) { + return err; + } + } + CHECK_EQ(*offset, stop_offset); + break; + } + + case FOURCC('m', 'p', '4', 'a'): + case FOURCC('s', 'a', 'm', 'r'): + case FOURCC('s', 'a', 'w', 'b'): + { + if (mHandlerType != FOURCC('s', 'o', 'u', 'n')) { + return ERROR_MALFORMED; + } + + uint8_t buffer[8 + 20]; + if (chunk_data_size < (ssize_t)sizeof(buffer)) { + // Basic AudioSampleEntry size. + return ERROR_MALFORMED; + } + + if (mDataSource->read_at( + data_offset, buffer, sizeof(buffer)) < (ssize_t)sizeof(buffer)) { + return ERROR_IO; + } + + uint16_t data_ref_index = U16_AT(&buffer[6]); + uint16_t num_channels = U16_AT(&buffer[16]); + + if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_NB, + FourCC2MIME(chunk_type)) + || !strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_WB, + FourCC2MIME(chunk_type))) { + // AMR audio is always mono. 
+ num_channels = 1; + } + + uint16_t sample_size = U16_AT(&buffer[18]); + uint32_t sample_rate = U32_AT(&buffer[24]) >> 16; + + printf("*** coding='%s' %d channels, size %d, rate %d\n", + chunk, num_channels, sample_size, sample_rate); + + mLastTrack->meta->setCString(kKeyMIMEType, FourCC2MIME(chunk_type)); + mLastTrack->meta->setInt32(kKeyChannelCount, num_channels); + mLastTrack->meta->setInt32(kKeySampleRate, sample_rate); + + off_t stop_offset = *offset + chunk_size; + *offset = data_offset + sizeof(buffer); + while (*offset < stop_offset) { + status_t err = parseChunk(offset, depth + 1); + if (err != OK) { + return err; + } + } + CHECK_EQ(*offset, stop_offset); + break; + } + + case FOURCC('m', 'p', '4', 'v'): + case FOURCC('s', '2', '6', '3'): + case FOURCC('a', 'v', 'c', '1'): + { + if (mHandlerType != FOURCC('v', 'i', 'd', 'e')) { + return ERROR_MALFORMED; + } + + uint8_t buffer[78]; + if (chunk_data_size < (ssize_t)sizeof(buffer)) { + // Basic VideoSampleEntry size. + return ERROR_MALFORMED; + } + + if (mDataSource->read_at( + data_offset, buffer, sizeof(buffer)) < (ssize_t)sizeof(buffer)) { + return ERROR_IO; + } + + uint16_t data_ref_index = U16_AT(&buffer[6]); + uint16_t width = U16_AT(&buffer[6 + 18]); + uint16_t height = U16_AT(&buffer[6 + 20]); + + printf("*** coding='%s' width=%d height=%d\n", + chunk, width, height); + + mLastTrack->meta->setCString(kKeyMIMEType, FourCC2MIME(chunk_type)); + mLastTrack->meta->setInt32(kKeyWidth, width); + mLastTrack->meta->setInt32(kKeyHeight, height); + + off_t stop_offset = *offset + chunk_size; + *offset = data_offset + sizeof(buffer); + while (*offset < stop_offset) { + status_t err = parseChunk(offset, depth + 1); + if (err != OK) { + return err; + } + } + CHECK_EQ(*offset, stop_offset); + break; + } + + case FOURCC('s', 't', 'c', 'o'): + case FOURCC('c', 'o', '6', '4'): + { + status_t err = + mLastTrack->sampleTable->setChunkOffsetParams( + chunk_type, data_offset, chunk_data_size); + + if (err != OK) { + 
return err; + } + + *offset += chunk_size; + break; + } + + case FOURCC('s', 't', 's', 'c'): + { + status_t err = + mLastTrack->sampleTable->setSampleToChunkParams( + data_offset, chunk_data_size); + + if (err != OK) { + return err; + } + + *offset += chunk_size; + break; + } + + case FOURCC('s', 't', 's', 'z'): + case FOURCC('s', 't', 'z', '2'): + { + status_t err = + mLastTrack->sampleTable->setSampleSizeParams( + chunk_type, data_offset, chunk_data_size); + + if (err != OK) { + return err; + } + + *offset += chunk_size; + break; + } + + case FOURCC('s', 't', 't', 's'): + { + status_t err = + mLastTrack->sampleTable->setTimeToSampleParams( + data_offset, chunk_data_size); + + if (err != OK) { + return err; + } + + *offset += chunk_size; + break; + } + + case FOURCC('s', 't', 's', 's'): + { + status_t err = + mLastTrack->sampleTable->setSyncSampleParams( + data_offset, chunk_data_size); + + if (err != OK) { + return err; + } + + *offset += chunk_size; + break; + } + + case FOURCC('e', 's', 'd', 's'): + { + if (chunk_data_size < 4) { + return ERROR_MALFORMED; + } + + uint8_t buffer[256]; + if (chunk_data_size > (off_t)sizeof(buffer)) { + return ERROR_BUFFER_TOO_SMALL; + } + + if (mDataSource->read_at( + data_offset, buffer, chunk_data_size) < chunk_data_size) { + return ERROR_IO; + } + + if (U32_AT(buffer) != 0) { + // Should be version 0, flags 0. 
+ return ERROR_MALFORMED; + } + + mLastTrack->meta->setData( + kKeyESDS, kTypeESDS, &buffer[4], chunk_data_size - 4); + + *offset += chunk_size; + break; + } + + case FOURCC('a', 'v', 'c', 'C'): + { + char buffer[256]; + if (chunk_data_size > (off_t)sizeof(buffer)) { + return ERROR_BUFFER_TOO_SMALL; + } + + if (mDataSource->read_at( + data_offset, buffer, chunk_data_size) < chunk_data_size) { + return ERROR_IO; + } + + mLastTrack->meta->setData( + kKeyAVCC, kTypeAVCC, buffer, chunk_data_size); + + *offset += chunk_size; + break; + } + + default: + { + *offset += chunk_size; + break; + } + } + + return OK; +} + +sp<MediaSource> MPEG4Extractor::getTrack(size_t index) { + status_t err; + if ((err = readMetaData()) != OK) { + return NULL; + } + + Track *track = mFirstTrack; + while (index > 0) { + if (track == NULL) { + return NULL; + } + + track = track->next; + --index; + } + + if (track == NULL) { + return NULL; + } + + return new MPEG4Source( + track->meta, mDataSource, track->sampleTable); +} + +//////////////////////////////////////////////////////////////////////////////// + +MPEG4Source::MPEG4Source( + const sp<MetaData> &format, + const sp<DataSource> &dataSource, + const sp<SampleTable> &sampleTable) + : mFormat(format), + mDataSource(dataSource), + mTimescale(0), + mSampleTable(sampleTable), + mCurrentSampleIndex(0), + mIsAVC(false), + mStarted(false), + mGroup(NULL), + mBuffer(NULL), + mWantsNALFragments(false), + mSrcBuffer(NULL) { + const char *mime; + bool success = mFormat->findCString(kKeyMIMEType, &mime); + CHECK(success); + + success = mFormat->findInt32(kKeyTimeScale, &mTimescale); + CHECK(success); + + mIsAVC = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC); +} + +MPEG4Source::~MPEG4Source() { + if (mStarted) { + stop(); + } +} + +status_t MPEG4Source::start(MetaData *params) { + CHECK(!mStarted); + + int32_t val; + if (params && params->findInt32(kKeyWantsNALFragments, &val) + && val != 0) { + mWantsNALFragments = true; + } else { + 
mWantsNALFragments = false; + } + + mGroup = new MediaBufferGroup; + + size_t max_size; + status_t err = mSampleTable->getMaxSampleSize(&max_size); + CHECK_EQ(err, OK); + + // Assume that a given buffer only contains at most 10 fragments, + // each fragment originally prefixed with a 2 byte length will + // have a 4 byte header (0x00 0x00 0x00 0x01) after conversion, + // and thus will grow by 2 bytes per fragment. + mGroup->add_buffer(new MediaBuffer(max_size + 10 * 2)); + + mSrcBuffer = new uint8_t[max_size]; + + mStarted = true; + + return OK; +} + +status_t MPEG4Source::stop() { + CHECK(mStarted); + + if (mBuffer != NULL) { + mBuffer->release(); + mBuffer = NULL; + } + + delete[] mSrcBuffer; + mSrcBuffer = NULL; + + delete mGroup; + mGroup = NULL; + + mStarted = false; + mCurrentSampleIndex = 0; + + return OK; +} + +sp<MetaData> MPEG4Source::getFormat() { + return mFormat; +} + +status_t MPEG4Source::read( + MediaBuffer **out, const ReadOptions *options) { + CHECK(mStarted); + + *out = NULL; + + int64_t seekTimeUs; + if (options && options->getSeekTo(&seekTimeUs)) { + uint32_t sampleIndex; + status_t err = mSampleTable->findClosestSample( + seekTimeUs * mTimescale / 1000000, + &sampleIndex, SampleTable::kSyncSample_Flag); + + if (err != OK) { + return err; + } + + mCurrentSampleIndex = sampleIndex; + if (mBuffer != NULL) { + mBuffer->release(); + mBuffer = NULL; + } + + // fall through + } + + off_t offset; + size_t size; + uint32_t dts; + bool newBuffer = false; + if (mBuffer == NULL) { + newBuffer = true; + + status_t err = mSampleTable->getSampleOffsetAndSize( + mCurrentSampleIndex, &offset, &size); + + if (err != OK) { + return err; + } + + err = mSampleTable->getDecodingTime(mCurrentSampleIndex, &dts); + + if (err != OK) { + return err; + } + + err = mGroup->acquire_buffer(&mBuffer); + if (err != OK) { + CHECK_EQ(mBuffer, NULL); + return err; + } + } + + if (!mIsAVC || mWantsNALFragments) { + if (newBuffer) { + ssize_t num_bytes_read = + 
mDataSource->read_at(offset, (uint8_t *)mBuffer->data(), size); + + if (num_bytes_read < (ssize_t)size) { + mBuffer->release(); + mBuffer = NULL; + + return ERROR_IO; + } + + mBuffer->set_range(0, size); + mBuffer->meta_data()->clear(); + mBuffer->meta_data()->setInt32(kKeyTimeUnits, dts); + mBuffer->meta_data()->setInt32(kKeyTimeScale, mTimescale); + ++mCurrentSampleIndex; + } + + if (!mIsAVC) { + *out = mBuffer; + mBuffer = NULL; + + return OK; + } + + // Each NAL unit is split up into its constituent fragments and + // each one of them returned in its own buffer. + + CHECK(mBuffer->range_length() >= 2); + + const uint8_t *src = + (const uint8_t *)mBuffer->data() + mBuffer->range_offset(); + + size_t nal_size = U16_AT(src); + + CHECK(mBuffer->range_length() >= 2 + nal_size); + + MediaBuffer *clone = mBuffer->clone(); + clone->set_range(mBuffer->range_offset() + 2, nal_size); + + mBuffer->set_range( + mBuffer->range_offset() + 2 + nal_size, + mBuffer->range_length() - 2 - nal_size); + + if (mBuffer->range_length() == 0) { + mBuffer->release(); + mBuffer = NULL; + } + + *out = clone; + + return OK; + } else { + // Whole NAL units are returned but each fragment is prefixed by + // the start code (0x00 00 00 01). 
+ + ssize_t num_bytes_read = + mDataSource->read_at(offset, mSrcBuffer, size); + + if (num_bytes_read < (ssize_t)size) { + mBuffer->release(); + mBuffer = NULL; + + return ERROR_IO; + } + + uint8_t *dstData = (uint8_t *)mBuffer->data(); + size_t srcOffset = 0; + size_t dstOffset = 0; + while (srcOffset < size) { + CHECK(srcOffset + 1 < size); + size_t nalLength = + (mSrcBuffer[srcOffset] << 8) | mSrcBuffer[srcOffset + 1]; + CHECK(srcOffset + 1 + nalLength < size); + srcOffset += 2; + + if (nalLength == 0) { + continue; + } + + CHECK(dstOffset + 4 <= mBuffer->size()); + + dstData[dstOffset++] = 0; + dstData[dstOffset++] = 0; + dstData[dstOffset++] = 0; + dstData[dstOffset++] = 1; + memcpy(&dstData[dstOffset], &mSrcBuffer[srcOffset], nalLength); + srcOffset += nalLength; + dstOffset += nalLength; + } + + mBuffer->set_range(0, dstOffset); + mBuffer->meta_data()->clear(); + mBuffer->meta_data()->setInt32(kKeyTimeUnits, dts); + mBuffer->meta_data()->setInt32(kKeyTimeScale, mTimescale); + ++mCurrentSampleIndex; + + *out = mBuffer; + mBuffer = NULL; + + return OK; + } +} + +bool SniffMPEG4( + const sp<DataSource> &source, String8 *mimeType, float *confidence) { + uint8_t header[8]; + + ssize_t n = source->read_at(4, header, sizeof(header)); + if (n < (ssize_t)sizeof(header)) { + return false; + } + + if (!memcmp(header, "ftyp3gp", 7) || !memcmp(header, "ftypmp42", 8) + || !memcmp(header, "ftypisom", 8) || !memcmp(header, "ftypM4V ", 8)) { + *mimeType = MEDIA_MIMETYPE_CONTAINER_MPEG4; + *confidence = 0.1; + + return true; + } + + return false; +} + +} // namespace android + diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp new file mode 100644 index 000000000000..fa3576881082 --- /dev/null +++ b/media/libstagefright/MPEG4Writer.cpp @@ -0,0 +1,696 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with 
the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include <arpa/inet.h> + +#include <ctype.h> +#include <pthread.h> + +#include <media/stagefright/MPEG4Writer.h> +#include <media/stagefright/MediaBuffer.h> +#include <media/stagefright/MetaData.h> +#include <media/stagefright/MediaDebug.h> +#include <media/stagefright/MediaDefs.h> +#include <media/stagefright/MediaSource.h> +#include <media/stagefright/Utils.h> + +namespace android { + +class MPEG4Writer::Track { +public: + Track(MPEG4Writer *owner, const sp<MediaSource> &source); + ~Track(); + + status_t start(); + void stop(); + bool reachedEOS(); + + int64_t getDuration() const; + void writeTrackHeader(int32_t trackID); + +private: + MPEG4Writer *mOwner; + sp<MetaData> mMeta; + sp<MediaSource> mSource; + volatile bool mDone; + + pthread_t mThread; + + struct SampleInfo { + size_t size; + off_t offset; + int64_t timestamp; + }; + List<SampleInfo> mSampleInfos; + + void *mCodecSpecificData; + size_t mCodecSpecificDataSize; + + bool mReachedEOS; + + static void *ThreadWrapper(void *me); + void threadEntry(); + + Track(const Track &); + Track &operator=(const Track &); +}; + +MPEG4Writer::MPEG4Writer(const char *filename) + : mFile(fopen(filename, "wb")), + mOffset(0), + mMdatOffset(0) { + CHECK(mFile != NULL); +} + +MPEG4Writer::~MPEG4Writer() { + stop(); + + for (List<Track *>::iterator it = mTracks.begin(); + it != mTracks.end(); ++it) { + delete *it; + } + mTracks.clear(); +} + +void MPEG4Writer::addSource(const sp<MediaSource> &source) { + Track *track = new Track(this, source); + mTracks.push_back(track); +} + +status_t 
MPEG4Writer::start() { + if (mFile == NULL) { + return UNKNOWN_ERROR; + } + + beginBox("ftyp"); + writeFourcc("isom"); + writeInt32(0); + writeFourcc("isom"); + endBox(); + + mMdatOffset = mOffset; + write("\x00\x00\x00\x01mdat????????", 16); + + for (List<Track *>::iterator it = mTracks.begin(); + it != mTracks.end(); ++it) { + status_t err = (*it)->start(); + + if (err != OK) { + for (List<Track *>::iterator it2 = mTracks.begin(); + it2 != it; ++it2) { + (*it2)->stop(); + } + + return err; + } + } + + return OK; +} + +void MPEG4Writer::stop() { + if (mFile == NULL) { + return; + } + + int64_t max_duration = 0; + for (List<Track *>::iterator it = mTracks.begin(); + it != mTracks.end(); ++it) { + (*it)->stop(); + + int64_t duration = (*it)->getDuration(); + if (duration > max_duration) { + max_duration = duration; + } + } + + // Fix up the size of the 'mdat' chunk. + fseek(mFile, mMdatOffset + 8, SEEK_SET); + int64_t size = mOffset - mMdatOffset; + size = hton64(size); + fwrite(&size, 1, 8, mFile); + fseek(mFile, mOffset, SEEK_SET); + + time_t now = time(NULL); + + beginBox("moov"); + + beginBox("mvhd"); + writeInt32(0); // version=0, flags=0 + writeInt32(now); // creation time + writeInt32(now); // modification time + writeInt32(1000); // timescale + writeInt32(max_duration); + writeInt32(0x10000); // rate + writeInt16(0x100); // volume + writeInt16(0); // reserved + writeInt32(0); // reserved + writeInt32(0); // reserved + writeInt32(0x10000); // matrix + writeInt32(0); + writeInt32(0); + writeInt32(0); + writeInt32(0x10000); + writeInt32(0); + writeInt32(0); + writeInt32(0); + writeInt32(0x40000000); + writeInt32(0); // predefined + writeInt32(0); // predefined + writeInt32(0); // predefined + writeInt32(0); // predefined + writeInt32(0); // predefined + writeInt32(0); // predefined + writeInt32(mTracks.size() + 1); // nextTrackID + endBox(); // mvhd + + int32_t id = 1; + for (List<Track *>::iterator it = mTracks.begin(); + it != mTracks.end(); ++it, ++id) { + 
(*it)->writeTrackHeader(id); + } + endBox(); // moov + + CHECK(mBoxes.empty()); + + fclose(mFile); + mFile = NULL; +} + +off_t MPEG4Writer::addSample(MediaBuffer *buffer) { + Mutex::Autolock autoLock(mLock); + + off_t old_offset = mOffset; + + fwrite((const uint8_t *)buffer->data() + buffer->range_offset(), + 1, buffer->range_length(), mFile); + + mOffset += buffer->range_length(); + + return old_offset; +} + +void MPEG4Writer::beginBox(const char *fourcc) { + CHECK_EQ(strlen(fourcc), 4); + + mBoxes.push_back(mOffset); + + writeInt32(0); + writeFourcc(fourcc); +} + +void MPEG4Writer::endBox() { + CHECK(!mBoxes.empty()); + + off_t offset = *--mBoxes.end(); + mBoxes.erase(--mBoxes.end()); + + fseek(mFile, offset, SEEK_SET); + writeInt32(mOffset - offset); + mOffset -= 4; + fseek(mFile, mOffset, SEEK_SET); +} + +void MPEG4Writer::writeInt8(int8_t x) { + fwrite(&x, 1, 1, mFile); + ++mOffset; +} + +void MPEG4Writer::writeInt16(int16_t x) { + x = htons(x); + fwrite(&x, 1, 2, mFile); + mOffset += 2; +} + +void MPEG4Writer::writeInt32(int32_t x) { + x = htonl(x); + fwrite(&x, 1, 4, mFile); + mOffset += 4; +} + +void MPEG4Writer::writeInt64(int64_t x) { + x = hton64(x); + fwrite(&x, 1, 8, mFile); + mOffset += 8; +} + +void MPEG4Writer::writeCString(const char *s) { + size_t n = strlen(s); + + fwrite(s, 1, n + 1, mFile); + mOffset += n + 1; +} + +void MPEG4Writer::writeFourcc(const char *s) { + CHECK_EQ(strlen(s), 4); + fwrite(s, 1, 4, mFile); + mOffset += 4; +} + +void MPEG4Writer::write(const void *data, size_t size) { + fwrite(data, 1, size, mFile); + mOffset += size; +} + +bool MPEG4Writer::reachedEOS() { + bool allDone = true; + for (List<Track *>::iterator it = mTracks.begin(); + it != mTracks.end(); ++it) { + if (!(*it)->reachedEOS()) { + allDone = false; + break; + } + } + + return allDone; +} + +//////////////////////////////////////////////////////////////////////////////// + +MPEG4Writer::Track::Track( + MPEG4Writer *owner, const sp<MediaSource> &source) + : 
mOwner(owner), + mMeta(source->getFormat()), + mSource(source), + mDone(false), + mCodecSpecificData(NULL), + mCodecSpecificDataSize(0), + mReachedEOS(false) { +} + +MPEG4Writer::Track::~Track() { + stop(); + + if (mCodecSpecificData != NULL) { + free(mCodecSpecificData); + mCodecSpecificData = NULL; + } +} + +status_t MPEG4Writer::Track::start() { + status_t err = mSource->start(); + + if (err != OK) { + mDone = mReachedEOS = true; + return err; + } + + pthread_attr_t attr; + pthread_attr_init(&attr); + pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE); + + mDone = false; + mReachedEOS = false; + + pthread_create(&mThread, &attr, ThreadWrapper, this); + pthread_attr_destroy(&attr); + + return OK; +} + +void MPEG4Writer::Track::stop() { + if (mDone) { + return; + } + + mDone = true; + + void *dummy; + pthread_join(mThread, &dummy); + + mSource->stop(); +} + +bool MPEG4Writer::Track::reachedEOS() { + return mReachedEOS; +} + +// static +void *MPEG4Writer::Track::ThreadWrapper(void *me) { + Track *track = static_cast<Track *>(me); + + track->threadEntry(); + + return NULL; +} + +void MPEG4Writer::Track::threadEntry() { + bool is_mpeg4 = false; + sp<MetaData> meta = mSource->getFormat(); + const char *mime; + meta->findCString(kKeyMIMEType, &mime); + is_mpeg4 = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG4); + + MediaBuffer *buffer; + while (!mDone && mSource->read(&buffer) == OK) { + if (buffer->range_length() == 0) { + buffer->release(); + buffer = NULL; + + continue; + } + + if (mCodecSpecificData == NULL && is_mpeg4) { + const uint8_t *data = + (const uint8_t *)buffer->data() + buffer->range_offset(); + + const size_t size = buffer->range_length(); + + size_t offset = 0; + while (offset + 3 < size) { + if (data[offset] == 0x00 && data[offset + 1] == 0x00 + && data[offset + 2] == 0x01 && data[offset + 3] == 0xb6) { + break; + } + + ++offset; + } + + // CHECK(offset + 3 < size); + if (offset + 3 >= size) { + // XXX assume the entire first chunk of data is 
the codec specific + // data. + offset = size; + } + + mCodecSpecificDataSize = offset; + mCodecSpecificData = malloc(offset); + memcpy(mCodecSpecificData, data, offset); + + buffer->set_range(buffer->range_offset() + offset, size - offset); + } + + off_t offset = mOwner->addSample(buffer); + + SampleInfo info; + info.size = buffer->range_length(); + info.offset = offset; + + int32_t units, scale; + bool success = + buffer->meta_data()->findInt32(kKeyTimeUnits, &units); + CHECK(success); + success = + buffer->meta_data()->findInt32(kKeyTimeScale, &scale); + CHECK(success); + + info.timestamp = (int64_t)units * 1000 / scale; + + mSampleInfos.push_back(info); + + buffer->release(); + buffer = NULL; + } + + mReachedEOS = true; +} + +int64_t MPEG4Writer::Track::getDuration() const { + return 10000; // XXX +} + +void MPEG4Writer::Track::writeTrackHeader(int32_t trackID) { + const char *mime; + bool success = mMeta->findCString(kKeyMIMEType, &mime); + CHECK(success); + + bool is_audio = !strncasecmp(mime, "audio/", 6); + + time_t now = time(NULL); + + mOwner->beginBox("trak"); + + mOwner->beginBox("tkhd"); + mOwner->writeInt32(0); // version=0, flags=0 + mOwner->writeInt32(now); // creation time + mOwner->writeInt32(now); // modification time + mOwner->writeInt32(trackID); + mOwner->writeInt32(0); // reserved + mOwner->writeInt32(getDuration()); + mOwner->writeInt32(0); // reserved + mOwner->writeInt32(0); // reserved + mOwner->writeInt16(0); // layer + mOwner->writeInt16(0); // alternate group + mOwner->writeInt16(is_audio ? 
0x100 : 0); // volume + mOwner->writeInt16(0); // reserved + + mOwner->writeInt32(0x10000); // matrix + mOwner->writeInt32(0); + mOwner->writeInt32(0); + mOwner->writeInt32(0); + mOwner->writeInt32(0x10000); + mOwner->writeInt32(0); + mOwner->writeInt32(0); + mOwner->writeInt32(0); + mOwner->writeInt32(0x40000000); + + if (is_audio) { + mOwner->writeInt32(0); + mOwner->writeInt32(0); + } else { + int32_t width, height; + bool success = mMeta->findInt32(kKeyWidth, &width); + success = success && mMeta->findInt32(kKeyHeight, &height); + CHECK(success); + + mOwner->writeInt32(width); + mOwner->writeInt32(height); + } + mOwner->endBox(); // tkhd + + mOwner->beginBox("mdia"); + + mOwner->beginBox("mdhd"); + mOwner->writeInt32(0); // version=0, flags=0 + mOwner->writeInt32(now); // creation time + mOwner->writeInt32(now); // modification time + mOwner->writeInt32(1000); // timescale + mOwner->writeInt32(getDuration()); + mOwner->writeInt16(0); // language code XXX + mOwner->writeInt16(0); // predefined + mOwner->endBox(); + + mOwner->beginBox("hdlr"); + mOwner->writeInt32(0); // version=0, flags=0 + mOwner->writeInt32(0); // predefined + mOwner->writeFourcc(is_audio ? 
"soun" : "vide"); + mOwner->writeInt32(0); // reserved + mOwner->writeInt32(0); // reserved + mOwner->writeInt32(0); // reserved + mOwner->writeCString(""); // name + mOwner->endBox(); + + mOwner->beginBox("minf"); + + mOwner->beginBox("dinf"); + mOwner->beginBox("dref"); + mOwner->writeInt32(0); // version=0, flags=0 + mOwner->writeInt32(1); + mOwner->beginBox("url "); + mOwner->writeInt32(1); // version=0, flags=1 + mOwner->endBox(); // url + mOwner->endBox(); // dref + mOwner->endBox(); // dinf + + if (is_audio) { + mOwner->beginBox("smhd"); + mOwner->writeInt32(0); // version=0, flags=0 + mOwner->writeInt16(0); // balance + mOwner->writeInt16(0); // reserved + mOwner->endBox(); + } else { + mOwner->beginBox("vmhd"); + mOwner->writeInt32(0x00000001); // version=0, flags=1 + mOwner->writeInt16(0); // graphics mode + mOwner->writeInt16(0); // opcolor + mOwner->writeInt16(0); + mOwner->writeInt16(0); + mOwner->endBox(); + } + mOwner->endBox(); // minf + + mOwner->beginBox("stbl"); + + mOwner->beginBox("stsd"); + mOwner->writeInt32(0); // version=0, flags=0 + mOwner->writeInt32(1); // entry count + if (is_audio) { + const char *fourcc = NULL; + if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_NB, mime)) { + fourcc = "samr"; + } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_WB, mime)) { + fourcc = "sawb"; + } else { + LOGE("Unknown mime type '%s'.", mime); + CHECK(!"should not be here, unknown mime type."); + } + + mOwner->beginBox(fourcc); // audio format + mOwner->writeInt32(0); // reserved + mOwner->writeInt16(0); // reserved + mOwner->writeInt16(0); // data ref index + mOwner->writeInt32(0); // reserved + mOwner->writeInt32(0); // reserved + mOwner->writeInt16(2); // channel count + mOwner->writeInt16(16); // sample size + mOwner->writeInt16(0); // predefined + mOwner->writeInt16(0); // reserved + + int32_t samplerate; + bool success = mMeta->findInt32(kKeySampleRate, &samplerate); + CHECK(success); + + mOwner->writeInt32(samplerate << 16); + mOwner->endBox(); + } else { 
+ if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4, mime)) { + mOwner->beginBox("mp4v"); + } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_H263, mime)) { + mOwner->beginBox("s263"); + } else { + LOGE("Unknown mime type '%s'.", mime); + CHECK(!"should not be here, unknown mime type."); + } + + mOwner->writeInt32(0); // reserved + mOwner->writeInt16(0); // reserved + mOwner->writeInt16(0); // data ref index + mOwner->writeInt16(0); // predefined + mOwner->writeInt16(0); // reserved + mOwner->writeInt32(0); // predefined + mOwner->writeInt32(0); // predefined + mOwner->writeInt32(0); // predefined + + int32_t width, height; + bool success = mMeta->findInt32(kKeyWidth, &width); + success = success && mMeta->findInt32(kKeyHeight, &height); + CHECK(success); + + mOwner->writeInt16(width); + mOwner->writeInt16(height); + mOwner->writeInt32(0x480000); // horiz resolution + mOwner->writeInt32(0x480000); // vert resolution + mOwner->writeInt32(0); // reserved + mOwner->writeInt16(1); // frame count + mOwner->write(" ", 32); + mOwner->writeInt16(0x18); // depth + mOwner->writeInt16(-1); // predefined + + CHECK(23 + mCodecSpecificDataSize < 128); + + if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4, mime)) { + mOwner->beginBox("esds"); + + mOwner->writeInt32(0); // version=0, flags=0 + + mOwner->writeInt8(0x03); // ES_DescrTag + mOwner->writeInt8(23 + mCodecSpecificDataSize); + mOwner->writeInt16(0x0000); // ES_ID + mOwner->writeInt8(0x1f); + + mOwner->writeInt8(0x04); // DecoderConfigDescrTag + mOwner->writeInt8(15 + mCodecSpecificDataSize); + mOwner->writeInt8(0x20); // objectTypeIndication ISO/IEC 14492-2 + mOwner->writeInt8(0x11); // streamType VisualStream + + static const uint8_t kData[] = { + 0x01, 0x77, 0x00, + 0x00, 0x03, 0xe8, 0x00, + 0x00, 0x03, 0xe8, 0x00 + }; + mOwner->write(kData, sizeof(kData)); + + mOwner->writeInt8(0x05); // DecoderSpecificInfoTag + + mOwner->writeInt8(mCodecSpecificDataSize); + mOwner->write(mCodecSpecificData, mCodecSpecificDataSize); + + static const 
uint8_t kData2[] = { + 0x06, // SLConfigDescriptorTag + 0x01, + 0x02 + }; + mOwner->write(kData2, sizeof(kData2)); + + mOwner->endBox(); // esds + } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_H263, mime)) { + mOwner->beginBox("d263"); + + mOwner->writeInt32(0); // vendor + mOwner->writeInt8(0); // decoder version + mOwner->writeInt8(10); // level: 10 + mOwner->writeInt8(0); // profile: 0 + + mOwner->endBox(); // d263 + } + mOwner->endBox(); // mp4v or s263 + } + mOwner->endBox(); // stsd + + mOwner->beginBox("stts"); + mOwner->writeInt32(0); // version=0, flags=0 + mOwner->writeInt32(mSampleInfos.size() - 1); + + List<SampleInfo>::iterator it = mSampleInfos.begin(); + int64_t last = (*it).timestamp; + ++it; + while (it != mSampleInfos.end()) { + mOwner->writeInt32(1); + mOwner->writeInt32((*it).timestamp - last); + + last = (*it).timestamp; + + ++it; + } + mOwner->endBox(); // stts + + mOwner->beginBox("stsz"); + mOwner->writeInt32(0); // version=0, flags=0 + mOwner->writeInt32(0); // default sample size + mOwner->writeInt32(mSampleInfos.size()); + for (List<SampleInfo>::iterator it = mSampleInfos.begin(); + it != mSampleInfos.end(); ++it) { + mOwner->writeInt32((*it).size); + } + mOwner->endBox(); // stsz + + mOwner->beginBox("stsc"); + mOwner->writeInt32(0); // version=0, flags=0 + mOwner->writeInt32(mSampleInfos.size()); + int32_t n = 1; + for (List<SampleInfo>::iterator it = mSampleInfos.begin(); + it != mSampleInfos.end(); ++it, ++n) { + mOwner->writeInt32(n); + mOwner->writeInt32(1); + mOwner->writeInt32(1); + } + mOwner->endBox(); // stsc + + mOwner->beginBox("co64"); + mOwner->writeInt32(0); // version=0, flags=0 + mOwner->writeInt32(mSampleInfos.size()); + for (List<SampleInfo>::iterator it = mSampleInfos.begin(); + it != mSampleInfos.end(); ++it, ++n) { + mOwner->writeInt64((*it).offset); + } + mOwner->endBox(); // co64 + + mOwner->endBox(); // stbl + mOwner->endBox(); // mdia + mOwner->endBox(); // trak +} + +} // namespace android diff --git 
a/media/libstagefright/MediaBuffer.cpp b/media/libstagefright/MediaBuffer.cpp new file mode 100644 index 000000000000..f3c0e7323207 --- /dev/null +++ b/media/libstagefright/MediaBuffer.cpp @@ -0,0 +1,172 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "MediaBuffer" +#include <utils/Log.h> + +#include <errno.h> +#include <pthread.h> +#include <stdlib.h> + +#include <media/stagefright/MediaBuffer.h> +#include <media/stagefright/MediaDebug.h> +#include <media/stagefright/MetaData.h> + +namespace android { + +// XXX make this truly atomic. 
+static int atomic_add(int *value, int delta) { + int prev_value = *value; + *value += delta; + + return prev_value; +} + +MediaBuffer::MediaBuffer(void *data, size_t size) + : mObserver(NULL), + mNextBuffer(NULL), + mRefCount(0), + mData(data), + mSize(size), + mRangeOffset(0), + mRangeLength(size), + mOwnsData(false), + mMetaData(new MetaData), + mOriginal(NULL) { +} + +MediaBuffer::MediaBuffer(size_t size) + : mObserver(NULL), + mNextBuffer(NULL), + mRefCount(0), + mData(malloc(size)), + mSize(size), + mRangeOffset(0), + mRangeLength(size), + mOwnsData(true), + mMetaData(new MetaData), + mOriginal(NULL) { +} + +void MediaBuffer::release() { + if (mObserver == NULL) { + CHECK_EQ(mRefCount, 0); + delete this; + return; + } + + int prevCount = atomic_add(&mRefCount, -1); + if (prevCount == 1) { + if (mObserver == NULL) { + delete this; + return; + } + + mObserver->signalBufferReturned(this); + } + CHECK(prevCount > 0); +} + +void MediaBuffer::claim() { + CHECK(mObserver != NULL); + CHECK_EQ(mRefCount, 1); + + mRefCount = 0; +} + +void MediaBuffer::add_ref() { + atomic_add(&mRefCount, 1); +} + +void *MediaBuffer::data() const { + return mData; +} + +size_t MediaBuffer::size() const { + return mSize; +} + +size_t MediaBuffer::range_offset() const { + return mRangeOffset; +} + +size_t MediaBuffer::range_length() const { + return mRangeLength; +} + +void MediaBuffer::set_range(size_t offset, size_t length) { + if (offset < 0 || offset + length > mSize) { + LOGE("offset = %d, length = %d, mSize = %d", offset, length, mSize); + } + CHECK(offset >= 0 && offset + length <= mSize); + + mRangeOffset = offset; + mRangeLength = length; +} + +sp<MetaData> MediaBuffer::meta_data() { + return mMetaData; +} + +void MediaBuffer::reset() { + mMetaData->clear(); + set_range(0, mSize); +} + +MediaBuffer::~MediaBuffer() { + CHECK_EQ(mObserver, NULL); + + if (mOwnsData && mData != NULL) { + free(mData); + mData = NULL; + } + + if (mOriginal != NULL) { + mOriginal->release(); + mOriginal 
= NULL; + } +} + +void MediaBuffer::setObserver(MediaBufferObserver *observer) { + CHECK(observer == NULL || mObserver == NULL); + mObserver = observer; +} + +void MediaBuffer::setNextBuffer(MediaBuffer *buffer) { + mNextBuffer = buffer; +} + +MediaBuffer *MediaBuffer::nextBuffer() { + return mNextBuffer; +} + +int MediaBuffer::refcount() const { + return mRefCount; +} + +MediaBuffer *MediaBuffer::clone() { + MediaBuffer *buffer = new MediaBuffer(mData, mSize); + buffer->set_range(mRangeOffset, mRangeLength); + buffer->mMetaData = new MetaData(*mMetaData.get()); + + add_ref(); + buffer->mOriginal = this; + + return buffer; +} + +} // namespace android + diff --git a/media/libstagefright/MediaBufferGroup.cpp b/media/libstagefright/MediaBufferGroup.cpp new file mode 100644 index 000000000000..c8d05f4d8e89 --- /dev/null +++ b/media/libstagefright/MediaBufferGroup.cpp @@ -0,0 +1,86 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "MediaBufferGroup" +#include <utils/Log.h> + +#include <media/stagefright/MediaBuffer.h> +#include <media/stagefright/MediaBufferGroup.h> +#include <media/stagefright/MediaDebug.h> + +namespace android { + +MediaBufferGroup::MediaBufferGroup() + : mFirstBuffer(NULL), + mLastBuffer(NULL) { +} + +MediaBufferGroup::~MediaBufferGroup() { + MediaBuffer *next; + for (MediaBuffer *buffer = mFirstBuffer; buffer != NULL; + buffer = next) { + next = buffer->nextBuffer(); + + CHECK_EQ(buffer->refcount(), 0); + + buffer->setObserver(NULL); + buffer->release(); + } +} + +void MediaBufferGroup::add_buffer(MediaBuffer *buffer) { + Mutex::Autolock autoLock(mLock); + + buffer->setObserver(this); + + if (mLastBuffer) { + mLastBuffer->setNextBuffer(buffer); + } else { + mFirstBuffer = buffer; + } + + mLastBuffer = buffer; +} + +status_t MediaBufferGroup::acquire_buffer(MediaBuffer **out) { + Mutex::Autolock autoLock(mLock); + + for (;;) { + for (MediaBuffer *buffer = mFirstBuffer; + buffer != NULL; buffer = buffer->nextBuffer()) { + if (buffer->refcount() == 0) { + buffer->add_ref(); + buffer->reset(); + + *out = buffer; + goto exit; + } + } + + // All buffers are in use. Block until one of them is returned to us. + mCondition.wait(mLock); + } + +exit: + return OK; +} + +void MediaBufferGroup::signalBufferReturned(MediaBuffer *) { + Mutex::Autolock autoLock(mLock); + mCondition.signal(); +} + +} // namespace android diff --git a/media/libstagefright/MediaDefs.cpp b/media/libstagefright/MediaDefs.cpp new file mode 100644 index 000000000000..87b5b24ec22b --- /dev/null +++ b/media/libstagefright/MediaDefs.cpp @@ -0,0 +1,36 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include <media/stagefright/MediaDefs.h> + +namespace android { + +const char *MEDIA_MIMETYPE_IMAGE_JPEG = "image/jpeg"; + +const char *MEDIA_MIMETYPE_VIDEO_AVC = "video/avc"; +const char *MEDIA_MIMETYPE_VIDEO_MPEG4 = "video/mp4v-es"; +const char *MEDIA_MIMETYPE_VIDEO_H263 = "video/3gpp"; +const char *MEDIA_MIMETYPE_VIDEO_RAW = "video/raw"; + +const char *MEDIA_MIMETYPE_AUDIO_AMR_NB = "audio/3gpp"; +const char *MEDIA_MIMETYPE_AUDIO_AMR_WB = "audio/amr-wb"; +const char *MEDIA_MIMETYPE_AUDIO_MPEG = "audio/mpeg"; +const char *MEDIA_MIMETYPE_AUDIO_AAC = "audio/mp4a-latm"; +const char *MEDIA_MIMETYPE_AUDIO_RAW = "audio/raw"; + +const char *MEDIA_MIMETYPE_CONTAINER_MPEG4 = "video/mpeg4"; + +} // namespace android diff --git a/media/libstagefright/MediaExtractor.cpp b/media/libstagefright/MediaExtractor.cpp new file mode 100644 index 000000000000..8535f5289026 --- /dev/null +++ b/media/libstagefright/MediaExtractor.cpp @@ -0,0 +1,61 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "MediaExtractor" +#include <utils/Log.h> + +#include <media/stagefright/AMRExtractor.h> +#include <media/stagefright/DataSource.h> +#include <media/stagefright/MediaDefs.h> +#include <media/stagefright/MP3Extractor.h> +#include <media/stagefright/MPEG4Extractor.h> +#include <media/stagefright/MediaExtractor.h> +#include <utils/String8.h> + +namespace android { + +// static +sp<MediaExtractor> MediaExtractor::Create( + const sp<DataSource> &source, const char *mime) { + String8 tmp; + if (mime == NULL) { + float confidence; + if (!source->sniff(&tmp, &confidence)) { + LOGE("FAILED to autodetect media content."); + + return NULL; + } + + mime = tmp.string(); + LOGI("Autodetected media content as '%s' with confidence %.2f", + mime, confidence); + } + + if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_MPEG4) + || !strcasecmp(mime, "audio/mp4")) { + return new MPEG4Extractor(source); + } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) { + return new MP3Extractor(source); + } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB) + || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) { + return new AMRExtractor(source); + } + + return NULL; +} + +} // namespace android diff --git a/media/libstagefright/MediaPlayerImpl.cpp b/media/libstagefright/MediaPlayerImpl.cpp new file mode 100644 index 000000000000..622ea7eee409 --- /dev/null +++ b/media/libstagefright/MediaPlayerImpl.cpp @@ -0,0 +1,659 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "MediaPlayerImpl" +#include "utils/Log.h" + +#include <OMX_Component.h> + +#include <unistd.h> + +#include <media/stagefright/AudioPlayer.h> +#include <media/stagefright/CachingDataSource.h> +// #include <media/stagefright/CameraSource.h> +#include <media/stagefright/HTTPDataSource.h> +#include <media/stagefright/HTTPStream.h> +#include <media/stagefright/MediaDebug.h> +#include <media/stagefright/MediaExtractor.h> +#include <media/stagefright/MediaPlayerImpl.h> +#include <media/stagefright/MetaData.h> +#include <media/stagefright/MmapSource.h> +#include <media/stagefright/OMXCodec.h> +#include <media/stagefright/ShoutcastSource.h> +#include <media/stagefright/TimeSource.h> +#include <ui/PixelFormat.h> +#include <ui/Surface.h> + +namespace android { + +MediaPlayerImpl::MediaPlayerImpl(const char *uri) + : mInitCheck(NO_INIT), + mTimeSource(NULL), + mAudioPlayer(NULL), + mVideoWidth(0), + mVideoHeight(0), + mVideoPosition(0), + mDuration(0), + mPlaying(false), + mPaused(false), + mSeeking(false) { + LOGI("MediaPlayerImpl(%s)", uri); + DataSource::RegisterDefaultSniffers(); + + status_t err = mClient.connect(); + if (err != OK) { + LOGE("Failed to connect to OMXClient."); + return; + } + + if (!strncasecmp("shoutcast://", uri, 12)) { + setAudioSource(makeShoutcastSource(uri)); +#if 0 + } else if (!strncasecmp("camera:", uri, 7)) { + mVideoWidth = 480; + mVideoHeight = 320; + mVideoDecoder = CameraSource::Create(); +#endif + } else { + sp<DataSource> source; + if (!strncasecmp("file://", uri, 7)) { + 
source = new MmapSource(uri + 7); + } else if (!strncasecmp("http://", uri, 7)) { + source = new HTTPDataSource(uri); + source = new CachingDataSource(source, 64 * 1024, 10); + } else { + // Assume it's a filename. + source = new MmapSource(uri); + } + + mExtractor = MediaExtractor::Create(source); + + if (mExtractor == NULL) { + return; + } + } + + init(); + + mInitCheck = OK; +} + +MediaPlayerImpl::MediaPlayerImpl(int fd, int64_t offset, int64_t length) + : mInitCheck(NO_INIT), + mTimeSource(NULL), + mAudioPlayer(NULL), + mVideoWidth(0), + mVideoHeight(0), + mVideoPosition(0), + mDuration(0), + mPlaying(false), + mPaused(false), + mSeeking(false) { + LOGI("MediaPlayerImpl(%d, %lld, %lld)", fd, offset, length); + DataSource::RegisterDefaultSniffers(); + + status_t err = mClient.connect(); + if (err != OK) { + LOGE("Failed to connect to OMXClient."); + return; + } + + mExtractor = MediaExtractor::Create( + new MmapSource(fd, offset, length)); + + if (mExtractor == NULL) { + return; + } + + init(); + + mInitCheck = OK; +} + +status_t MediaPlayerImpl::initCheck() const { + return mInitCheck; +} + +MediaPlayerImpl::~MediaPlayerImpl() { + stop(); + setSurface(NULL); + + if (mInitCheck == OK) { + mClient.disconnect(); + } + + LOGV("~MediaPlayerImpl done."); +} + +void MediaPlayerImpl::play() { + LOGI("play"); + + if (mPlaying) { + if (mPaused) { + if (mAudioSource != NULL) { + mAudioPlayer->resume(); + } + mPaused = false; + } + return; + } + + mPlaying = true; + + if (mAudioSource != NULL) { + mAudioPlayer = new AudioPlayer(mAudioSink); + mAudioPlayer->setSource(mAudioDecoder); + + if (mVideoDecoder == NULL) { + // If there is no video, start playing right away, + // otherwise we'll start the audio player after we decode + // the first video frame, this way we won't be behind right + // away. 
+ mAudioPlayer->start(); + } + + mTimeSource = mAudioPlayer; + } else { + mTimeSource = new SystemTimeSource; + } + + if (mVideoDecoder != NULL) { + pthread_attr_t attr; + pthread_attr_init(&attr); + pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE); + + pthread_create(&mVideoThread, &attr, VideoWrapper, this); + + pthread_attr_destroy(&attr); + } +} + +void MediaPlayerImpl::pause() { + if (!mPlaying || mPaused) { + return; + } + + if (mAudioSource != NULL) { + mAudioPlayer->pause(); + } + + mPaused = true; +} + +void MediaPlayerImpl::stop() { + if (!mPlaying) { + return; + } + + mPlaying = false; + + if (mVideoDecoder != NULL) { + void *dummy; + pthread_join(mVideoThread, &dummy); + } + + if (mAudioSource != NULL) { + mAudioPlayer->stop(); + + delete mAudioPlayer; + mAudioPlayer = NULL; + } else { + delete mTimeSource; + } + + mTimeSource = NULL; +} + +// static +void *MediaPlayerImpl::VideoWrapper(void *me) { + ((MediaPlayerImpl *)me)->videoEntry(); + + return NULL; +} + +void MediaPlayerImpl::videoEntry() { + bool firstFrame = true; + bool eof = false; + + status_t err = mVideoDecoder->start(); + CHECK_EQ(err, OK); + + while (mPlaying) { + MediaBuffer *buffer; + + MediaSource::ReadOptions options; + bool seeking = false; + + { + Mutex::Autolock autoLock(mLock); + if (mSeeking) { + LOGI("seek-options to %lld", mSeekTimeUs); + options.setSeekTo(mSeekTimeUs); + + mSeeking = false; + seeking = true; + eof = false; + } + } + + if (eof || mPaused) { + usleep(100000); + continue; + } + + status_t err = mVideoDecoder->read(&buffer, &options); + CHECK((err == OK && buffer != NULL) || (err != OK && buffer == NULL)); + + if (err == ERROR_END_OF_STREAM || err != OK) { + eof = true; + continue; + } + + if (buffer->range_length() == 0) { + // The final buffer is empty. 
+ buffer->release(); + continue; + } + + int32_t units, scale; + bool success = + buffer->meta_data()->findInt32(kKeyTimeUnits, &units); + CHECK(success); + success = + buffer->meta_data()->findInt32(kKeyTimeScale, &scale); + CHECK(success); + + int64_t pts_us = (int64_t)units * 1000000 / scale; + { + Mutex::Autolock autoLock(mLock); + mVideoPosition = pts_us; + + LOGV("now_video = %.2f secs (%lld ms)", + pts_us / 1E6, (pts_us + 500) / 1000); + } + + if (seeking && mAudioPlayer != NULL) { + // Now that we know where exactly video seeked (taking sync-samples + // into account), we will seek the audio track to the same time. + mAudioPlayer->seekTo(pts_us); + } + + if (firstFrame || seeking) { + if (firstFrame && mAudioPlayer != NULL) { + // We've deferred starting the audio player until now. + mAudioPlayer->start(); + } + mTimeSourceDeltaUs = mTimeSource->getRealTimeUs() - pts_us; + firstFrame = false; + } + + displayOrDiscardFrame(buffer, pts_us); + } + + mVideoDecoder->stop(); +} + +void MediaPlayerImpl::displayOrDiscardFrame( + MediaBuffer *buffer, int64_t pts_us) { + for (;;) { + if (!mPlaying || mPaused) { + buffer->release(); + buffer = NULL; + + return; + } + + int64_t realtime_us, mediatime_us; + if (mAudioPlayer != NULL + && mAudioPlayer->getMediaTimeMapping(&realtime_us, &mediatime_us)) { + mTimeSourceDeltaUs = realtime_us - mediatime_us; + LOGV("mTimeSourceDeltaUs = %.2f secs", mTimeSourceDeltaUs / 1E6); + } + + int64_t now_us = mTimeSource->getRealTimeUs(); + now_us -= mTimeSourceDeltaUs; + + int64_t delay_us = pts_us - now_us; + + if (delay_us < -15000) { + // We're late. 
+ + LOGI("we're late by %lld ms, dropping a frame\n", + -delay_us / 1000); + + buffer->release(); + buffer = NULL; + return; + } else if (delay_us > 100000) { + LOGI("we're much too early (by %lld ms)\n", + delay_us / 1000); + usleep(100000); + continue; + } else if (delay_us > 0) { + usleep(delay_us); + } + + break; + } + + { + Mutex::Autolock autoLock(mLock); + if (mVideoRenderer.get() != NULL) { + sendFrameToISurface(buffer); + } + } + + buffer->release(); + buffer = NULL; +} + +void MediaPlayerImpl::init() { + if (mExtractor != NULL) { + size_t num_tracks = mExtractor->countTracks(); + + mDuration = 0; + + for (size_t i = 0; i < num_tracks; ++i) { + const sp<MetaData> meta = mExtractor->getTrackMetaData(i); + CHECK(meta != NULL); + + const char *mime; + if (!meta->findCString(kKeyMIMEType, &mime)) { + continue; + } + + bool is_audio = false; + bool is_acceptable = false; + if (!strncasecmp(mime, "audio/", 6)) { + is_audio = true; + is_acceptable = (mAudioSource == NULL); + } else if (!strncasecmp(mime, "video/", 6)) { + is_acceptable = (mVideoSource == NULL); + } + + if (!is_acceptable) { + continue; + } + + sp<MediaSource> source = mExtractor->getTrack(i); + + int32_t units, scale; + if (meta->findInt32(kKeyDuration, &units) + && meta->findInt32(kKeyTimeScale, &scale)) { + int64_t duration_us = (int64_t)units * 1000000 / scale; + if (duration_us > mDuration) { + mDuration = duration_us; + } + } + + if (is_audio) { + setAudioSource(source); + } else { + setVideoSource(source); + } + } + } +} + +void MediaPlayerImpl::setAudioSource(const sp<MediaSource> &source) { + LOGI("setAudioSource"); + mAudioSource = source; + + sp<MetaData> meta = source->getFormat(); + + mAudioDecoder = OMXCodec::Create( + mClient.interface(), meta, false /* createEncoder */, source); +} + +void MediaPlayerImpl::setVideoSource(const sp<MediaSource> &source) { + LOGI("setVideoSource"); + mVideoSource = source; + + sp<MetaData> meta = source->getFormat(); + + bool success = 
meta->findInt32(kKeyWidth, &mVideoWidth); + CHECK(success); + + success = meta->findInt32(kKeyHeight, &mVideoHeight); + CHECK(success); + + mVideoDecoder = OMXCodec::Create( + mClient.interface(), meta, false /* createEncoder */, source); + + if (mISurface.get() != NULL || mSurface.get() != NULL) { + depopulateISurface(); + populateISurface(); + } +} + +void MediaPlayerImpl::setSurface(const sp<Surface> &surface) { + LOGI("setSurface %p", surface.get()); + Mutex::Autolock autoLock(mLock); + + depopulateISurface(); + + mSurface = surface; + mISurface = NULL; + + if (mSurface.get() != NULL) { + populateISurface(); + } +} + +void MediaPlayerImpl::setISurface(const sp<ISurface> &isurface) { + LOGI("setISurface %p", isurface.get()); + Mutex::Autolock autoLock(mLock); + + depopulateISurface(); + + mSurface = NULL; + mISurface = isurface; + + if (mISurface.get() != NULL) { + populateISurface(); + } +} + +MediaSource *MediaPlayerImpl::makeShoutcastSource(const char *uri) { + if (strncasecmp(uri, "shoutcast://", 12)) { + return NULL; + } + + string host; + string path; + int port; + + char *slash = strchr(uri + 12, '/'); + if (slash == NULL) { + host = uri + 12; + path = "/"; + } else { + host = string(uri + 12, slash - (uri + 12)); + path = slash; + } + + char *colon = strchr(host.c_str(), ':'); + if (colon == NULL) { + port = 80; + } else { + char *end; + long tmp = strtol(colon + 1, &end, 10); + CHECK(end > colon + 1); + CHECK(tmp > 0 && tmp < 65536); + port = tmp; + + host = string(host, 0, colon - host.c_str()); + } + + LOGI("Connecting to host '%s', port %d, path '%s'", + host.c_str(), port, path.c_str()); + + HTTPStream *http = new HTTPStream; + int http_status; + + for (;;) { + status_t err = http->connect(host.c_str(), port); + CHECK_EQ(err, OK); + + err = http->send("GET "); + err = http->send(path.c_str()); + err = http->send(" HTTP/1.1\r\n"); + err = http->send("Host: "); + err = http->send(host.c_str()); + err = http->send("\r\n"); + err = 
http->send("Icy-MetaData: 1\r\n\r\n"); + + CHECK_EQ(OK, http->receive_header(&http_status)); + + if (http_status == 301 || http_status == 302) { + string location; + CHECK(http->find_header_value("Location", &location)); + + CHECK(string(location, 0, 7) == "http://"); + location.erase(0, 7); + string::size_type slashPos = location.find('/'); + if (slashPos == string::npos) { + slashPos = location.size(); + location += '/'; + } + + http->disconnect(); + + LOGI("Redirecting to %s\n", location.c_str()); + + host = string(location, 0, slashPos); + + string::size_type colonPos = host.find(':'); + if (colonPos != string::npos) { + const char *start = host.c_str() + colonPos + 1; + char *end; + long tmp = strtol(start, &end, 10); + CHECK(end > start && (*end == '\0')); + + port = (tmp >= 0 && tmp < 65536) ? (int)tmp : 80; + } else { + port = 80; + } + + path = string(location, slashPos); + + continue; + } + + break; + } + + if (http_status != 200) { + LOGE("Connection failed: http_status = %d", http_status); + return NULL; + } + + MediaSource *source = new ShoutcastSource(http); + + return source; +} + +bool MediaPlayerImpl::isPlaying() const { + return mPlaying && !mPaused; +} + +int64_t MediaPlayerImpl::getDuration() { + return mDuration; +} + +int64_t MediaPlayerImpl::getPosition() { + int64_t position = 0; + if (mVideoSource != NULL) { + Mutex::Autolock autoLock(mLock); + position = mVideoPosition; + } else if (mAudioPlayer != NULL) { + position = mAudioPlayer->getMediaTimeUs(); + } + + return position; +} + +status_t MediaPlayerImpl::seekTo(int64_t time) { + LOGI("seekTo %lld", time); + + if (mPaused) { + return UNKNOWN_ERROR; + } + + if (mVideoSource == NULL && mAudioPlayer != NULL) { + mAudioPlayer->seekTo(time); + } else { + Mutex::Autolock autoLock(mLock); + mSeekTimeUs = time; + mSeeking = true; + } + + return OK; +} + +void MediaPlayerImpl::populateISurface() { + if (mVideoSource == NULL) { + return; + } + + sp<MetaData> meta = mVideoDecoder->getFormat(); + + 
int32_t format; + const char *component; + int32_t decodedWidth, decodedHeight; + bool success = meta->findInt32(kKeyColorFormat, &format); + success = success && meta->findCString(kKeyDecoderComponent, &component); + success = success && meta->findInt32(kKeyWidth, &decodedWidth); + success = success && meta->findInt32(kKeyHeight, &decodedHeight); + CHECK(success); + + if (mSurface.get() != NULL) { + mVideoRenderer = + mClient.interface()->createRenderer( + mSurface, component, + (OMX_COLOR_FORMATTYPE)format, + decodedWidth, decodedHeight, + mVideoWidth, mVideoHeight); + } else { + mVideoRenderer = + mClient.interface()->createRenderer( + mISurface, component, + (OMX_COLOR_FORMATTYPE)format, + decodedWidth, decodedHeight, + mVideoWidth, mVideoHeight); + } +} + +void MediaPlayerImpl::depopulateISurface() { + mVideoRenderer.clear(); +} + +void MediaPlayerImpl::sendFrameToISurface(MediaBuffer *buffer) { + void *id; + if (buffer->meta_data()->findPointer(kKeyBufferID, &id)) { + mVideoRenderer->render((IOMX::buffer_id)id); + } +} + +void MediaPlayerImpl::setAudioSink( + const sp<MediaPlayerBase::AudioSink> &audioSink) { + LOGI("setAudioSink %p", audioSink.get()); + mAudioSink = audioSink; +} + +} // namespace android + diff --git a/media/libstagefright/MediaSource.cpp b/media/libstagefright/MediaSource.cpp new file mode 100644 index 000000000000..ec89b7442597 --- /dev/null +++ b/media/libstagefright/MediaSource.cpp @@ -0,0 +1,60 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include <media/stagefright/MediaSource.h> + +namespace android { + +MediaSource::MediaSource() {} + +MediaSource::~MediaSource() {} + +//////////////////////////////////////////////////////////////////////////////// + +MediaSource::ReadOptions::ReadOptions() { + reset(); +} + +void MediaSource::ReadOptions::reset() { + mOptions = 0; + mSeekTimeUs = 0; + mLatenessUs = 0; +} + +void MediaSource::ReadOptions::setSeekTo(int64_t time_us) { + mOptions |= kSeekTo_Option; + mSeekTimeUs = time_us; +} + +void MediaSource::ReadOptions::clearSeekTo() { + mOptions &= ~kSeekTo_Option; + mSeekTimeUs = 0; +} + +bool MediaSource::ReadOptions::getSeekTo(int64_t *time_us) const { + *time_us = mSeekTimeUs; + return (mOptions & kSeekTo_Option) != 0; +} + +void MediaSource::ReadOptions::setLateBy(int64_t lateness_us) { + mLatenessUs = lateness_us; +} + +int64_t MediaSource::ReadOptions::getLateBy() const { + return mLatenessUs; +} + +} // namespace android diff --git a/media/libstagefright/MetaData.cpp b/media/libstagefright/MetaData.cpp new file mode 100644 index 000000000000..6b067cbc7e3c --- /dev/null +++ b/media/libstagefright/MetaData.cpp @@ -0,0 +1,232 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include <stdlib.h> +#include <string.h> + +#include <media/stagefright/MediaDebug.h> +#include <media/stagefright/MetaData.h> + +namespace android { + +MetaData::MetaData() { +} + +MetaData::MetaData(const MetaData &from) + : RefBase(), + mItems(from.mItems) { +} + +MetaData::~MetaData() { + clear(); +} + +void MetaData::clear() { + mItems.clear(); +} + +bool MetaData::remove(uint32_t key) { + ssize_t i = mItems.indexOfKey(key); + + if (i < 0) { + return false; + } + + mItems.removeItemsAt(i); + + return true; +} + +bool MetaData::setCString(uint32_t key, const char *value) { + return setData(key, TYPE_C_STRING, value, strlen(value) + 1); +} + +bool MetaData::setInt32(uint32_t key, int32_t value) { + return setData(key, TYPE_INT32, &value, sizeof(value)); +} + +bool MetaData::setFloat(uint32_t key, float value) { + return setData(key, TYPE_FLOAT, &value, sizeof(value)); +} + +bool MetaData::setPointer(uint32_t key, void *value) { + return setData(key, TYPE_POINTER, &value, sizeof(value)); +} + +bool MetaData::findCString(uint32_t key, const char **value) { + uint32_t type; + const void *data; + size_t size; + if (!findData(key, &type, &data, &size) || type != TYPE_C_STRING) { + return false; + } + + *value = (const char *)data; + + return true; +} + +bool MetaData::findInt32(uint32_t key, int32_t *value) { + uint32_t type; + const void *data; + size_t size; + if (!findData(key, &type, &data, &size) || type != TYPE_INT32) { + return false; + } + + CHECK_EQ(size, sizeof(*value)); + + *value = *(int32_t *)data; + + return true; +} + +bool MetaData::findFloat(uint32_t key, float *value) { + uint32_t type; + const void *data; + size_t size; + if (!findData(key, &type, &data, &size) || type != TYPE_FLOAT) { + return false; + } + + CHECK_EQ(size, sizeof(*value)); + + *value = *(float *)data; + + return true; +} + +bool MetaData::findPointer(uint32_t key, void **value) { + uint32_t type; + const void *data; + size_t size; + if (!findData(key, &type, &data, &size) 
|| type != TYPE_POINTER) { + return false; + } + + CHECK_EQ(size, sizeof(*value)); + + *value = *(void **)data; + + return true; +} + +bool MetaData::setData( + uint32_t key, uint32_t type, const void *data, size_t size) { + bool overwrote_existing = true; + + ssize_t i = mItems.indexOfKey(key); + if (i < 0) { + typed_data item; + i = mItems.add(key, item); + + overwrote_existing = false; + } + + typed_data &item = mItems.editValueAt(i); + + item.setData(type, data, size); + + return overwrote_existing; +} + +bool MetaData::findData(uint32_t key, uint32_t *type, + const void **data, size_t *size) const { + ssize_t i = mItems.indexOfKey(key); + + if (i < 0) { + return false; + } + + const typed_data &item = mItems.valueAt(i); + + item.getData(type, data, size); + + return true; +} + +MetaData::typed_data::typed_data() + : mType(0), + mSize(0) { +} + +MetaData::typed_data::~typed_data() { + clear(); +} + +MetaData::typed_data::typed_data(const typed_data &from) + : mType(from.mType), + mSize(0) { + allocateStorage(from.mSize); + memcpy(storage(), from.storage(), mSize); +} + +MetaData::typed_data &MetaData::typed_data::operator=( + const MetaData::typed_data &from) { + if (this != &from) { + clear(); + mType = from.mType; + allocateStorage(from.mSize); + memcpy(storage(), from.storage(), mSize); + } + + return *this; +} + +void MetaData::typed_data::clear() { + freeStorage(); + + mType = 0; +} + +void MetaData::typed_data::setData( + uint32_t type, const void *data, size_t size) { + clear(); + + mType = type; + allocateStorage(size); + memcpy(storage(), data, size); +} + +void MetaData::typed_data::getData( + uint32_t *type, const void **data, size_t *size) const { + *type = mType; + *size = mSize; + *data = storage(); +} + +void MetaData::typed_data::allocateStorage(size_t size) { + mSize = size; + + if (usesReservoir()) { + return; + } + + u.ext_data = malloc(mSize); +} + +void MetaData::typed_data::freeStorage() { + if (!usesReservoir()) { + if (u.ext_data) { + 
free(u.ext_data); + } + } + + mSize = 0; +} + +} // namespace android + diff --git a/media/libstagefright/MmapSource.cpp b/media/libstagefright/MmapSource.cpp new file mode 100644 index 000000000000..47d95f94ca68 --- /dev/null +++ b/media/libstagefright/MmapSource.cpp @@ -0,0 +1,110 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "MmapSource" +#include <utils/Log.h> + +#include <sys/mman.h> + +#include <fcntl.h> +#include <string.h> +#include <unistd.h> + +#include <media/stagefright/MediaDebug.h> +#include <media/stagefright/MmapSource.h> + +namespace android { + +MmapSource::MmapSource(const char *filename) + : mFd(open(filename, O_RDONLY)), + mBase(NULL), + mSize(0) { + LOGV("MmapSource '%s'", filename); + CHECK(mFd >= 0); + + off_t size = lseek(mFd, 0, SEEK_END); + mSize = (size_t)size; + + mBase = mmap(0, mSize, PROT_READ, MAP_FILE | MAP_SHARED, mFd, 0); + + if (mBase == (void *)-1) { + mBase = NULL; + + close(mFd); + mFd = -1; + } +} + +MmapSource::MmapSource(int fd, int64_t offset, int64_t length) + : mFd(fd), + mBase(NULL), + mSize(length) { + LOGV("MmapSource fd:%d offset:%lld length:%lld", fd, offset, length); + CHECK(fd >= 0); + + mBase = mmap(0, mSize, PROT_READ, MAP_FILE | MAP_SHARED, mFd, offset); + + if (mBase == (void *)-1) { + mBase = NULL; + + close(mFd); + mFd = -1; + } + +} + +MmapSource::~MmapSource() { + if (mFd != -1) { + 
munmap(mBase, mSize); + mBase = NULL; + mSize = 0; + + close(mFd); + mFd = -1; + } +} + +status_t MmapSource::InitCheck() const { + return mFd == -1 ? NO_INIT : OK; +} + +ssize_t MmapSource::read_at(off_t offset, void *data, size_t size) { + LOGV("read_at offset:%ld data:%p size:%d", offset, data, size); + CHECK(offset >= 0); + + size_t avail = 0; + if (offset >= 0 && offset < (off_t)mSize) { + avail = mSize - offset; + } + + if (size > avail) { + size = avail; + } + + memcpy(data, (const uint8_t *)mBase + offset, size); + + return (ssize_t)size; +} + +status_t MmapSource::getSize(off_t *size) { + *size = mSize; + + return OK; +} + +} // namespace android + diff --git a/media/libstagefright/OMXClient.cpp b/media/libstagefright/OMXClient.cpp new file mode 100644 index 000000000000..9de873eceed5 --- /dev/null +++ b/media/libstagefright/OMXClient.cpp @@ -0,0 +1,47 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "OMXClient" +#include <utils/Log.h> + +#include <binder/IServiceManager.h> +#include <media/IMediaPlayerService.h> +#include <media/stagefright/MediaDebug.h> +#include <media/stagefright/OMXClient.h> + +namespace android { + +OMXClient::OMXClient() { +} + +status_t OMXClient::connect() { + sp<IServiceManager> sm = defaultServiceManager(); + sp<IBinder> binder = sm->getService(String16("media.player")); + sp<IMediaPlayerService> service = interface_cast<IMediaPlayerService>(binder); + + CHECK(service.get() != NULL); + + mOMX = service->getOMX(); + CHECK(mOMX.get() != NULL); + + return OK; +} + +void OMXClient::disconnect() { +} + +} // namespace android diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp new file mode 100644 index 000000000000..ebf1e0c532d2 --- /dev/null +++ b/media/libstagefright/OMXCodec.cpp @@ -0,0 +1,2500 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "OMXCodec" +#include <utils/Log.h> + +#include <binder/IServiceManager.h> +#include <binder/MemoryDealer.h> +#include <binder/ProcessState.h> +#include <media/IMediaPlayerService.h> +#include <media/stagefright/ESDS.h> +#include <media/stagefright/MediaBuffer.h> +#include <media/stagefright/MediaBufferGroup.h> +#include <media/stagefright/MediaDebug.h> +#include <media/stagefright/MediaDefs.h> +#include <media/stagefright/MediaExtractor.h> +#include <media/stagefright/MetaData.h> +#include <media/stagefright/MmapSource.h> +#include <media/stagefright/OMXCodec.h> +#include <media/stagefright/Utils.h> +#include <utils/Vector.h> + +#include <OMX_Audio.h> +#include <OMX_Component.h> + +namespace android { + +struct CodecInfo { + const char *mime; + const char *codec; +}; + +static const CodecInfo kDecoderInfo[] = { + { MEDIA_MIMETYPE_IMAGE_JPEG, "OMX.TI.JPEG.decode" }, + { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.TI.MP3.decode" }, + { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.PV.mp3dec" }, + { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.TI.AMR.decode" }, + { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.PV.amrdec" }, + { MEDIA_MIMETYPE_AUDIO_AMR_WB, "OMX.TI.WBAMR.decode" }, + { MEDIA_MIMETYPE_AUDIO_AMR_WB, "OMX.PV.amrdec" }, + { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.TI.AAC.decode" }, + { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.PV.aacdec" }, + { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.video.decoder.mpeg4" }, + { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.TI.Video.Decoder" }, + { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.PV.mpeg4dec" }, + { MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.video.decoder.h263" }, + { MEDIA_MIMETYPE_VIDEO_H263, "OMX.TI.Video.Decoder" }, + { MEDIA_MIMETYPE_VIDEO_H263, "OMX.PV.h263dec" }, + { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.video.decoder.avc" }, + { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.TI.Video.Decoder" }, + { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.PV.avcdec" }, +}; + +static const CodecInfo kEncoderInfo[] = { + { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.TI.AMR.encode" }, + { 
MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.PV.amrencnb" }, + { MEDIA_MIMETYPE_AUDIO_AMR_WB, "OMX.TI.WBAMR.encode" }, + { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.TI.AAC.encode" }, + { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.PV.aacenc" }, + { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.video.encoder.mpeg4" }, + { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.TI.Video.encoder" }, + { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.PV.mpeg4enc" }, + { MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.video.encoder.h263" }, + { MEDIA_MIMETYPE_VIDEO_H263, "OMX.TI.Video.encoder" }, + { MEDIA_MIMETYPE_VIDEO_H263, "OMX.PV.h263enc" }, + { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.TI.Video.encoder" }, + { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.PV.avcenc" }, +}; + +#define CODEC_LOGI(x, ...) LOGI("[%s] "x, mComponentName, ##__VA_ARGS__) +#define CODEC_LOGV(x, ...) LOGV("[%s] "x, mComponentName, ##__VA_ARGS__) + +struct OMXCodecObserver : public BnOMXObserver { + OMXCodecObserver() { + } + + void setCodec(const sp<OMXCodec> &target) { + mTarget = target; + } + + // from IOMXObserver + virtual void onMessage(const omx_message &msg) { + sp<OMXCodec> codec = mTarget.promote(); + + if (codec.get() != NULL) { + codec->on_message(msg); + } + } + +protected: + virtual ~OMXCodecObserver() {} + +private: + wp<OMXCodec> mTarget; + + OMXCodecObserver(const OMXCodecObserver &); + OMXCodecObserver &operator=(const OMXCodecObserver &); +}; + +static const char *GetCodec(const CodecInfo *info, size_t numInfos, + const char *mime, int index) { + CHECK(index >= 0); + for(size_t i = 0; i < numInfos; ++i) { + if (!strcasecmp(mime, info[i].mime)) { + if (index == 0) { + return info[i].codec; + } + + --index; + } + } + + return NULL; +} + +enum { + kAVCProfileBaseline = 0x42, + kAVCProfileMain = 0x4d, + kAVCProfileExtended = 0x58, + kAVCProfileHigh = 0x64, + kAVCProfileHigh10 = 0x6e, + kAVCProfileHigh422 = 0x7a, + kAVCProfileHigh444 = 0xf4, + kAVCProfileCAVLC444Intra = 0x2c +}; + +static const char *AVCProfileToString(uint8_t profile) { + switch (profile) { + case kAVCProfileBaseline: + 
return "Baseline"; + case kAVCProfileMain: + return "Main"; + case kAVCProfileExtended: + return "Extended"; + case kAVCProfileHigh: + return "High"; + case kAVCProfileHigh10: + return "High 10"; + case kAVCProfileHigh422: + return "High 422"; + case kAVCProfileHigh444: + return "High 444"; + case kAVCProfileCAVLC444Intra: + return "CAVLC 444 Intra"; + default: return "Unknown"; + } +} + +template<class T> +static void InitOMXParams(T *params) { + params->nSize = sizeof(T); + params->nVersion.s.nVersionMajor = 1; + params->nVersion.s.nVersionMinor = 0; + params->nVersion.s.nRevision = 0; + params->nVersion.s.nStep = 0; +} + +// static +sp<OMXCodec> OMXCodec::Create( + const sp<IOMX> &omx, + const sp<MetaData> &meta, bool createEncoder, + const sp<MediaSource> &source, + const char *matchComponentName) { + const char *mime; + bool success = meta->findCString(kKeyMIMEType, &mime); + CHECK(success); + + const char *componentName = NULL; + sp<OMXCodecObserver> observer = new OMXCodecObserver; + IOMX::node_id node = 0; + for (int index = 0;; ++index) { + if (createEncoder) { + componentName = GetCodec( + kEncoderInfo, sizeof(kEncoderInfo) / sizeof(kEncoderInfo[0]), + mime, index); + } else { + componentName = GetCodec( + kDecoderInfo, sizeof(kDecoderInfo) / sizeof(kDecoderInfo[0]), + mime, index); + } + + if (!componentName) { + return NULL; + } + + // If a specific codec is requested, skip the non-matching ones. 
+ if (matchComponentName && strcmp(componentName, matchComponentName)) { + continue; + } + + LOGV("Attempting to allocate OMX node '%s'", componentName); + + status_t err = omx->allocateNode(componentName, observer, &node); + if (err == OK) { + LOGI("Successfully allocated OMX node '%s'", componentName); + break; + } + } + + uint32_t quirks = 0; + if (!strcmp(componentName, "OMX.PV.avcdec")) { + quirks |= kWantsNALFragments; + quirks |= kOutputDimensionsAre16Aligned; + } + if (!strcmp(componentName, "OMX.TI.MP3.decode")) { + quirks |= kNeedsFlushBeforeDisable; + } + if (!strcmp(componentName, "OMX.TI.AAC.decode")) { + quirks |= kNeedsFlushBeforeDisable; + quirks |= kRequiresFlushCompleteEmulation; + + // The following is currently necessary for proper shutdown + // behaviour, but NOT enabled by default in order to make the + // bug reproducible... + // quirks |= kRequiresFlushBeforeShutdown; + } + if (!strncmp(componentName, "OMX.qcom.video.encoder.", 23)) { + quirks |= kRequiresLoadedToIdleAfterAllocation; + quirks |= kRequiresAllocateBufferOnInputPorts; + } + if (!strncmp(componentName, "OMX.qcom.video.decoder.", 23)) { + // XXX Required on P....on only. + quirks |= kRequiresAllocateBufferOnOutputPorts; + quirks |= kOutputDimensionsAre16Aligned; + } + + if (!strncmp(componentName, "OMX.TI.", 7)) { + // Apparently I must not use OMX_UseBuffer on either input or + // output ports on any of the TI components or quote: + // "(I) may have unexpected problem (sic) which can be timing related + // and hard to reproduce." 
+ + quirks |= kRequiresAllocateBufferOnInputPorts; + quirks |= kRequiresAllocateBufferOnOutputPorts; + } + + sp<OMXCodec> codec = new OMXCodec( + omx, node, quirks, createEncoder, mime, componentName, + source); + + observer->setCodec(codec); + + uint32_t type; + const void *data; + size_t size; + if (meta->findData(kKeyESDS, &type, &data, &size)) { + ESDS esds((const char *)data, size); + CHECK_EQ(esds.InitCheck(), OK); + + const void *codec_specific_data; + size_t codec_specific_data_size; + esds.getCodecSpecificInfo( + &codec_specific_data, &codec_specific_data_size); + + printf("found codec-specific data of size %d\n", + codec_specific_data_size); + + codec->addCodecSpecificData( + codec_specific_data, codec_specific_data_size); + } else if (meta->findData(kKeyAVCC, &type, &data, &size)) { + printf("found avcc of size %d\n", size); + + // Parse the AVCDecoderConfigurationRecord + + const uint8_t *ptr = (const uint8_t *)data; + + CHECK(size >= 7); + CHECK_EQ(ptr[0], 1); // configurationVersion == 1 + uint8_t profile = ptr[1]; + uint8_t level = ptr[3]; + + CHECK((ptr[4] >> 2) == 0x3f); // reserved + + size_t lengthSize = 1 + (ptr[4] & 3); + + // commented out check below as H264_QVGA_500_NO_AUDIO.3gp + // violates it... 
+ // CHECK((ptr[5] >> 5) == 7); // reserved + + size_t numSeqParameterSets = ptr[5] & 31; + + ptr += 6; + size -= 6; + + for (size_t i = 0; i < numSeqParameterSets; ++i) { + CHECK(size >= 2); + size_t length = U16_AT(ptr); + + ptr += 2; + size -= 2; + + CHECK(size >= length); + + codec->addCodecSpecificData(ptr, length); + + ptr += length; + size -= length; + } + + CHECK(size >= 1); + size_t numPictureParameterSets = *ptr; + ++ptr; + --size; + + for (size_t i = 0; i < numPictureParameterSets; ++i) { + CHECK(size >= 2); + size_t length = U16_AT(ptr); + + ptr += 2; + size -= 2; + + CHECK(size >= length); + + codec->addCodecSpecificData(ptr, length); + + ptr += length; + size -= length; + } + + LOGI("AVC profile = %d (%s), level = %d", + (int)profile, AVCProfileToString(profile), (int)level / 10); + + if (!strcmp(componentName, "OMX.TI.Video.Decoder") + && (profile != kAVCProfileBaseline || level > 39)) { + // This stream exceeds the decoder's capabilities. The decoder + // does not handle this gracefully and would clobber the heap + // and wreak havoc instead... 
+ + LOGE("Profile and/or level exceed the decoder's capabilities."); + return NULL; + } + } + + if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_NB, mime)) { + codec->setAMRFormat(); + } + if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_WB, mime)) { + codec->setAMRWBFormat(); + } + if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AAC, mime)) { + int32_t numChannels, sampleRate; + CHECK(meta->findInt32(kKeyChannelCount, &numChannels)); + CHECK(meta->findInt32(kKeySampleRate, &sampleRate)); + + codec->setAACFormat(numChannels, sampleRate); + } + if (!strncasecmp(mime, "video/", 6)) { + int32_t width, height; + bool success = meta->findInt32(kKeyWidth, &width); + success = success && meta->findInt32(kKeyHeight, &height); + CHECK(success); + + if (createEncoder) { + codec->setVideoInputFormat(mime, width, height); + } else { + codec->setVideoOutputFormat(mime, width, height); + } + } + if (!strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_JPEG) + && !strcmp(componentName, "OMX.TI.JPEG.decode")) { + OMX_COLOR_FORMATTYPE format = + OMX_COLOR_Format32bitARGB8888; + // OMX_COLOR_FormatYUV420PackedPlanar; + // OMX_COLOR_FormatCbYCrY; + // OMX_COLOR_FormatYUV411Planar; + + int32_t width, height; + bool success = meta->findInt32(kKeyWidth, &width); + success = success && meta->findInt32(kKeyHeight, &height); + + int32_t compressedSize; + success = success && meta->findInt32( + kKeyMaxInputSize, &compressedSize); + + CHECK(success); + CHECK(compressedSize > 0); + + codec->setImageOutputFormat(format, width, height); + codec->setJPEGInputFormat(width, height, (OMX_U32)compressedSize); + } + + int32_t maxInputSize; + if (createEncoder && meta->findInt32(kKeyMaxInputSize, &maxInputSize)) { + codec->setMinBufferSize(kPortIndexInput, (OMX_U32)maxInputSize); + } + + if (!strcmp(componentName, "OMX.TI.AMR.encode") + || !strcmp(componentName, "OMX.TI.WBAMR.encode")) { + codec->setMinBufferSize(kPortIndexOutput, 8192); // XXX + } + + codec->initOutputFormat(meta); + + return codec; +} + +void 
OMXCodec::setMinBufferSize(OMX_U32 portIndex, OMX_U32 size) { + OMX_PARAM_PORTDEFINITIONTYPE def; + InitOMXParams(&def); + def.nPortIndex = portIndex; + + status_t err = mOMX->getParameter( + mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); + CHECK_EQ(err, OK); + + if (def.nBufferSize < size) { + def.nBufferSize = size; + + } + + err = mOMX->setParameter( + mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); + CHECK_EQ(err, OK); +} + +status_t OMXCodec::setVideoPortFormatType( + OMX_U32 portIndex, + OMX_VIDEO_CODINGTYPE compressionFormat, + OMX_COLOR_FORMATTYPE colorFormat) { + OMX_VIDEO_PARAM_PORTFORMATTYPE format; + InitOMXParams(&format); + format.nPortIndex = portIndex; + format.nIndex = 0; + bool found = false; + + OMX_U32 index = 0; + for (;;) { + format.nIndex = index; + status_t err = mOMX->getParameter( + mNode, OMX_IndexParamVideoPortFormat, + &format, sizeof(format)); + + if (err != OK) { + return err; + } + + // The following assertion is violated by TI's video decoder. + // CHECK_EQ(format.nIndex, index); + +#if 1 + CODEC_LOGI("portIndex: %ld, index: %ld, eCompressionFormat=%d eColorFormat=%d", + portIndex, + index, format.eCompressionFormat, format.eColorFormat); +#endif + + if (!strcmp("OMX.TI.Video.encoder", mComponentName)) { + if (portIndex == kPortIndexInput + && colorFormat == format.eColorFormat) { + // eCompressionFormat does not seem right. + found = true; + break; + } + if (portIndex == kPortIndexOutput + && compressionFormat == format.eCompressionFormat) { + // eColorFormat does not seem right. 
+ found = true; + break; + } + } + + if (format.eCompressionFormat == compressionFormat + && format.eColorFormat == colorFormat) { + found = true; + break; + } + + ++index; + } + + if (!found) { + return UNKNOWN_ERROR; + } + + CODEC_LOGV("found a match."); + status_t err = mOMX->setParameter( + mNode, OMX_IndexParamVideoPortFormat, + &format, sizeof(format)); + + return err; +} + +void OMXCodec::setVideoInputFormat( + const char *mime, OMX_U32 width, OMX_U32 height) { + CODEC_LOGI("setVideoInputFormat width=%ld, height=%ld", width, height); + + OMX_VIDEO_CODINGTYPE compressionFormat = OMX_VIDEO_CodingUnused; + if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime)) { + compressionFormat = OMX_VIDEO_CodingAVC; + } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4, mime)) { + compressionFormat = OMX_VIDEO_CodingMPEG4; + } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_H263, mime)) { + compressionFormat = OMX_VIDEO_CodingH263; + } else { + LOGE("Not a supported video mime type: %s", mime); + CHECK(!"Should not be here. Not a supported video mime type."); + } + + OMX_COLOR_FORMATTYPE colorFormat = + 0 ? 
OMX_COLOR_FormatYCbYCr : OMX_COLOR_FormatCbYCrY; + + if (!strncmp("OMX.qcom.video.encoder.", mComponentName, 23)) { + colorFormat = OMX_COLOR_FormatYUV420SemiPlanar; + } + + setVideoPortFormatType( + kPortIndexInput, OMX_VIDEO_CodingUnused, + colorFormat); + + setVideoPortFormatType( + kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused); + + OMX_PARAM_PORTDEFINITIONTYPE def; + InitOMXParams(&def); + def.nPortIndex = kPortIndexOutput; + + OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; + + status_t err = mOMX->getParameter( + mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); + + CHECK_EQ(err, OK); + CHECK_EQ(def.eDomain, OMX_PortDomainVideo); + + video_def->nFrameWidth = width; + video_def->nFrameHeight = height; + + video_def->eCompressionFormat = compressionFormat; + video_def->eColorFormat = OMX_COLOR_FormatUnused; + + err = mOMX->setParameter( + mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); + CHECK_EQ(err, OK); + + //////////////////////////////////////////////////////////////////////////// + + InitOMXParams(&def); + def.nPortIndex = kPortIndexInput; + + err = mOMX->getParameter( + mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); + CHECK_EQ(err, OK); + + def.nBufferSize = (width * height * 2); // (width * height * 3) / 2; + CODEC_LOGI("Setting nBufferSize = %ld", def.nBufferSize); + + CHECK_EQ(def.eDomain, OMX_PortDomainVideo); + + video_def->nFrameWidth = width; + video_def->nFrameHeight = height; + video_def->eCompressionFormat = OMX_VIDEO_CodingUnused; + video_def->eColorFormat = colorFormat; + + err = mOMX->setParameter( + mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); + CHECK_EQ(err, OK); +} + +void OMXCodec::setVideoOutputFormat( + const char *mime, OMX_U32 width, OMX_U32 height) { + CODEC_LOGI("setVideoOutputFormat width=%ld, height=%ld", width, height); + + OMX_VIDEO_CODINGTYPE compressionFormat = OMX_VIDEO_CodingUnused; + if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime)) { + compressionFormat = 
OMX_VIDEO_CodingAVC; + } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4, mime)) { + compressionFormat = OMX_VIDEO_CodingMPEG4; + } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_H263, mime)) { + compressionFormat = OMX_VIDEO_CodingH263; + } else { + LOGE("Not a supported video mime type: %s", mime); + CHECK(!"Should not be here. Not a supported video mime type."); + } + + setVideoPortFormatType( + kPortIndexInput, compressionFormat, OMX_COLOR_FormatUnused); + +#if 1 + { + OMX_VIDEO_PARAM_PORTFORMATTYPE format; + InitOMXParams(&format); + format.nPortIndex = kPortIndexOutput; + format.nIndex = 0; + + status_t err = mOMX->getParameter( + mNode, OMX_IndexParamVideoPortFormat, + &format, sizeof(format)); + CHECK_EQ(err, OK); + CHECK_EQ(format.eCompressionFormat, OMX_VIDEO_CodingUnused); + + static const int OMX_QCOM_COLOR_FormatYVU420SemiPlanar = 0x7FA30C00; + + CHECK(format.eColorFormat == OMX_COLOR_FormatYUV420Planar + || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar + || format.eColorFormat == OMX_COLOR_FormatCbYCrY + || format.eColorFormat == OMX_QCOM_COLOR_FormatYVU420SemiPlanar); + + err = mOMX->setParameter( + mNode, OMX_IndexParamVideoPortFormat, + &format, sizeof(format)); + CHECK_EQ(err, OK); + } +#endif + + OMX_PARAM_PORTDEFINITIONTYPE def; + InitOMXParams(&def); + def.nPortIndex = kPortIndexInput; + + OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; + + status_t err = mOMX->getParameter( + mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); + + CHECK_EQ(err, OK); + +#if 1 + // XXX Need a (much) better heuristic to compute input buffer sizes. 
+ const size_t X = 64 * 1024; + if (def.nBufferSize < X) { + def.nBufferSize = X; + } +#endif + + CHECK_EQ(def.eDomain, OMX_PortDomainVideo); + + video_def->nFrameWidth = width; + video_def->nFrameHeight = height; + + video_def->eColorFormat = OMX_COLOR_FormatUnused; + + err = mOMX->setParameter( + mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); + CHECK_EQ(err, OK); + + //////////////////////////////////////////////////////////////////////////// + + InitOMXParams(&def); + def.nPortIndex = kPortIndexOutput; + + err = mOMX->getParameter( + mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); + CHECK_EQ(err, OK); + CHECK_EQ(def.eDomain, OMX_PortDomainVideo); + +#if 0 + def.nBufferSize = + (((width + 15) & -16) * ((height + 15) & -16) * 3) / 2; // YUV420 +#endif + + video_def->nFrameWidth = width; + video_def->nFrameHeight = height; + + err = mOMX->setParameter( + mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); + CHECK_EQ(err, OK); +} + + +OMXCodec::OMXCodec( + const sp<IOMX> &omx, IOMX::node_id node, uint32_t quirks, + bool isEncoder, + const char *mime, + const char *componentName, + const sp<MediaSource> &source) + : mOMX(omx), + mNode(node), + mQuirks(quirks), + mIsEncoder(isEncoder), + mMIME(strdup(mime)), + mComponentName(strdup(componentName)), + mSource(source), + mCodecSpecificDataIndex(0), + mState(LOADED), + mInitialBufferSubmit(true), + mSignalledEOS(false), + mNoMoreOutputData(false), + mSeekTimeUs(-1) { + mPortStatus[kPortIndexInput] = ENABLED; + mPortStatus[kPortIndexOutput] = ENABLED; + + setComponentRole(); +} + +// static +void OMXCodec::setComponentRole( + const sp<IOMX> &omx, IOMX::node_id node, bool isEncoder, + const char *mime) { + struct MimeToRole { + const char *mime; + const char *decoderRole; + const char *encoderRole; + }; + + static const MimeToRole kMimeToRole[] = { + { MEDIA_MIMETYPE_AUDIO_MPEG, + "audio_decoder.mp3", "audio_encoder.mp3" }, + { MEDIA_MIMETYPE_AUDIO_AMR_NB, + "audio_decoder.amrnb", 
"audio_encoder.amrnb" }, + { MEDIA_MIMETYPE_AUDIO_AMR_WB, + "audio_decoder.amrwb", "audio_encoder.amrwb" }, + { MEDIA_MIMETYPE_AUDIO_AAC, + "audio_decoder.aac", "audio_encoder.aac" }, + { MEDIA_MIMETYPE_VIDEO_AVC, + "video_decoder.avc", "video_encoder.avc" }, + { MEDIA_MIMETYPE_VIDEO_MPEG4, + "video_decoder.mpeg4", "video_encoder.mpeg4" }, + { MEDIA_MIMETYPE_VIDEO_H263, + "video_decoder.h263", "video_encoder.h263" }, + }; + + static const size_t kNumMimeToRole = + sizeof(kMimeToRole) / sizeof(kMimeToRole[0]); + + size_t i; + for (i = 0; i < kNumMimeToRole; ++i) { + if (!strcasecmp(mime, kMimeToRole[i].mime)) { + break; + } + } + + if (i == kNumMimeToRole) { + return; + } + + const char *role = + isEncoder ? kMimeToRole[i].encoderRole + : kMimeToRole[i].decoderRole; + + if (role != NULL) { + OMX_PARAM_COMPONENTROLETYPE roleParams; + InitOMXParams(&roleParams); + + strncpy((char *)roleParams.cRole, + role, OMX_MAX_STRINGNAME_SIZE - 1); + + roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0'; + + status_t err = omx->setParameter( + node, OMX_IndexParamStandardComponentRole, + &roleParams, sizeof(roleParams)); + + if (err != OK) { + LOGW("Failed to set standard component role '%s'.", role); + } + } +} + +void OMXCodec::setComponentRole() { + setComponentRole(mOMX, mNode, mIsEncoder, mMIME); +} + +OMXCodec::~OMXCodec() { + CHECK(mState == LOADED || mState == ERROR); + + status_t err = mOMX->freeNode(mNode); + CHECK_EQ(err, OK); + + mNode = NULL; + setState(DEAD); + + clearCodecSpecificData(); + + free(mComponentName); + mComponentName = NULL; + + free(mMIME); + mMIME = NULL; +} + +status_t OMXCodec::init() { + // mLock is held. 
+ + CHECK_EQ(mState, LOADED); + + status_t err; + if (!(mQuirks & kRequiresLoadedToIdleAfterAllocation)) { + err = mOMX->sendCommand(mNode, OMX_CommandStateSet, OMX_StateIdle); + CHECK_EQ(err, OK); + setState(LOADED_TO_IDLE); + } + + err = allocateBuffers(); + CHECK_EQ(err, OK); + + if (mQuirks & kRequiresLoadedToIdleAfterAllocation) { + err = mOMX->sendCommand(mNode, OMX_CommandStateSet, OMX_StateIdle); + CHECK_EQ(err, OK); + + setState(LOADED_TO_IDLE); + } + + while (mState != EXECUTING && mState != ERROR) { + mAsyncCompletion.wait(mLock); + } + + return mState == ERROR ? UNKNOWN_ERROR : OK; +} + +// static +bool OMXCodec::isIntermediateState(State state) { + return state == LOADED_TO_IDLE + || state == IDLE_TO_EXECUTING + || state == EXECUTING_TO_IDLE + || state == IDLE_TO_LOADED + || state == RECONFIGURING; +} + +status_t OMXCodec::allocateBuffers() { + status_t err = allocateBuffersOnPort(kPortIndexInput); + + if (err != OK) { + return err; + } + + return allocateBuffersOnPort(kPortIndexOutput); +} + +status_t OMXCodec::allocateBuffersOnPort(OMX_U32 portIndex) { + OMX_PARAM_PORTDEFINITIONTYPE def; + InitOMXParams(&def); + def.nPortIndex = portIndex; + + status_t err = mOMX->getParameter( + mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); + + if (err != OK) { + return err; + } + + size_t totalSize = def.nBufferCountActual * def.nBufferSize; + mDealer[portIndex] = new MemoryDealer(totalSize); + + for (OMX_U32 i = 0; i < def.nBufferCountActual; ++i) { + sp<IMemory> mem = mDealer[portIndex]->allocate(def.nBufferSize); + CHECK(mem.get() != NULL); + + IOMX::buffer_id buffer; + if (portIndex == kPortIndexInput + && (mQuirks & kRequiresAllocateBufferOnInputPorts)) { + err = mOMX->allocateBufferWithBackup( + mNode, portIndex, mem, &buffer); + } else if (portIndex == kPortIndexOutput + && (mQuirks & kRequiresAllocateBufferOnOutputPorts)) { + err = mOMX->allocateBufferWithBackup( + mNode, portIndex, mem, &buffer); + } else { + err = mOMX->useBuffer(mNode, 
portIndex, mem, &buffer); + } + + if (err != OK) { + LOGE("allocate_buffer_with_backup failed"); + return err; + } + + BufferInfo info; + info.mBuffer = buffer; + info.mOwnedByComponent = false; + info.mMem = mem; + info.mMediaBuffer = NULL; + + if (portIndex == kPortIndexOutput) { + info.mMediaBuffer = new MediaBuffer(mem->pointer(), mem->size()); + info.mMediaBuffer->setObserver(this); + } + + mPortBuffers[portIndex].push(info); + + CODEC_LOGV("allocated buffer %p on %s port", buffer, + portIndex == kPortIndexInput ? "input" : "output"); + } + + dumpPortStatus(portIndex); + + return OK; +} + +void OMXCodec::on_message(const omx_message &msg) { + Mutex::Autolock autoLock(mLock); + + switch (msg.type) { + case omx_message::EVENT: + { + onEvent( + msg.u.event_data.event, msg.u.event_data.data1, + msg.u.event_data.data2); + + break; + } + + case omx_message::EMPTY_BUFFER_DONE: + { + IOMX::buffer_id buffer = msg.u.extended_buffer_data.buffer; + + CODEC_LOGV("EMPTY_BUFFER_DONE(buffer: %p)", buffer); + + Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexInput]; + size_t i = 0; + while (i < buffers->size() && (*buffers)[i].mBuffer != buffer) { + ++i; + } + + CHECK(i < buffers->size()); + if (!(*buffers)[i].mOwnedByComponent) { + LOGW("We already own input buffer %p, yet received " + "an EMPTY_BUFFER_DONE.", buffer); + } + + buffers->editItemAt(i).mOwnedByComponent = false; + + if (mPortStatus[kPortIndexInput] == DISABLING) { + CODEC_LOGV("Port is disabled, freeing buffer %p", buffer); + + status_t err = + mOMX->freeBuffer(mNode, kPortIndexInput, buffer); + CHECK_EQ(err, OK); + + buffers->removeAt(i); + } else if (mPortStatus[kPortIndexInput] != SHUTTING_DOWN) { + CHECK_EQ(mPortStatus[kPortIndexInput], ENABLED); + drainInputBuffer(&buffers->editItemAt(i)); + } + + break; + } + + case omx_message::FILL_BUFFER_DONE: + { + IOMX::buffer_id buffer = msg.u.extended_buffer_data.buffer; + OMX_U32 flags = msg.u.extended_buffer_data.flags; + + 
CODEC_LOGV("FILL_BUFFER_DONE(buffer: %p, size: %ld, flags: 0x%08lx)", + buffer, + msg.u.extended_buffer_data.range_length, + flags); + + CODEC_LOGV("FILL_BUFFER_DONE(timestamp: %lld us (%.2f secs))", + msg.u.extended_buffer_data.timestamp, + msg.u.extended_buffer_data.timestamp / 1E6); + + Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexOutput]; + size_t i = 0; + while (i < buffers->size() && (*buffers)[i].mBuffer != buffer) { + ++i; + } + + CHECK(i < buffers->size()); + BufferInfo *info = &buffers->editItemAt(i); + + if (!info->mOwnedByComponent) { + LOGW("We already own output buffer %p, yet received " + "a FILL_BUFFER_DONE.", buffer); + } + + info->mOwnedByComponent = false; + + if (mPortStatus[kPortIndexOutput] == DISABLING) { + CODEC_LOGV("Port is disabled, freeing buffer %p", buffer); + + status_t err = + mOMX->freeBuffer(mNode, kPortIndexOutput, buffer); + CHECK_EQ(err, OK); + + buffers->removeAt(i); + } else if (mPortStatus[kPortIndexOutput] == ENABLED + && (flags & OMX_BUFFERFLAG_EOS)) { + CODEC_LOGV("No more output data."); + mNoMoreOutputData = true; + mBufferFilled.signal(); + } else if (mPortStatus[kPortIndexOutput] != SHUTTING_DOWN) { + CHECK_EQ(mPortStatus[kPortIndexOutput], ENABLED); + + MediaBuffer *buffer = info->mMediaBuffer; + + buffer->set_range( + msg.u.extended_buffer_data.range_offset, + msg.u.extended_buffer_data.range_length); + + buffer->meta_data()->clear(); + + buffer->meta_data()->setInt32( + kKeyTimeUnits, + (msg.u.extended_buffer_data.timestamp + 500) / 1000); + + buffer->meta_data()->setInt32( + kKeyTimeScale, 1000); + + if (msg.u.extended_buffer_data.flags & OMX_BUFFERFLAG_SYNCFRAME) { + buffer->meta_data()->setInt32(kKeyIsSyncFrame, true); + } + + buffer->meta_data()->setPointer( + kKeyPlatformPrivate, + msg.u.extended_buffer_data.platform_private); + + buffer->meta_data()->setPointer( + kKeyBufferID, + msg.u.extended_buffer_data.buffer); + + mFilledBuffers.push_back(i); + mBufferFilled.signal(); + } + + break; + } + + 
default: + { + CHECK(!"should not be here."); + break; + } + } +} + +void OMXCodec::onEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { + switch (event) { + case OMX_EventCmdComplete: + { + onCmdComplete((OMX_COMMANDTYPE)data1, data2); + break; + } + + case OMX_EventError: + { + LOGE("ERROR(%ld, %ld)", data1, data2); + + setState(ERROR); + break; + } + + case OMX_EventPortSettingsChanged: + { + onPortSettingsChanged(data1); + break; + } + + case OMX_EventBufferFlag: + { + CODEC_LOGV("EVENT_BUFFER_FLAG(%ld)", data1); + + if (data1 == kPortIndexOutput) { + mNoMoreOutputData = true; + } + break; + } + + default: + { + CODEC_LOGV("EVENT(%d, %ld, %ld)", event, data1, data2); + break; + } + } +} + +void OMXCodec::onCmdComplete(OMX_COMMANDTYPE cmd, OMX_U32 data) { + switch (cmd) { + case OMX_CommandStateSet: + { + onStateChange((OMX_STATETYPE)data); + break; + } + + case OMX_CommandPortDisable: + { + OMX_U32 portIndex = data; + CODEC_LOGV("PORT_DISABLED(%ld)", portIndex); + + CHECK(mState == EXECUTING || mState == RECONFIGURING); + CHECK_EQ(mPortStatus[portIndex], DISABLING); + CHECK_EQ(mPortBuffers[portIndex].size(), 0); + + mPortStatus[portIndex] = DISABLED; + + if (mState == RECONFIGURING) { + CHECK_EQ(portIndex, kPortIndexOutput); + + enablePortAsync(portIndex); + + status_t err = allocateBuffersOnPort(portIndex); + CHECK_EQ(err, OK); + } + break; + } + + case OMX_CommandPortEnable: + { + OMX_U32 portIndex = data; + CODEC_LOGV("PORT_ENABLED(%ld)", portIndex); + + CHECK(mState == EXECUTING || mState == RECONFIGURING); + CHECK_EQ(mPortStatus[portIndex], ENABLING); + + mPortStatus[portIndex] = ENABLED; + + if (mState == RECONFIGURING) { + CHECK_EQ(portIndex, kPortIndexOutput); + + setState(EXECUTING); + + fillOutputBuffers(); + } + break; + } + + case OMX_CommandFlush: + { + OMX_U32 portIndex = data; + + CODEC_LOGV("FLUSH_DONE(%ld)", portIndex); + + CHECK_EQ(mPortStatus[portIndex], SHUTTING_DOWN); + mPortStatus[portIndex] = ENABLED; + + 
CHECK_EQ(countBuffersWeOwn(mPortBuffers[portIndex]), + mPortBuffers[portIndex].size()); + + if (mState == RECONFIGURING) { + CHECK_EQ(portIndex, kPortIndexOutput); + + disablePortAsync(portIndex); + } else if (mState == EXECUTING_TO_IDLE) { + if (mPortStatus[kPortIndexInput] == ENABLED + && mPortStatus[kPortIndexOutput] == ENABLED) { + CODEC_LOGV("Finished flushing both ports, now completing " + "transition from EXECUTING to IDLE."); + + mPortStatus[kPortIndexInput] = SHUTTING_DOWN; + mPortStatus[kPortIndexOutput] = SHUTTING_DOWN; + + status_t err = + mOMX->sendCommand(mNode, OMX_CommandStateSet, OMX_StateIdle); + CHECK_EQ(err, OK); + } + } else { + // We're flushing both ports in preparation for seeking. + + if (mPortStatus[kPortIndexInput] == ENABLED + && mPortStatus[kPortIndexOutput] == ENABLED) { + CODEC_LOGV("Finished flushing both ports, now continuing from" + " seek-time."); + + // Clear this flag in case the decoder sent us either + // the EVENT_BUFFER_FLAG(1) or an output buffer with + // the EOS flag set _while_ flushing. Since we're going + // to submit "fresh" input data now, this flag no longer + // applies to our future. 
+ mNoMoreOutputData = false; + + drainInputBuffers(); + fillOutputBuffers(); + } + } + + break; + } + + default: + { + CODEC_LOGV("CMD_COMPLETE(%d, %ld)", cmd, data); + break; + } + } +} + +void OMXCodec::onStateChange(OMX_STATETYPE newState) { + switch (newState) { + case OMX_StateIdle: + { + CODEC_LOGV("Now Idle."); + if (mState == LOADED_TO_IDLE) { + status_t err = mOMX->sendCommand( + mNode, OMX_CommandStateSet, OMX_StateExecuting); + + CHECK_EQ(err, OK); + + setState(IDLE_TO_EXECUTING); + } else { + CHECK_EQ(mState, EXECUTING_TO_IDLE); + + CHECK_EQ( + countBuffersWeOwn(mPortBuffers[kPortIndexInput]), + mPortBuffers[kPortIndexInput].size()); + + CHECK_EQ( + countBuffersWeOwn(mPortBuffers[kPortIndexOutput]), + mPortBuffers[kPortIndexOutput].size()); + + status_t err = mOMX->sendCommand( + mNode, OMX_CommandStateSet, OMX_StateLoaded); + + CHECK_EQ(err, OK); + + err = freeBuffersOnPort(kPortIndexInput); + CHECK_EQ(err, OK); + + err = freeBuffersOnPort(kPortIndexOutput); + CHECK_EQ(err, OK); + + mPortStatus[kPortIndexInput] = ENABLED; + mPortStatus[kPortIndexOutput] = ENABLED; + + setState(IDLE_TO_LOADED); + } + break; + } + + case OMX_StateExecuting: + { + CHECK_EQ(mState, IDLE_TO_EXECUTING); + + CODEC_LOGV("Now Executing."); + + setState(EXECUTING); + + // Buffers will be submitted to the component in the first + // call to OMXCodec::read as mInitialBufferSubmit is true at + // this point. This ensures that this on_message call returns, + // releases the lock and ::init can notice the state change and + // itself return. 
+ break; + } + + case OMX_StateLoaded: + { + CHECK_EQ(mState, IDLE_TO_LOADED); + + CODEC_LOGV("Now Loaded."); + + setState(LOADED); + break; + } + + default: + { + CHECK(!"should not be here."); + break; + } + } +} + +// static +size_t OMXCodec::countBuffersWeOwn(const Vector<BufferInfo> &buffers) { + size_t n = 0; + for (size_t i = 0; i < buffers.size(); ++i) { + if (!buffers[i].mOwnedByComponent) { + ++n; + } + } + + return n; +} + +status_t OMXCodec::freeBuffersOnPort( + OMX_U32 portIndex, bool onlyThoseWeOwn) { + Vector<BufferInfo> *buffers = &mPortBuffers[portIndex]; + + status_t stickyErr = OK; + + for (size_t i = buffers->size(); i-- > 0;) { + BufferInfo *info = &buffers->editItemAt(i); + + if (onlyThoseWeOwn && info->mOwnedByComponent) { + continue; + } + + CHECK_EQ(info->mOwnedByComponent, false); + + CODEC_LOGV("freeing buffer %p on port %ld", info->mBuffer, portIndex); + + status_t err = + mOMX->freeBuffer(mNode, portIndex, info->mBuffer); + + if (err != OK) { + stickyErr = err; + } + + if (info->mMediaBuffer != NULL) { + info->mMediaBuffer->setObserver(NULL); + + // Make sure nobody but us owns this buffer at this point. 
+ CHECK_EQ(info->mMediaBuffer->refcount(), 0); + + info->mMediaBuffer->release(); + } + + buffers->removeAt(i); + } + + CHECK(onlyThoseWeOwn || buffers->isEmpty()); + + return stickyErr; +} + +void OMXCodec::onPortSettingsChanged(OMX_U32 portIndex) { + CODEC_LOGV("PORT_SETTINGS_CHANGED(%ld)", portIndex); + + CHECK_EQ(mState, EXECUTING); + CHECK_EQ(portIndex, kPortIndexOutput); + setState(RECONFIGURING); + + if (mQuirks & kNeedsFlushBeforeDisable) { + if (!flushPortAsync(portIndex)) { + onCmdComplete(OMX_CommandFlush, portIndex); + } + } else { + disablePortAsync(portIndex); + } +} + +bool OMXCodec::flushPortAsync(OMX_U32 portIndex) { + CHECK(mState == EXECUTING || mState == RECONFIGURING + || mState == EXECUTING_TO_IDLE); + + CODEC_LOGV("flushPortAsync(%ld): we own %d out of %d buffers already.", + portIndex, countBuffersWeOwn(mPortBuffers[portIndex]), + mPortBuffers[portIndex].size()); + + CHECK_EQ(mPortStatus[portIndex], ENABLED); + mPortStatus[portIndex] = SHUTTING_DOWN; + + if ((mQuirks & kRequiresFlushCompleteEmulation) + && countBuffersWeOwn(mPortBuffers[portIndex]) + == mPortBuffers[portIndex].size()) { + // No flush is necessary and this component fails to send a + // flush-complete event in this case. 
+ + return false; + } + + status_t err = + mOMX->sendCommand(mNode, OMX_CommandFlush, portIndex); + CHECK_EQ(err, OK); + + return true; +} + +void OMXCodec::disablePortAsync(OMX_U32 portIndex) { + CHECK(mState == EXECUTING || mState == RECONFIGURING); + + CHECK_EQ(mPortStatus[portIndex], ENABLED); + mPortStatus[portIndex] = DISABLING; + + status_t err = + mOMX->sendCommand(mNode, OMX_CommandPortDisable, portIndex); + CHECK_EQ(err, OK); + + freeBuffersOnPort(portIndex, true); +} + +void OMXCodec::enablePortAsync(OMX_U32 portIndex) { + CHECK(mState == EXECUTING || mState == RECONFIGURING); + + CHECK_EQ(mPortStatus[portIndex], DISABLED); + mPortStatus[portIndex] = ENABLING; + + status_t err = + mOMX->sendCommand(mNode, OMX_CommandPortEnable, portIndex); + CHECK_EQ(err, OK); +} + +void OMXCodec::fillOutputBuffers() { + CHECK_EQ(mState, EXECUTING); + + Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexOutput]; + for (size_t i = 0; i < buffers->size(); ++i) { + fillOutputBuffer(&buffers->editItemAt(i)); + } +} + +void OMXCodec::drainInputBuffers() { + CHECK(mState == EXECUTING || mState == RECONFIGURING); + + Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexInput]; + for (size_t i = 0; i < buffers->size(); ++i) { + drainInputBuffer(&buffers->editItemAt(i)); + } +} + +void OMXCodec::drainInputBuffer(BufferInfo *info) { + CHECK_EQ(info->mOwnedByComponent, false); + + if (mSignalledEOS) { + return; + } + + if (mCodecSpecificDataIndex < mCodecSpecificData.size()) { + const CodecSpecificData *specific = + mCodecSpecificData[mCodecSpecificDataIndex]; + + size_t size = specific->mSize; + + if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mMIME) + && !(mQuirks & kWantsNALFragments)) { + static const uint8_t kNALStartCode[4] = + { 0x00, 0x00, 0x00, 0x01 }; + + CHECK(info->mMem->size() >= specific->mSize + 4); + + size += 4; + + memcpy(info->mMem->pointer(), kNALStartCode, 4); + memcpy((uint8_t *)info->mMem->pointer() + 4, + specific->mData, specific->mSize); + } else { + 
CHECK(info->mMem->size() >= specific->mSize); + memcpy(info->mMem->pointer(), specific->mData, specific->mSize); + } + + status_t err = mOMX->emptyBuffer( + mNode, info->mBuffer, 0, size, + OMX_BUFFERFLAG_ENDOFFRAME | OMX_BUFFERFLAG_CODECCONFIG, + 0); + CHECK_EQ(err, OK); + + info->mOwnedByComponent = true; + + ++mCodecSpecificDataIndex; + return; + } + + MediaBuffer *srcBuffer; + status_t err; + if (mSeekTimeUs >= 0) { + MediaSource::ReadOptions options; + options.setSeekTo(mSeekTimeUs); + mSeekTimeUs = -1; + + err = mSource->read(&srcBuffer, &options); + } else { + err = mSource->read(&srcBuffer); + } + + OMX_U32 flags = OMX_BUFFERFLAG_ENDOFFRAME; + OMX_TICKS timestamp = 0; + size_t srcLength = 0; + + if (err != OK) { + CODEC_LOGV("signalling end of input stream."); + flags |= OMX_BUFFERFLAG_EOS; + + mSignalledEOS = true; + } else { + srcLength = srcBuffer->range_length(); + + if (info->mMem->size() < srcLength) { + LOGE("info->mMem->size() = %d, srcLength = %d", + info->mMem->size(), srcLength); + } + CHECK(info->mMem->size() >= srcLength); + memcpy(info->mMem->pointer(), + (const uint8_t *)srcBuffer->data() + srcBuffer->range_offset(), + srcLength); + + int32_t units, scale; + if (srcBuffer->meta_data()->findInt32(kKeyTimeUnits, &units) + && srcBuffer->meta_data()->findInt32(kKeyTimeScale, &scale)) { + timestamp = ((OMX_TICKS)units * 1000000) / scale; + + CODEC_LOGV("Calling empty_buffer on buffer %p (length %d)", + info->mBuffer, srcLength); + CODEC_LOGV("Calling empty_buffer with timestamp %lld us (%.2f secs)", + timestamp, timestamp / 1E6); + } + } + + if (srcBuffer != NULL) { + srcBuffer->release(); + srcBuffer = NULL; + } + + err = mOMX->emptyBuffer( + mNode, info->mBuffer, 0, srcLength, + flags, timestamp); + + if (err != OK) { + setState(ERROR); + return; + } + + info->mOwnedByComponent = true; +} + +void OMXCodec::fillOutputBuffer(BufferInfo *info) { + CHECK_EQ(info->mOwnedByComponent, false); + + if (mNoMoreOutputData) { + CODEC_LOGV("There is no more 
output data available, not " + "calling fillOutputBuffer"); + return; + } + + CODEC_LOGV("Calling fill_buffer on buffer %p", info->mBuffer); + status_t err = mOMX->fillBuffer(mNode, info->mBuffer); + CHECK_EQ(err, OK); + + info->mOwnedByComponent = true; +} + +void OMXCodec::drainInputBuffer(IOMX::buffer_id buffer) { + Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexInput]; + for (size_t i = 0; i < buffers->size(); ++i) { + if ((*buffers)[i].mBuffer == buffer) { + drainInputBuffer(&buffers->editItemAt(i)); + return; + } + } + + CHECK(!"should not be here."); +} + +void OMXCodec::fillOutputBuffer(IOMX::buffer_id buffer) { + Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexOutput]; + for (size_t i = 0; i < buffers->size(); ++i) { + if ((*buffers)[i].mBuffer == buffer) { + fillOutputBuffer(&buffers->editItemAt(i)); + return; + } + } + + CHECK(!"should not be here."); +} + +void OMXCodec::setState(State newState) { + mState = newState; + mAsyncCompletion.signal(); + + // This may cause some spurious wakeups but is necessary to + // unblock the reader if we enter ERROR state. 
+ mBufferFilled.signal(); +} + +void OMXCodec::setRawAudioFormat( + OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels) { + OMX_AUDIO_PARAM_PCMMODETYPE pcmParams; + InitOMXParams(&pcmParams); + pcmParams.nPortIndex = portIndex; + + status_t err = mOMX->getParameter( + mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams)); + + CHECK_EQ(err, OK); + + pcmParams.nChannels = numChannels; + pcmParams.eNumData = OMX_NumericalDataSigned; + pcmParams.bInterleaved = OMX_TRUE; + pcmParams.nBitPerSample = 16; + pcmParams.nSamplingRate = sampleRate; + pcmParams.ePCMMode = OMX_AUDIO_PCMModeLinear; + + if (numChannels == 1) { + pcmParams.eChannelMapping[0] = OMX_AUDIO_ChannelCF; + } else { + CHECK_EQ(numChannels, 2); + + pcmParams.eChannelMapping[0] = OMX_AUDIO_ChannelLF; + pcmParams.eChannelMapping[1] = OMX_AUDIO_ChannelRF; + } + + err = mOMX->setParameter( + mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams)); + + CHECK_EQ(err, OK); +} + +void OMXCodec::setAMRFormat() { + if (!mIsEncoder) { + OMX_AUDIO_PARAM_AMRTYPE def; + InitOMXParams(&def); + def.nPortIndex = kPortIndexInput; + + status_t err = + mOMX->getParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def)); + + CHECK_EQ(err, OK); + + def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF; + def.eAMRBandMode = OMX_AUDIO_AMRBandModeNB0; + + err = mOMX->setParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def)); + CHECK_EQ(err, OK); + } + + //////////////////////// + + if (mIsEncoder) { + sp<MetaData> format = mSource->getFormat(); + int32_t sampleRate; + int32_t numChannels; + CHECK(format->findInt32(kKeySampleRate, &sampleRate)); + CHECK(format->findInt32(kKeyChannelCount, &numChannels)); + + setRawAudioFormat(kPortIndexInput, sampleRate, numChannels); + } +} + +void OMXCodec::setAMRWBFormat() { + if (!mIsEncoder) { + OMX_AUDIO_PARAM_AMRTYPE def; + InitOMXParams(&def); + def.nPortIndex = kPortIndexInput; + + status_t err = + mOMX->getParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def)); 
+ + CHECK_EQ(err, OK); + + def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF; + def.eAMRBandMode = OMX_AUDIO_AMRBandModeWB0; + + err = mOMX->setParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def)); + CHECK_EQ(err, OK); + } + + //////////////////////// + + if (mIsEncoder) { + sp<MetaData> format = mSource->getFormat(); + int32_t sampleRate; + int32_t numChannels; + CHECK(format->findInt32(kKeySampleRate, &sampleRate)); + CHECK(format->findInt32(kKeyChannelCount, &numChannels)); + + setRawAudioFormat(kPortIndexInput, sampleRate, numChannels); + } +} + +void OMXCodec::setAACFormat(int32_t numChannels, int32_t sampleRate) { + if (mIsEncoder) { + setRawAudioFormat(kPortIndexInput, sampleRate, numChannels); + } else { + OMX_AUDIO_PARAM_AACPROFILETYPE profile; + InitOMXParams(&profile); + profile.nPortIndex = kPortIndexInput; + + status_t err = mOMX->getParameter( + mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); + CHECK_EQ(err, OK); + + profile.nChannels = numChannels; + profile.nSampleRate = sampleRate; + profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4ADTS; + + err = mOMX->setParameter( + mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); + CHECK_EQ(err, OK); + } +} + +void OMXCodec::setImageOutputFormat( + OMX_COLOR_FORMATTYPE format, OMX_U32 width, OMX_U32 height) { + CODEC_LOGV("setImageOutputFormat(%ld, %ld)", width, height); + +#if 0 + OMX_INDEXTYPE index; + status_t err = mOMX->get_extension_index( + mNode, "OMX.TI.JPEG.decode.Config.OutputColorFormat", &index); + CHECK_EQ(err, OK); + + err = mOMX->set_config(mNode, index, &format, sizeof(format)); + CHECK_EQ(err, OK); +#endif + + OMX_PARAM_PORTDEFINITIONTYPE def; + InitOMXParams(&def); + def.nPortIndex = kPortIndexOutput; + + status_t err = mOMX->getParameter( + mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); + CHECK_EQ(err, OK); + + CHECK_EQ(def.eDomain, OMX_PortDomainImage); + + OMX_IMAGE_PORTDEFINITIONTYPE *imageDef = &def.format.image; + + 
CHECK_EQ(imageDef->eCompressionFormat, OMX_IMAGE_CodingUnused); + imageDef->eColorFormat = format; + imageDef->nFrameWidth = width; + imageDef->nFrameHeight = height; + + switch (format) { + case OMX_COLOR_FormatYUV420PackedPlanar: + case OMX_COLOR_FormatYUV411Planar: + { + def.nBufferSize = (width * height * 3) / 2; + break; + } + + case OMX_COLOR_FormatCbYCrY: + { + def.nBufferSize = width * height * 2; + break; + } + + case OMX_COLOR_Format32bitARGB8888: + { + def.nBufferSize = width * height * 4; + break; + } + + case OMX_COLOR_Format16bitARGB4444: + case OMX_COLOR_Format16bitARGB1555: + case OMX_COLOR_Format16bitRGB565: + case OMX_COLOR_Format16bitBGR565: + { + def.nBufferSize = width * height * 2; + break; + } + + default: + CHECK(!"Should not be here. Unknown color format."); + break; + } + + def.nBufferCountActual = def.nBufferCountMin; + + err = mOMX->setParameter( + mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); + CHECK_EQ(err, OK); +} + +void OMXCodec::setJPEGInputFormat( + OMX_U32 width, OMX_U32 height, OMX_U32 compressedSize) { + OMX_PARAM_PORTDEFINITIONTYPE def; + InitOMXParams(&def); + def.nPortIndex = kPortIndexInput; + + status_t err = mOMX->getParameter( + mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); + CHECK_EQ(err, OK); + + CHECK_EQ(def.eDomain, OMX_PortDomainImage); + OMX_IMAGE_PORTDEFINITIONTYPE *imageDef = &def.format.image; + + CHECK_EQ(imageDef->eCompressionFormat, OMX_IMAGE_CodingJPEG); + imageDef->nFrameWidth = width; + imageDef->nFrameHeight = height; + + def.nBufferSize = compressedSize; + def.nBufferCountActual = def.nBufferCountMin; + + err = mOMX->setParameter( + mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); + CHECK_EQ(err, OK); +} + +void OMXCodec::addCodecSpecificData(const void *data, size_t size) { + CodecSpecificData *specific = + (CodecSpecificData *)malloc(sizeof(CodecSpecificData) + size - 1); + + specific->mSize = size; + memcpy(specific->mData, data, size); + + 
mCodecSpecificData.push(specific); +} + +void OMXCodec::clearCodecSpecificData() { + for (size_t i = 0; i < mCodecSpecificData.size(); ++i) { + free(mCodecSpecificData.editItemAt(i)); + } + mCodecSpecificData.clear(); + mCodecSpecificDataIndex = 0; +} + +status_t OMXCodec::start(MetaData *) { + Mutex::Autolock autoLock(mLock); + + if (mState != LOADED) { + return UNKNOWN_ERROR; + } + + sp<MetaData> params = new MetaData; + if (mQuirks & kWantsNALFragments) { + params->setInt32(kKeyWantsNALFragments, true); + } + status_t err = mSource->start(params.get()); + + if (err != OK) { + return err; + } + + mCodecSpecificDataIndex = 0; + mInitialBufferSubmit = true; + mSignalledEOS = false; + mNoMoreOutputData = false; + mSeekTimeUs = -1; + mFilledBuffers.clear(); + + return init(); +} + +status_t OMXCodec::stop() { + CODEC_LOGV("stop"); + + Mutex::Autolock autoLock(mLock); + + while (isIntermediateState(mState)) { + mAsyncCompletion.wait(mLock); + } + + switch (mState) { + case LOADED: + case ERROR: + break; + + case EXECUTING: + { + setState(EXECUTING_TO_IDLE); + + if (mQuirks & kRequiresFlushBeforeShutdown) { + CODEC_LOGV("This component requires a flush before transitioning " + "from EXECUTING to IDLE..."); + + bool emulateInputFlushCompletion = + !flushPortAsync(kPortIndexInput); + + bool emulateOutputFlushCompletion = + !flushPortAsync(kPortIndexOutput); + + if (emulateInputFlushCompletion) { + onCmdComplete(OMX_CommandFlush, kPortIndexInput); + } + + if (emulateOutputFlushCompletion) { + onCmdComplete(OMX_CommandFlush, kPortIndexOutput); + } + } else { + mPortStatus[kPortIndexInput] = SHUTTING_DOWN; + mPortStatus[kPortIndexOutput] = SHUTTING_DOWN; + + status_t err = + mOMX->sendCommand(mNode, OMX_CommandStateSet, OMX_StateIdle); + CHECK_EQ(err, OK); + } + + while (mState != LOADED && mState != ERROR) { + mAsyncCompletion.wait(mLock); + } + + break; + } + + default: + { + CHECK(!"should not be here."); + break; + } + } + + mSource->stop(); + + return OK; +} + 
+sp<MetaData> OMXCodec::getFormat() { + return mOutputFormat; +} + +status_t OMXCodec::read( + MediaBuffer **buffer, const ReadOptions *options) { + *buffer = NULL; + + Mutex::Autolock autoLock(mLock); + + if (mState != EXECUTING && mState != RECONFIGURING) { + return UNKNOWN_ERROR; + } + + bool seeking = false; + int64_t seekTimeUs; + if (options && options->getSeekTo(&seekTimeUs)) { + seeking = true; + } + + if (mInitialBufferSubmit) { + mInitialBufferSubmit = false; + + if (seeking) { + CHECK(seekTimeUs >= 0); + mSeekTimeUs = seekTimeUs; + + // There's no reason to trigger the code below, there's + // nothing to flush yet. + seeking = false; + } + + drainInputBuffers(); + + if (mState == EXECUTING) { + // Otherwise mState == RECONFIGURING and this code will trigger + // after the output port is reenabled. + fillOutputBuffers(); + } + } + + if (seeking) { + CODEC_LOGV("seeking to %lld us (%.2f secs)", seekTimeUs, seekTimeUs / 1E6); + + mSignalledEOS = false; + mNoMoreOutputData = false; + + CHECK(seekTimeUs >= 0); + mSeekTimeUs = seekTimeUs; + + mFilledBuffers.clear(); + + CHECK_EQ(mState, EXECUTING); + + bool emulateInputFlushCompletion = !flushPortAsync(kPortIndexInput); + bool emulateOutputFlushCompletion = !flushPortAsync(kPortIndexOutput); + + if (emulateInputFlushCompletion) { + onCmdComplete(OMX_CommandFlush, kPortIndexInput); + } + + if (emulateOutputFlushCompletion) { + onCmdComplete(OMX_CommandFlush, kPortIndexOutput); + } + } + + while (mState != ERROR && !mNoMoreOutputData && mFilledBuffers.empty()) { + mBufferFilled.wait(mLock); + } + + if (mState == ERROR) { + return UNKNOWN_ERROR; + } + + if (mFilledBuffers.empty()) { + return ERROR_END_OF_STREAM; + } + + size_t index = *mFilledBuffers.begin(); + mFilledBuffers.erase(mFilledBuffers.begin()); + + BufferInfo *info = &mPortBuffers[kPortIndexOutput].editItemAt(index); + info->mMediaBuffer->add_ref(); + *buffer = info->mMediaBuffer; + + return OK; +} + +void OMXCodec::signalBufferReturned(MediaBuffer 
*buffer) { + Mutex::Autolock autoLock(mLock); + + Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexOutput]; + for (size_t i = 0; i < buffers->size(); ++i) { + BufferInfo *info = &buffers->editItemAt(i); + + if (info->mMediaBuffer == buffer) { + CHECK_EQ(mPortStatus[kPortIndexOutput], ENABLED); + fillOutputBuffer(info); + return; + } + } + + CHECK(!"should not be here."); +} + +static const char *imageCompressionFormatString(OMX_IMAGE_CODINGTYPE type) { + static const char *kNames[] = { + "OMX_IMAGE_CodingUnused", + "OMX_IMAGE_CodingAutoDetect", + "OMX_IMAGE_CodingJPEG", + "OMX_IMAGE_CodingJPEG2K", + "OMX_IMAGE_CodingEXIF", + "OMX_IMAGE_CodingTIFF", + "OMX_IMAGE_CodingGIF", + "OMX_IMAGE_CodingPNG", + "OMX_IMAGE_CodingLZW", + "OMX_IMAGE_CodingBMP", + }; + + size_t numNames = sizeof(kNames) / sizeof(kNames[0]); + + if (type < 0 || (size_t)type >= numNames) { + return "UNKNOWN"; + } else { + return kNames[type]; + } +} + +static const char *colorFormatString(OMX_COLOR_FORMATTYPE type) { + static const char *kNames[] = { + "OMX_COLOR_FormatUnused", + "OMX_COLOR_FormatMonochrome", + "OMX_COLOR_Format8bitRGB332", + "OMX_COLOR_Format12bitRGB444", + "OMX_COLOR_Format16bitARGB4444", + "OMX_COLOR_Format16bitARGB1555", + "OMX_COLOR_Format16bitRGB565", + "OMX_COLOR_Format16bitBGR565", + "OMX_COLOR_Format18bitRGB666", + "OMX_COLOR_Format18bitARGB1665", + "OMX_COLOR_Format19bitARGB1666", + "OMX_COLOR_Format24bitRGB888", + "OMX_COLOR_Format24bitBGR888", + "OMX_COLOR_Format24bitARGB1887", + "OMX_COLOR_Format25bitARGB1888", + "OMX_COLOR_Format32bitBGRA8888", + "OMX_COLOR_Format32bitARGB8888", + "OMX_COLOR_FormatYUV411Planar", + "OMX_COLOR_FormatYUV411PackedPlanar", + "OMX_COLOR_FormatYUV420Planar", + "OMX_COLOR_FormatYUV420PackedPlanar", + "OMX_COLOR_FormatYUV420SemiPlanar", + "OMX_COLOR_FormatYUV422Planar", + "OMX_COLOR_FormatYUV422PackedPlanar", + "OMX_COLOR_FormatYUV422SemiPlanar", + "OMX_COLOR_FormatYCbYCr", + "OMX_COLOR_FormatYCrYCb", + "OMX_COLOR_FormatCbYCrY", + 
"OMX_COLOR_FormatCrYCbY", + "OMX_COLOR_FormatYUV444Interleaved", + "OMX_COLOR_FormatRawBayer8bit", + "OMX_COLOR_FormatRawBayer10bit", + "OMX_COLOR_FormatRawBayer8bitcompressed", + "OMX_COLOR_FormatL2", + "OMX_COLOR_FormatL4", + "OMX_COLOR_FormatL8", + "OMX_COLOR_FormatL16", + "OMX_COLOR_FormatL24", + "OMX_COLOR_FormatL32", + "OMX_COLOR_FormatYUV420PackedSemiPlanar", + "OMX_COLOR_FormatYUV422PackedSemiPlanar", + "OMX_COLOR_Format18BitBGR666", + "OMX_COLOR_Format24BitARGB6666", + "OMX_COLOR_Format24BitABGR6666", + }; + + size_t numNames = sizeof(kNames) / sizeof(kNames[0]); + + static const int OMX_QCOM_COLOR_FormatYVU420SemiPlanar = 0x7FA30C00; + + if (type == OMX_QCOM_COLOR_FormatYVU420SemiPlanar) { + return "OMX_QCOM_COLOR_FormatYVU420SemiPlanar"; + } else if (type < 0 || (size_t)type >= numNames) { + return "UNKNOWN"; + } else { + return kNames[type]; + } +} + +static const char *videoCompressionFormatString(OMX_VIDEO_CODINGTYPE type) { + static const char *kNames[] = { + "OMX_VIDEO_CodingUnused", + "OMX_VIDEO_CodingAutoDetect", + "OMX_VIDEO_CodingMPEG2", + "OMX_VIDEO_CodingH263", + "OMX_VIDEO_CodingMPEG4", + "OMX_VIDEO_CodingWMV", + "OMX_VIDEO_CodingRV", + "OMX_VIDEO_CodingAVC", + "OMX_VIDEO_CodingMJPEG", + }; + + size_t numNames = sizeof(kNames) / sizeof(kNames[0]); + + if (type < 0 || (size_t)type >= numNames) { + return "UNKNOWN"; + } else { + return kNames[type]; + } +} + +static const char *audioCodingTypeString(OMX_AUDIO_CODINGTYPE type) { + static const char *kNames[] = { + "OMX_AUDIO_CodingUnused", + "OMX_AUDIO_CodingAutoDetect", + "OMX_AUDIO_CodingPCM", + "OMX_AUDIO_CodingADPCM", + "OMX_AUDIO_CodingAMR", + "OMX_AUDIO_CodingGSMFR", + "OMX_AUDIO_CodingGSMEFR", + "OMX_AUDIO_CodingGSMHR", + "OMX_AUDIO_CodingPDCFR", + "OMX_AUDIO_CodingPDCEFR", + "OMX_AUDIO_CodingPDCHR", + "OMX_AUDIO_CodingTDMAFR", + "OMX_AUDIO_CodingTDMAEFR", + "OMX_AUDIO_CodingQCELP8", + "OMX_AUDIO_CodingQCELP13", + "OMX_AUDIO_CodingEVRC", + "OMX_AUDIO_CodingSMV", + "OMX_AUDIO_CodingG711", 
+ "OMX_AUDIO_CodingG723", + "OMX_AUDIO_CodingG726", + "OMX_AUDIO_CodingG729", + "OMX_AUDIO_CodingAAC", + "OMX_AUDIO_CodingMP3", + "OMX_AUDIO_CodingSBC", + "OMX_AUDIO_CodingVORBIS", + "OMX_AUDIO_CodingWMA", + "OMX_AUDIO_CodingRA", + "OMX_AUDIO_CodingMIDI", + }; + + size_t numNames = sizeof(kNames) / sizeof(kNames[0]); + + if (type < 0 || (size_t)type >= numNames) { + return "UNKNOWN"; + } else { + return kNames[type]; + } +} + +static const char *audioPCMModeString(OMX_AUDIO_PCMMODETYPE type) { + static const char *kNames[] = { + "OMX_AUDIO_PCMModeLinear", + "OMX_AUDIO_PCMModeALaw", + "OMX_AUDIO_PCMModeMULaw", + }; + + size_t numNames = sizeof(kNames) / sizeof(kNames[0]); + + if (type < 0 || (size_t)type >= numNames) { + return "UNKNOWN"; + } else { + return kNames[type]; + } +} + +static const char *amrBandModeString(OMX_AUDIO_AMRBANDMODETYPE type) { + static const char *kNames[] = { + "OMX_AUDIO_AMRBandModeUnused", + "OMX_AUDIO_AMRBandModeNB0", + "OMX_AUDIO_AMRBandModeNB1", + "OMX_AUDIO_AMRBandModeNB2", + "OMX_AUDIO_AMRBandModeNB3", + "OMX_AUDIO_AMRBandModeNB4", + "OMX_AUDIO_AMRBandModeNB5", + "OMX_AUDIO_AMRBandModeNB6", + "OMX_AUDIO_AMRBandModeNB7", + "OMX_AUDIO_AMRBandModeWB0", + "OMX_AUDIO_AMRBandModeWB1", + "OMX_AUDIO_AMRBandModeWB2", + "OMX_AUDIO_AMRBandModeWB3", + "OMX_AUDIO_AMRBandModeWB4", + "OMX_AUDIO_AMRBandModeWB5", + "OMX_AUDIO_AMRBandModeWB6", + "OMX_AUDIO_AMRBandModeWB7", + "OMX_AUDIO_AMRBandModeWB8", + }; + + size_t numNames = sizeof(kNames) / sizeof(kNames[0]); + + if (type < 0 || (size_t)type >= numNames) { + return "UNKNOWN"; + } else { + return kNames[type]; + } +} + +static const char *amrFrameFormatString(OMX_AUDIO_AMRFRAMEFORMATTYPE type) { + static const char *kNames[] = { + "OMX_AUDIO_AMRFrameFormatConformance", + "OMX_AUDIO_AMRFrameFormatIF1", + "OMX_AUDIO_AMRFrameFormatIF2", + "OMX_AUDIO_AMRFrameFormatFSF", + "OMX_AUDIO_AMRFrameFormatRTPPayload", + "OMX_AUDIO_AMRFrameFormatITU", + }; + + size_t numNames = sizeof(kNames) / 
sizeof(kNames[0]); + + if (type < 0 || (size_t)type >= numNames) { + return "UNKNOWN"; + } else { + return kNames[type]; + } +} + +void OMXCodec::dumpPortStatus(OMX_U32 portIndex) { + OMX_PARAM_PORTDEFINITIONTYPE def; + InitOMXParams(&def); + def.nPortIndex = portIndex; + + status_t err = mOMX->getParameter( + mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); + CHECK_EQ(err, OK); + + printf("%s Port = {\n", portIndex == kPortIndexInput ? "Input" : "Output"); + + CHECK((portIndex == kPortIndexInput && def.eDir == OMX_DirInput) + || (portIndex == kPortIndexOutput && def.eDir == OMX_DirOutput)); + + printf(" nBufferCountActual = %ld\n", def.nBufferCountActual); + printf(" nBufferCountMin = %ld\n", def.nBufferCountMin); + printf(" nBufferSize = %ld\n", def.nBufferSize); + + switch (def.eDomain) { + case OMX_PortDomainImage: + { + const OMX_IMAGE_PORTDEFINITIONTYPE *imageDef = &def.format.image; + + printf("\n"); + printf(" // Image\n"); + printf(" nFrameWidth = %ld\n", imageDef->nFrameWidth); + printf(" nFrameHeight = %ld\n", imageDef->nFrameHeight); + printf(" nStride = %ld\n", imageDef->nStride); + + printf(" eCompressionFormat = %s\n", + imageCompressionFormatString(imageDef->eCompressionFormat)); + + printf(" eColorFormat = %s\n", + colorFormatString(imageDef->eColorFormat)); + + break; + } + + case OMX_PortDomainVideo: + { + OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video; + + printf("\n"); + printf(" // Video\n"); + printf(" nFrameWidth = %ld\n", videoDef->nFrameWidth); + printf(" nFrameHeight = %ld\n", videoDef->nFrameHeight); + printf(" nStride = %ld\n", videoDef->nStride); + + printf(" eCompressionFormat = %s\n", + videoCompressionFormatString(videoDef->eCompressionFormat)); + + printf(" eColorFormat = %s\n", + colorFormatString(videoDef->eColorFormat)); + + break; + } + + case OMX_PortDomainAudio: + { + OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio; + + printf("\n"); + printf(" // Audio\n"); + printf(" eEncoding = %s\n", + 
audioCodingTypeString(audioDef->eEncoding)); + + if (audioDef->eEncoding == OMX_AUDIO_CodingPCM) { + OMX_AUDIO_PARAM_PCMMODETYPE params; + InitOMXParams(¶ms); + params.nPortIndex = portIndex; + + err = mOMX->getParameter( + mNode, OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); + CHECK_EQ(err, OK); + + printf(" nSamplingRate = %ld\n", params.nSamplingRate); + printf(" nChannels = %ld\n", params.nChannels); + printf(" bInterleaved = %d\n", params.bInterleaved); + printf(" nBitPerSample = %ld\n", params.nBitPerSample); + + printf(" eNumData = %s\n", + params.eNumData == OMX_NumericalDataSigned + ? "signed" : "unsigned"); + + printf(" ePCMMode = %s\n", audioPCMModeString(params.ePCMMode)); + } else if (audioDef->eEncoding == OMX_AUDIO_CodingAMR) { + OMX_AUDIO_PARAM_AMRTYPE amr; + InitOMXParams(&amr); + amr.nPortIndex = portIndex; + + err = mOMX->getParameter( + mNode, OMX_IndexParamAudioAmr, &amr, sizeof(amr)); + CHECK_EQ(err, OK); + + printf(" nChannels = %ld\n", amr.nChannels); + printf(" eAMRBandMode = %s\n", + amrBandModeString(amr.eAMRBandMode)); + printf(" eAMRFrameFormat = %s\n", + amrFrameFormatString(amr.eAMRFrameFormat)); + } + + break; + } + + default: + { + printf(" // Unknown\n"); + break; + } + } + + printf("}\n"); +} + +void OMXCodec::initOutputFormat(const sp<MetaData> &inputFormat) { + mOutputFormat = new MetaData; + mOutputFormat->setCString(kKeyDecoderComponent, mComponentName); + + OMX_PARAM_PORTDEFINITIONTYPE def; + InitOMXParams(&def); + def.nPortIndex = kPortIndexOutput; + + status_t err = mOMX->getParameter( + mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); + CHECK_EQ(err, OK); + + switch (def.eDomain) { + case OMX_PortDomainImage: + { + OMX_IMAGE_PORTDEFINITIONTYPE *imageDef = &def.format.image; + CHECK_EQ(imageDef->eCompressionFormat, OMX_IMAGE_CodingUnused); + + mOutputFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW); + mOutputFormat->setInt32(kKeyColorFormat, imageDef->eColorFormat); + mOutputFormat->setInt32(kKeyWidth, 
imageDef->nFrameWidth); + mOutputFormat->setInt32(kKeyHeight, imageDef->nFrameHeight); + break; + } + + case OMX_PortDomainAudio: + { + OMX_AUDIO_PORTDEFINITIONTYPE *audio_def = &def.format.audio; + + if (audio_def->eEncoding == OMX_AUDIO_CodingPCM) { + OMX_AUDIO_PARAM_PCMMODETYPE params; + InitOMXParams(¶ms); + params.nPortIndex = kPortIndexOutput; + + err = mOMX->getParameter( + mNode, OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); + CHECK_EQ(err, OK); + + CHECK_EQ(params.eNumData, OMX_NumericalDataSigned); + CHECK_EQ(params.nBitPerSample, 16); + CHECK_EQ(params.ePCMMode, OMX_AUDIO_PCMModeLinear); + + int32_t numChannels, sampleRate; + inputFormat->findInt32(kKeyChannelCount, &numChannels); + inputFormat->findInt32(kKeySampleRate, &sampleRate); + + if ((OMX_U32)numChannels != params.nChannels) { + LOGW("Codec outputs a different number of channels than " + "the input stream contains."); + } + + mOutputFormat->setCString( + kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_RAW); + + // Use the codec-advertised number of channels, as some + // codecs appear to output stereo even if the input data is + // mono. + mOutputFormat->setInt32(kKeyChannelCount, params.nChannels); + + // The codec-reported sampleRate is not reliable... 
+ mOutputFormat->setInt32(kKeySampleRate, sampleRate); + } else if (audio_def->eEncoding == OMX_AUDIO_CodingAMR) { + OMX_AUDIO_PARAM_AMRTYPE amr; + InitOMXParams(&amr); + amr.nPortIndex = kPortIndexOutput; + + err = mOMX->getParameter( + mNode, OMX_IndexParamAudioAmr, &amr, sizeof(amr)); + CHECK_EQ(err, OK); + + CHECK_EQ(amr.nChannels, 1); + mOutputFormat->setInt32(kKeyChannelCount, 1); + + if (amr.eAMRBandMode >= OMX_AUDIO_AMRBandModeNB0 + && amr.eAMRBandMode <= OMX_AUDIO_AMRBandModeNB7) { + mOutputFormat->setCString( + kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_AMR_NB); + mOutputFormat->setInt32(kKeySampleRate, 8000); + } else if (amr.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0 + && amr.eAMRBandMode <= OMX_AUDIO_AMRBandModeWB8) { + mOutputFormat->setCString( + kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_AMR_WB); + mOutputFormat->setInt32(kKeySampleRate, 16000); + } else { + CHECK(!"Unknown AMR band mode."); + } + } else if (audio_def->eEncoding == OMX_AUDIO_CodingAAC) { + mOutputFormat->setCString( + kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_AAC); + } else { + CHECK(!"Should not be here. Unknown audio encoding."); + } + break; + } + + case OMX_PortDomainVideo: + { + OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; + + if (video_def->eCompressionFormat == OMX_VIDEO_CodingUnused) { + mOutputFormat->setCString( + kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW); + } else if (video_def->eCompressionFormat == OMX_VIDEO_CodingMPEG4) { + mOutputFormat->setCString( + kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG4); + } else if (video_def->eCompressionFormat == OMX_VIDEO_CodingH263) { + mOutputFormat->setCString( + kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263); + } else if (video_def->eCompressionFormat == OMX_VIDEO_CodingAVC) { + mOutputFormat->setCString( + kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC); + } else { + CHECK(!"Unknown compression format."); + } + + if (mQuirks & kOutputDimensionsAre16Aligned) { + // This component appears to be lying to me. 
+ mOutputFormat->setInt32( + kKeyWidth, (video_def->nFrameWidth + 15) & -16); + mOutputFormat->setInt32( + kKeyHeight, (video_def->nFrameHeight + 15) & -16); + } else { + mOutputFormat->setInt32(kKeyWidth, video_def->nFrameWidth); + mOutputFormat->setInt32(kKeyHeight, video_def->nFrameHeight); + } + + mOutputFormat->setInt32(kKeyColorFormat, video_def->eColorFormat); + break; + } + + default: + { + CHECK(!"should not be here, neither audio nor video."); + break; + } + } +} + +//////////////////////////////////////////////////////////////////////////////// + +status_t QueryCodecs( + const sp<IOMX> &omx, + const char *mime, bool queryDecoders, + Vector<CodecCapabilities> *results) { + results->clear(); + + for (int index = 0;; ++index) { + const char *componentName; + + if (!queryDecoders) { + componentName = GetCodec( + kEncoderInfo, sizeof(kEncoderInfo) / sizeof(kEncoderInfo[0]), + mime, index); + } else { + componentName = GetCodec( + kDecoderInfo, sizeof(kDecoderInfo) / sizeof(kDecoderInfo[0]), + mime, index); + } + + if (!componentName) { + return OK; + } + + sp<OMXCodecObserver> observer = new OMXCodecObserver; + IOMX::node_id node; + status_t err = omx->allocateNode(componentName, observer, &node); + + if (err != OK) { + continue; + } + + OMXCodec::setComponentRole(omx, node, queryDecoders, mime); + + results->push(); + CodecCapabilities *caps = &results->editItemAt(results->size() - 1); + caps->mComponentName = componentName; + + OMX_VIDEO_PARAM_PROFILELEVELTYPE param; + InitOMXParams(¶m); + + param.nPortIndex = queryDecoders ? 
0 : 1; + + for (param.nProfileIndex = 0;; ++param.nProfileIndex) { + err = omx->getParameter( + node, OMX_IndexParamVideoProfileLevelQuerySupported, + ¶m, sizeof(param)); + + if (err != OK) { + break; + } + + CodecProfileLevel profileLevel; + profileLevel.mProfile = param.eProfile; + profileLevel.mLevel = param.eLevel; + + caps->mProfileLevels.push(profileLevel); + } + + CHECK_EQ(omx->freeNode(node), OK); + } +} + +} // namespace android diff --git a/media/libstagefright/SampleTable.cpp b/media/libstagefright/SampleTable.cpp new file mode 100644 index 000000000000..8efa7c7bd721 --- /dev/null +++ b/media/libstagefright/SampleTable.cpp @@ -0,0 +1,578 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "SampleTable" +#include <utils/Log.h> + +#include <arpa/inet.h> + +#include <media/stagefright/DataSource.h> +#include <media/stagefright/MediaDebug.h> +#include <media/stagefright/SampleTable.h> +#include <media/stagefright/Utils.h> + +namespace android { + +static const uint32_t kChunkOffsetType32 = FOURCC('s', 't', 'c', 'o'); +static const uint32_t kChunkOffsetType64 = FOURCC('c', 'o', '6', '4'); +static const uint32_t kSampleSizeType32 = FOURCC('s', 't', 's', 'z'); +static const uint32_t kSampleSizeTypeCompact = FOURCC('s', 't', 'z', '2'); + +SampleTable::SampleTable(const sp<DataSource> &source) + : mDataSource(source), + mChunkOffsetOffset(-1), + mChunkOffsetType(0), + mNumChunkOffsets(0), + mSampleToChunkOffset(-1), + mNumSampleToChunkOffsets(0), + mSampleSizeOffset(-1), + mSampleSizeFieldSize(0), + mDefaultSampleSize(0), + mNumSampleSizes(0), + mTimeToSampleCount(0), + mTimeToSample(NULL), + mSyncSampleOffset(-1), + mNumSyncSamples(0) { +} + +SampleTable::~SampleTable() { + delete[] mTimeToSample; + mTimeToSample = NULL; +} + +status_t SampleTable::setChunkOffsetParams( + uint32_t type, off_t data_offset, off_t data_size) { + if (mChunkOffsetOffset >= 0) { + return ERROR_MALFORMED; + } + + CHECK(type == kChunkOffsetType32 || type == kChunkOffsetType64); + + mChunkOffsetOffset = data_offset; + mChunkOffsetType = type; + + if (data_size < 8) { + return ERROR_MALFORMED; + } + + uint8_t header[8]; + if (mDataSource->read_at( + data_offset, header, sizeof(header)) < (ssize_t)sizeof(header)) { + return ERROR_IO; + } + + if (U32_AT(header) != 0) { + // Expected version = 0, flags = 0. 
+ return ERROR_MALFORMED; + } + + mNumChunkOffsets = U32_AT(&header[4]); + + if (mChunkOffsetType == kChunkOffsetType32) { + if (data_size < 8 + mNumChunkOffsets * 4) { + return ERROR_MALFORMED; + } + } else { + if (data_size < 8 + mNumChunkOffsets * 8) { + return ERROR_MALFORMED; + } + } + + return OK; +} + +status_t SampleTable::setSampleToChunkParams( + off_t data_offset, off_t data_size) { + if (mSampleToChunkOffset >= 0) { + return ERROR_MALFORMED; + } + + mSampleToChunkOffset = data_offset; + + if (data_size < 8) { + return ERROR_MALFORMED; + } + + uint8_t header[8]; + if (mDataSource->read_at( + data_offset, header, sizeof(header)) < (ssize_t)sizeof(header)) { + return ERROR_IO; + } + + if (U32_AT(header) != 0) { + // Expected version = 0, flags = 0. + return ERROR_MALFORMED; + } + + mNumSampleToChunkOffsets = U32_AT(&header[4]); + + if (data_size < 8 + mNumSampleToChunkOffsets * 12) { + return ERROR_MALFORMED; + } + + return OK; +} + +status_t SampleTable::setSampleSizeParams( + uint32_t type, off_t data_offset, off_t data_size) { + if (mSampleSizeOffset >= 0) { + return ERROR_MALFORMED; + } + + CHECK(type == kSampleSizeType32 || type == kSampleSizeTypeCompact); + + mSampleSizeOffset = data_offset; + + if (data_size < 12) { + return ERROR_MALFORMED; + } + + uint8_t header[12]; + if (mDataSource->read_at( + data_offset, header, sizeof(header)) < (ssize_t)sizeof(header)) { + return ERROR_IO; + } + + if (U32_AT(header) != 0) { + // Expected version = 0, flags = 0. + return ERROR_MALFORMED; + } + + mDefaultSampleSize = U32_AT(&header[4]); + mNumSampleSizes = U32_AT(&header[8]); + + if (type == kSampleSizeType32) { + mSampleSizeFieldSize = 32; + + if (mDefaultSampleSize != 0) { + return OK; + } + + if (data_size < 12 + mNumSampleSizes * 4) { + return ERROR_MALFORMED; + } + } else { + if ((mDefaultSampleSize & 0xffffff00) != 0) { + // The high 24 bits are reserved and must be 0. 
+ return ERROR_MALFORMED; + } + + mSampleSizeFieldSize = mDefaultSampleSize & 0xf; + mDefaultSampleSize = 0; + + if (mSampleSizeFieldSize != 4 && mSampleSizeFieldSize != 8 + && mSampleSizeFieldSize != 16) { + return ERROR_MALFORMED; + } + + if (data_size < 12 + (mNumSampleSizes * mSampleSizeFieldSize + 4) / 8) { + return ERROR_MALFORMED; + } + } + + return OK; +} + +status_t SampleTable::setTimeToSampleParams( + off_t data_offset, off_t data_size) { + if (mTimeToSample != NULL || data_size < 8) { + return ERROR_MALFORMED; + } + + uint8_t header[8]; + if (mDataSource->read_at( + data_offset, header, sizeof(header)) < (ssize_t)sizeof(header)) { + return ERROR_IO; + } + + if (U32_AT(header) != 0) { + // Expected version = 0, flags = 0. + return ERROR_MALFORMED; + } + + mTimeToSampleCount = U32_AT(&header[4]); + mTimeToSample = new uint32_t[mTimeToSampleCount * 2]; + + size_t size = sizeof(uint32_t) * mTimeToSampleCount * 2; + if (mDataSource->read_at( + data_offset + 8, mTimeToSample, size) < (ssize_t)size) { + return ERROR_IO; + } + + for (uint32_t i = 0; i < mTimeToSampleCount * 2; ++i) { + mTimeToSample[i] = ntohl(mTimeToSample[i]); + } + + return OK; +} + +status_t SampleTable::setSyncSampleParams(off_t data_offset, off_t data_size) { + if (mSyncSampleOffset >= 0 || data_size < 8) { + return ERROR_MALFORMED; + } + + mSyncSampleOffset = data_offset; + + uint8_t header[8]; + if (mDataSource->read_at( + data_offset, header, sizeof(header)) < (ssize_t)sizeof(header)) { + return ERROR_IO; + } + + if (U32_AT(header) != 0) { + // Expected version = 0, flags = 0. 
+ return ERROR_MALFORMED; + } + + mNumSyncSamples = U32_AT(&header[4]); + + if (mNumSyncSamples < 2) { + LOGW("Table of sync samples is empty or has only a single entry!"); + } + return OK; +} + +uint32_t SampleTable::countChunkOffsets() const { + return mNumChunkOffsets; +} + +status_t SampleTable::getChunkOffset(uint32_t chunk_index, off_t *offset) { + *offset = 0; + + if (mChunkOffsetOffset < 0) { + return ERROR_MALFORMED; + } + + if (chunk_index >= mNumChunkOffsets) { + return ERROR_OUT_OF_RANGE; + } + + if (mChunkOffsetType == kChunkOffsetType32) { + uint32_t offset32; + + if (mDataSource->read_at( + mChunkOffsetOffset + 8 + 4 * chunk_index, + &offset32, + sizeof(offset32)) < (ssize_t)sizeof(offset32)) { + return ERROR_IO; + } + + *offset = ntohl(offset32); + } else { + CHECK_EQ(mChunkOffsetType, kChunkOffsetType64); + + uint64_t offset64; + if (mDataSource->read_at( + mChunkOffsetOffset + 8 + 8 * chunk_index, + &offset64, + sizeof(offset64)) < (ssize_t)sizeof(offset64)) { + return ERROR_IO; + } + + *offset = ntoh64(offset64); + } + + return OK; +} + +status_t SampleTable::getChunkForSample( + uint32_t sample_index, + uint32_t *chunk_index, + uint32_t *chunk_relative_sample_index, + uint32_t *desc_index) { + *chunk_index = 0; + *chunk_relative_sample_index = 0; + *desc_index = 0; + + if (mSampleToChunkOffset < 0) { + return ERROR_MALFORMED; + } + + if (sample_index >= countSamples()) { + return ERROR_END_OF_STREAM; + } + + uint32_t first_chunk = 0; + uint32_t samples_per_chunk = 0; + uint32_t chunk_desc_index = 0; + + uint32_t index = 0; + while (index < mNumSampleToChunkOffsets) { + uint8_t buffer[12]; + if (mDataSource->read_at(mSampleToChunkOffset + 8 + index * 12, + buffer, sizeof(buffer)) < (ssize_t)sizeof(buffer)) { + return ERROR_IO; + } + + uint32_t stop_chunk = U32_AT(buffer); + if (sample_index < (stop_chunk - first_chunk) * samples_per_chunk) { + break; + } + + sample_index -= (stop_chunk - first_chunk) * samples_per_chunk; + first_chunk = 
stop_chunk; + samples_per_chunk = U32_AT(&buffer[4]); + chunk_desc_index = U32_AT(&buffer[8]); + + ++index; + } + + *chunk_index = sample_index / samples_per_chunk + first_chunk - 1; + *chunk_relative_sample_index = sample_index % samples_per_chunk; + *desc_index = chunk_desc_index; + + return OK; +} + +uint32_t SampleTable::countSamples() const { + return mNumSampleSizes; +} + +status_t SampleTable::getSampleSize( + uint32_t sample_index, size_t *sample_size) { + *sample_size = 0; + + if (mSampleSizeOffset < 0) { + return ERROR_MALFORMED; + } + + if (sample_index >= mNumSampleSizes) { + return ERROR_OUT_OF_RANGE; + } + + if (mDefaultSampleSize > 0) { + *sample_size = mDefaultSampleSize; + return OK; + } + + switch (mSampleSizeFieldSize) { + case 32: + { + if (mDataSource->read_at( + mSampleSizeOffset + 12 + 4 * sample_index, + sample_size, sizeof(*sample_size)) < (ssize_t)sizeof(*sample_size)) { + return ERROR_IO; + } + + *sample_size = ntohl(*sample_size); + break; + } + + case 16: + { + uint16_t x; + if (mDataSource->read_at( + mSampleSizeOffset + 12 + 2 * sample_index, + &x, sizeof(x)) < (ssize_t)sizeof(x)) { + return ERROR_IO; + } + + *sample_size = ntohs(x); + break; + } + + case 8: + { + uint8_t x; + if (mDataSource->read_at( + mSampleSizeOffset + 12 + sample_index, + &x, sizeof(x)) < (ssize_t)sizeof(x)) { + return ERROR_IO; + } + + *sample_size = x; + break; + } + + default: + { + CHECK_EQ(mSampleSizeFieldSize, 4); + + uint8_t x; + if (mDataSource->read_at( + mSampleSizeOffset + 12 + sample_index / 2, + &x, sizeof(x)) < (ssize_t)sizeof(x)) { + return ERROR_IO; + } + + *sample_size = (sample_index & 1) ? 
x & 0x0f : x >> 4; + break; + } + } + + return OK; +} + +status_t SampleTable::getSampleOffsetAndSize( + uint32_t sample_index, off_t *offset, size_t *size) { + Mutex::Autolock autoLock(mLock); + + *offset = 0; + *size = 0; + + uint32_t chunk_index; + uint32_t chunk_relative_sample_index; + uint32_t desc_index; + status_t err = getChunkForSample( + sample_index, &chunk_index, &chunk_relative_sample_index, + &desc_index); + + if (err != OK) { + return err; + } + + err = getChunkOffset(chunk_index, offset); + + if (err != OK) { + return err; + } + + for (uint32_t j = 0; j < chunk_relative_sample_index; ++j) { + size_t sample_size; + err = getSampleSize(sample_index - j - 1, &sample_size); + + if (err != OK) { + return err; + } + + *offset += sample_size; + } + + err = getSampleSize(sample_index, size); + + if (err != OK) { + return err; + } + + return OK; +} + +status_t SampleTable::getMaxSampleSize(size_t *max_size) { + Mutex::Autolock autoLock(mLock); + + *max_size = 0; + + for (uint32_t i = 0; i < mNumSampleSizes; ++i) { + size_t sample_size; + status_t err = getSampleSize(i, &sample_size); + + if (err != OK) { + return err; + } + + if (sample_size > *max_size) { + *max_size = sample_size; + } + } + + return OK; +} + +status_t SampleTable::getDecodingTime(uint32_t sample_index, uint32_t *time) { + // XXX FIXME idiotic (for the common use-case) O(n) algorithm below... 
+ + Mutex::Autolock autoLock(mLock); + + if (sample_index >= mNumSampleSizes) { + return ERROR_OUT_OF_RANGE; + } + + uint32_t cur_sample = 0; + *time = 0; + for (uint32_t i = 0; i < mTimeToSampleCount; ++i) { + uint32_t n = mTimeToSample[2 * i]; + uint32_t delta = mTimeToSample[2 * i + 1]; + + if (sample_index < cur_sample + n) { + *time += delta * (sample_index - cur_sample); + + return OK; + } + + *time += delta * n; + cur_sample += n; + } + + return ERROR_OUT_OF_RANGE; +} + +status_t SampleTable::findClosestSample( + uint32_t req_time, uint32_t *sample_index, uint32_t flags) { + Mutex::Autolock autoLock(mLock); + + uint32_t cur_sample = 0; + uint32_t time = 0; + for (uint32_t i = 0; i < mTimeToSampleCount; ++i) { + uint32_t n = mTimeToSample[2 * i]; + uint32_t delta = mTimeToSample[2 * i + 1]; + + if (req_time < time + n * delta) { + int j = (req_time - time) / delta; + + *sample_index = cur_sample + j; + + if (flags & kSyncSample_Flag) { + return findClosestSyncSample(*sample_index, sample_index); + } + + return OK; + } + + time += delta * n; + cur_sample += n; + } + + return ERROR_OUT_OF_RANGE; +} + +status_t SampleTable::findClosestSyncSample( + uint32_t start_sample_index, uint32_t *sample_index) { + *sample_index = 0; + + if (mSyncSampleOffset < 0) { + // All samples are sync-samples. 
+ *sample_index = start_sample_index; + return OK; + } + + uint32_t x; + uint32_t left = 0; + uint32_t right = mNumSyncSamples; + while (left < right) { + uint32_t mid = (left + right) / 2; + if (mDataSource->read_at( + mSyncSampleOffset + 8 + (mid - 1) * 4, &x, 4) != 4) { + return ERROR_IO; + } + + x = ntohl(x); + + if (x < (start_sample_index + 1)) { + left = mid + 1; + } else if (x > (start_sample_index + 1)) { + right = mid; + } else { + break; + } + } + + *sample_index = x - 1; + + return OK; +} + +} // namespace android + diff --git a/media/libstagefright/ShoutcastSource.cpp b/media/libstagefright/ShoutcastSource.cpp new file mode 100644 index 000000000000..346b5aa7cd60 --- /dev/null +++ b/media/libstagefright/ShoutcastSource.cpp @@ -0,0 +1,156 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include <stdlib.h> + +#include <media/stagefright/HTTPStream.h> +#include <media/stagefright/MediaBuffer.h> +#include <media/stagefright/MediaBufferGroup.h> +#include <media/stagefright/MediaDebug.h> +#include <media/stagefright/MediaDefs.h> +#include <media/stagefright/MetaData.h> +#include <media/stagefright/ShoutcastSource.h> +#include <media/stagefright/stagefright_string.h> + +namespace android { + +ShoutcastSource::ShoutcastSource(HTTPStream *http) + : mHttp(http), + mMetaDataOffset(0), + mBytesUntilMetaData(0), + mGroup(NULL), + mStarted(false) { + string metaint; + if (mHttp->find_header_value("icy-metaint", &metaint)) { + char *end; + const char *start = metaint.c_str(); + mMetaDataOffset = strtol(start, &end, 10); + CHECK(end > start && *end == '\0'); + CHECK(mMetaDataOffset > 0); + + mBytesUntilMetaData = mMetaDataOffset; + } +} + +ShoutcastSource::~ShoutcastSource() { + if (mStarted) { + stop(); + } + + delete mHttp; + mHttp = NULL; +} + +status_t ShoutcastSource::start(MetaData *) { + CHECK(!mStarted); + + mGroup = new MediaBufferGroup; + mGroup->add_buffer(new MediaBuffer(4096)); // XXX + + mStarted = true; + + return OK; +} + +status_t ShoutcastSource::stop() { + CHECK(mStarted); + + delete mGroup; + mGroup = NULL; + + mStarted = false; + + return OK; +} + +sp<MetaData> ShoutcastSource::getFormat() { + sp<MetaData> meta = new MetaData; + meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_MPEG); + meta->setInt32(kKeySampleRate, 44100); + meta->setInt32(kKeyChannelCount, 2); // XXX + + return meta; +} + +status_t ShoutcastSource::read( + MediaBuffer **out, const ReadOptions *options) { + CHECK(mStarted); + + *out = NULL; + + int64_t seekTimeUs; + if (options && options->getSeekTo(&seekTimeUs)) { + return ERROR_UNSUPPORTED; + } + + MediaBuffer *buffer; + status_t err = mGroup->acquire_buffer(&buffer); + if (err != OK) { + return err; + } + + *out = buffer; + + size_t num_bytes = buffer->size(); + if (mMetaDataOffset > 0 && num_bytes > 
mBytesUntilMetaData) { + num_bytes = mBytesUntilMetaData; + } + + ssize_t n = mHttp->receive(buffer->data(), num_bytes); + + if (n <= 0) { + return (status_t)n; + } + + buffer->set_range(0, n); + + mBytesUntilMetaData -= (size_t)n; + + if (mBytesUntilMetaData == 0) { + unsigned char num_16_byte_blocks = 0; + n = mHttp->receive((char *)&num_16_byte_blocks, 1); + CHECK_EQ(n, 1); + + char meta[255 * 16]; + size_t meta_size = num_16_byte_blocks * 16; + size_t meta_length = 0; + while (meta_length < meta_size) { + n = mHttp->receive(&meta[meta_length], meta_size - meta_length); + if (n <= 0) { + return (status_t)n; + } + + meta_length += (size_t) n; + } + + while (meta_length > 0 && meta[meta_length - 1] == '\0') { + --meta_length; + } + + if (meta_length > 0) { + // Technically we should probably attach this meta data to the + // next buffer. XXX + buffer->meta_data()->setData('shou', 'shou', meta, meta_length); + } + + mBytesUntilMetaData = mMetaDataOffset; + } + + return OK; +} + +} // namespace android + diff --git a/media/libstagefright/TimeSource.cpp b/media/libstagefright/TimeSource.cpp new file mode 100644 index 000000000000..d987fbf1c731 --- /dev/null +++ b/media/libstagefright/TimeSource.cpp @@ -0,0 +1,41 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include <stddef.h> +#include <sys/time.h> + +#include <media/stagefright/TimeSource.h> + +namespace android { + +SystemTimeSource::SystemTimeSource() + : mStartTimeUs(GetSystemTimeUs()) { +} + +int64_t SystemTimeSource::getRealTimeUs() { + return GetSystemTimeUs() - mStartTimeUs; +} + +// static +int64_t SystemTimeSource::GetSystemTimeUs() { + struct timeval tv; + gettimeofday(&tv, NULL); + + return (int64_t)tv.tv_sec * 1000000 + tv.tv_usec; +} + +} // namespace android + diff --git a/media/libstagefright/TimedEventQueue.cpp b/media/libstagefright/TimedEventQueue.cpp new file mode 100644 index 000000000000..3d85f754c6d1 --- /dev/null +++ b/media/libstagefright/TimedEventQueue.cpp @@ -0,0 +1,205 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#undef __STRICT_ANSI__ +#define __STDINT_LIMITS +#define __STDC_LIMIT_MACROS +#include <stdint.h> + +#define LOG_TAG "TimedEventQueue" +#include <utils/Log.h> + +#include <sys/time.h> + +#include <media/stagefright/MediaDebug.h> +#include <media/stagefright/TimedEventQueue.h> + +namespace android { + +TimedEventQueue::TimedEventQueue() + : mRunning(false), + mStopped(false) { +} + +TimedEventQueue::~TimedEventQueue() { + stop(); +} + +void TimedEventQueue::start() { + if (mRunning) { + return; + } + + mStopped = false; + + pthread_attr_t attr; + pthread_attr_init(&attr); + pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE); + + pthread_create(&mThread, &attr, ThreadWrapper, this); + + pthread_attr_destroy(&attr); + + mRunning = true; +} + +void TimedEventQueue::stop(bool flush) { + if (!mRunning) { + return; + } + + if (flush) { + postEventToBack(new StopEvent); + } else { + postTimedEvent(new StopEvent, INT64_MIN); + } + + void *dummy; + pthread_join(mThread, &dummy); + + mQueue.clear(); + + mRunning = false; +} + +void TimedEventQueue::postEvent(const sp<Event> &event) { + // Reserve an earlier timeslot an INT64_MIN to be able to post + // the StopEvent to the absolute head of the queue. 
+ postTimedEvent(event, INT64_MIN + 1); +} + +void TimedEventQueue::postEventToBack(const sp<Event> &event) { + postTimedEvent(event, INT64_MAX); +} + +void TimedEventQueue::postEventWithDelay( + const sp<Event> &event, int64_t delay_us) { + CHECK(delay_us >= 0); + postTimedEvent(event, getRealTimeUs() + delay_us); +} + +void TimedEventQueue::postTimedEvent( + const sp<Event> &event, int64_t realtime_us) { + Mutex::Autolock autoLock(mLock); + + List<QueueItem>::iterator it = mQueue.begin(); + while (it != mQueue.end() && realtime_us >= (*it).realtime_us) { + ++it; + } + + QueueItem item; + item.event = event; + item.realtime_us = realtime_us; + + if (it == mQueue.begin()) { + mQueueHeadChangedCondition.signal(); + } + + mQueue.insert(it, item); + + mQueueNotEmptyCondition.signal(); +} + +bool TimedEventQueue::cancelEvent(const sp<Event> &event) { + Mutex::Autolock autoLock(mLock); + + List<QueueItem>::iterator it = mQueue.begin(); + while (it != mQueue.end() && (*it).event != event) { + ++it; + } + + if (it == mQueue.end()) { + return false; + } + + if (it == mQueue.begin()) { + mQueueHeadChangedCondition.signal(); + } + + mQueue.erase(it); + + return true; +} + +// static +int64_t TimedEventQueue::getRealTimeUs() { + struct timeval tv; + gettimeofday(&tv, NULL); + + return (int64_t)tv.tv_sec * 1000000 + tv.tv_usec; +} + +// static +void *TimedEventQueue::ThreadWrapper(void *me) { + static_cast<TimedEventQueue *>(me)->threadEntry(); + + return NULL; +} + +void TimedEventQueue::threadEntry() { + for (;;) { + int64_t now_us; + sp<Event> event; + + { + Mutex::Autolock autoLock(mLock); + + if (mStopped) { + break; + } + + while (mQueue.empty()) { + mQueueNotEmptyCondition.wait(mLock); + } + + List<QueueItem>::iterator it; + for (;;) { + it = mQueue.begin(); + + now_us = getRealTimeUs(); + int64_t when_us = (*it).realtime_us; + + int64_t delay_us; + if (when_us < 0 || when_us == INT64_MAX) { + delay_us = 0; + } else { + delay_us = when_us - now_us; + } + + if (delay_us 
<= 0) { + break; + } + + status_t err = mQueueHeadChangedCondition.waitRelative( + mLock, delay_us * 1000); + + if (err == -ETIMEDOUT) { + now_us = getRealTimeUs(); + break; + } + } + + event = (*it).event; + mQueue.erase(it); + } + + // Fire event with the lock NOT held. + event->fire(this, now_us); + } +} + +} // namespace android + diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp new file mode 100644 index 000000000000..2720f93e14e0 --- /dev/null +++ b/media/libstagefright/Utils.cpp @@ -0,0 +1,45 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include <arpa/inet.h> + +#include <media/stagefright/Utils.h> + +namespace android { + +uint16_t U16_AT(const uint8_t *ptr) { + return ptr[0] << 8 | ptr[1]; +} + +uint32_t U32_AT(const uint8_t *ptr) { + return ptr[0] << 24 | ptr[1] << 16 | ptr[2] << 8 | ptr[3]; +} + +uint64_t U64_AT(const uint8_t *ptr) { + return ((uint64_t)U32_AT(ptr)) << 32 | U32_AT(ptr + 4); +} + +// XXX warning: these won't work on big-endian host. 
+uint64_t ntoh64(uint64_t x) { + return ((uint64_t)ntohl(x & 0xffffffff) << 32) | ntohl(x >> 32); +} + +uint64_t hton64(uint64_t x) { + return ((uint64_t)htonl(x & 0xffffffff) << 32) | htonl(x >> 32); +} + +} // namespace android + diff --git a/media/libstagefright/include/OMX.h b/media/libstagefright/include/OMX.h new file mode 100644 index 000000000000..d0bd61e56373 --- /dev/null +++ b/media/libstagefright/include/OMX.h @@ -0,0 +1,133 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_OMX_H_ +#define ANDROID_OMX_H_ + +#include <media/IOMX.h> +#include <utils/threads.h> +#include <utils/KeyedVector.h> + +namespace android { + +class OMXNodeInstance; + +class OMX : public BnOMX, + public IBinder::DeathRecipient { +public: + OMX(); + + virtual status_t listNodes(List<String8> *list); + + virtual status_t allocateNode( + const char *name, const sp<IOMXObserver> &observer, node_id *node); + + virtual status_t freeNode(node_id node); + + virtual status_t sendCommand( + node_id node, OMX_COMMANDTYPE cmd, OMX_S32 param); + + virtual status_t getParameter( + node_id node, OMX_INDEXTYPE index, + void *params, size_t size); + + virtual status_t setParameter( + node_id node, OMX_INDEXTYPE index, + const void *params, size_t size); + + virtual status_t getConfig( + node_id node, OMX_INDEXTYPE index, + void *params, size_t size); + + virtual status_t setConfig( + node_id node, OMX_INDEXTYPE index, + const void *params, size_t size); + + virtual status_t useBuffer( + node_id node, OMX_U32 port_index, const sp<IMemory> ¶ms, + buffer_id *buffer); + + virtual status_t allocateBuffer( + node_id node, OMX_U32 port_index, size_t size, + buffer_id *buffer); + + virtual status_t allocateBufferWithBackup( + node_id node, OMX_U32 port_index, const sp<IMemory> ¶ms, + buffer_id *buffer); + + virtual status_t freeBuffer( + node_id node, OMX_U32 port_index, buffer_id buffer); + + virtual status_t fillBuffer(node_id node, buffer_id buffer); + + virtual status_t emptyBuffer( + node_id node, + buffer_id buffer, + OMX_U32 range_offset, OMX_U32 range_length, + OMX_U32 flags, OMX_TICKS timestamp); + + virtual status_t getExtensionIndex( + node_id node, + const char *parameter_name, + OMX_INDEXTYPE *index); + + virtual sp<IOMXRenderer> createRenderer( + const sp<ISurface> &surface, + const char *componentName, + OMX_COLOR_FORMATTYPE colorFormat, + size_t encodedWidth, size_t encodedHeight, + size_t displayWidth, size_t displayHeight); + + virtual void 
binderDied(const wp<IBinder> &the_late_who); + + OMX_ERRORTYPE OnEvent( + node_id node, + OMX_IN OMX_EVENTTYPE eEvent, + OMX_IN OMX_U32 nData1, + OMX_IN OMX_U32 nData2, + OMX_IN OMX_PTR pEventData); + + OMX_ERRORTYPE OnEmptyBufferDone( + node_id node, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer); + + OMX_ERRORTYPE OnFillBufferDone( + node_id node, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer); + + void invalidateNodeID(node_id node); + +private: + Mutex mLock; + + struct CallbackDispatcher; + sp<CallbackDispatcher> mDispatcher; + + int32_t mNodeCounter; + + KeyedVector<wp<IBinder>, OMXNodeInstance *> mLiveNodes; + KeyedVector<node_id, OMXNodeInstance *> mNodeIDToInstance; + + node_id makeNodeID(OMXNodeInstance *instance); + OMXNodeInstance *findInstance(node_id node); + + void invalidateNodeID_l(node_id node); + + OMX(const OMX &); + OMX &operator=(const OMX &); +}; + +} // namespace android + +#endif // ANDROID_OMX_H_ diff --git a/media/libstagefright/include/OMXNodeInstance.h b/media/libstagefright/include/OMXNodeInstance.h new file mode 100644 index 000000000000..09a88169c040 --- /dev/null +++ b/media/libstagefright/include/OMXNodeInstance.h @@ -0,0 +1,125 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef OMX_NODE_INSTANCE_H_ + +#define OMX_NODE_INSTANCE_H_ + +#include "OMX.h" + +#include <utils/RefBase.h> +#include <utils/threads.h> + +namespace android { + +class IOMXObserver; + +struct OMXNodeInstance { + OMXNodeInstance( + OMX *owner, const sp<IOMXObserver> &observer); + + void setHandle(OMX::node_id node_id, OMX_HANDLETYPE handle); + + OMX *owner(); + sp<IOMXObserver> observer(); + OMX::node_id nodeID(); + + status_t freeNode(); + + status_t sendCommand(OMX_COMMANDTYPE cmd, OMX_S32 param); + status_t getParameter(OMX_INDEXTYPE index, void *params, size_t size); + + status_t setParameter( + OMX_INDEXTYPE index, const void *params, size_t size); + + status_t getConfig(OMX_INDEXTYPE index, void *params, size_t size); + status_t setConfig(OMX_INDEXTYPE index, const void *params, size_t size); + + status_t useBuffer( + OMX_U32 portIndex, const sp<IMemory> ¶ms, + OMX::buffer_id *buffer); + + status_t allocateBuffer( + OMX_U32 portIndex, size_t size, OMX::buffer_id *buffer); + + status_t allocateBufferWithBackup( + OMX_U32 portIndex, const sp<IMemory> ¶ms, + OMX::buffer_id *buffer); + + status_t freeBuffer(OMX_U32 portIndex, OMX::buffer_id buffer); + + status_t fillBuffer(OMX::buffer_id buffer); + + status_t emptyBuffer( + OMX::buffer_id buffer, + OMX_U32 rangeOffset, OMX_U32 rangeLength, + OMX_U32 flags, OMX_TICKS timestamp); + + status_t getExtensionIndex( + const char *parameterName, OMX_INDEXTYPE *index); + + void onMessage(const omx_message &msg); + void onObserverDied(); + void onGetHandleFailed(); + + static OMX_CALLBACKTYPE kCallbacks; + +private: + Mutex mLock; + + OMX *mOwner; + OMX::node_id mNodeID; + OMX_HANDLETYPE mHandle; + sp<IOMXObserver> mObserver; + + struct ActiveBuffer { + OMX_U32 mPortIndex; + OMX::buffer_id mID; + }; + Vector<ActiveBuffer> mActiveBuffers; + + ~OMXNodeInstance(); + + void addActiveBuffer(OMX_U32 portIndex, OMX::buffer_id id); + void removeActiveBuffer(OMX_U32 portIndex, OMX::buffer_id id); + void 
freeActiveBuffers(); + + static OMX_ERRORTYPE OnEvent( + OMX_IN OMX_HANDLETYPE hComponent, + OMX_IN OMX_PTR pAppData, + OMX_IN OMX_EVENTTYPE eEvent, + OMX_IN OMX_U32 nData1, + OMX_IN OMX_U32 nData2, + OMX_IN OMX_PTR pEventData); + + static OMX_ERRORTYPE OnEmptyBufferDone( + OMX_IN OMX_HANDLETYPE hComponent, + OMX_IN OMX_PTR pAppData, + OMX_IN OMX_BUFFERHEADERTYPE *pBuffer); + + static OMX_ERRORTYPE OnFillBufferDone( + OMX_IN OMX_HANDLETYPE hComponent, + OMX_IN OMX_PTR pAppData, + OMX_IN OMX_BUFFERHEADERTYPE *pBuffer); + + OMXNodeInstance(const OMXNodeInstance &); + OMXNodeInstance &operator=(const OMXNodeInstance &); +}; + +} // namespace android + +#endif // OMX_NODE_INSTANCE_H_ + diff --git a/media/libstagefright/omx/Android.mk b/media/libstagefright/omx/Android.mk new file mode 100644 index 000000000000..25da813aaa15 --- /dev/null +++ b/media/libstagefright/omx/Android.mk @@ -0,0 +1,36 @@ +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +# Set up the OpenCore variables. +include external/opencore/Config.mk +LOCAL_C_INCLUDES := $(PV_INCLUDES) +LOCAL_CFLAGS := $(PV_CFLAGS_MINUS_VISIBILITY) + +LOCAL_C_INCLUDES += $(JNI_H_INCLUDE) + +LOCAL_SRC_FILES:= \ + OMX.cpp \ + OMXNodeInstance.cpp \ + SoftwareRenderer.cpp + +LOCAL_SHARED_LIBRARIES := \ + libbinder \ + libmedia \ + libutils \ + libui \ + libcutils \ + libopencore_common + +ifeq ($(TARGET_OS)-$(TARGET_SIMULATOR),linux-true) + LOCAL_LDLIBS += -lpthread -ldl +endif + +ifneq ($(TARGET_SIMULATOR),true) +LOCAL_SHARED_LIBRARIES += libdl +endif + +LOCAL_PRELINK_MODULE:= false + +LOCAL_MODULE:= libstagefright_omx + +include $(BUILD_SHARED_LIBRARY) diff --git a/media/libstagefright/omx/OMX.cpp b/media/libstagefright/omx/OMX.cpp new file mode 100644 index 000000000000..4ccd4bd35dae --- /dev/null +++ b/media/libstagefright/omx/OMX.cpp @@ -0,0 +1,493 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except 
in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "OMX" +#include <utils/Log.h> + +#include "../include/OMX.h" +#include "OMXRenderer.h" + +#include "pv_omxcore.h" + +#include "../include/OMXNodeInstance.h" + +#include <binder/IMemory.h> +#include <media/stagefright/MediaDebug.h> +#include <media/stagefright/SoftwareRenderer.h> +#include <media/stagefright/VideoRenderer.h> + +#include <OMX_Component.h> + +namespace android { + +//////////////////////////////////////////////////////////////////////////////// + +struct OMX::CallbackDispatcher : public RefBase { + CallbackDispatcher(OMX *owner); + + void post(const omx_message &msg); + +protected: + virtual ~CallbackDispatcher(); + +private: + Mutex mLock; + + OMX *mOwner; + bool mDone; + Condition mQueueChanged; + List<omx_message> mQueue; + + pthread_t mThread; + + void dispatch(const omx_message &msg); + + static void *ThreadWrapper(void *me); + void threadEntry(); + + CallbackDispatcher(const CallbackDispatcher &); + CallbackDispatcher &operator=(const CallbackDispatcher &); +}; + +OMX::CallbackDispatcher::CallbackDispatcher(OMX *owner) + : mOwner(owner), + mDone(false) { + pthread_attr_t attr; + pthread_attr_init(&attr); + pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE); + + pthread_create(&mThread, &attr, ThreadWrapper, this); + + pthread_attr_destroy(&attr); +} + +OMX::CallbackDispatcher::~CallbackDispatcher() { + { + Mutex::Autolock autoLock(mLock); + + mDone = true; + mQueueChanged.signal(); + } + + void *dummy; + pthread_join(mThread, &dummy); +} + 
+void OMX::CallbackDispatcher::post(const omx_message &msg) { + Mutex::Autolock autoLock(mLock); + mQueue.push_back(msg); + mQueueChanged.signal(); +} + +void OMX::CallbackDispatcher::dispatch(const omx_message &msg) { + OMXNodeInstance *instance = mOwner->findInstance(msg.node); + if (instance == NULL) { + LOGV("Would have dispatched a message to a node that's already gone."); + return; + } + instance->onMessage(msg); +} + +// static +void *OMX::CallbackDispatcher::ThreadWrapper(void *me) { + static_cast<CallbackDispatcher *>(me)->threadEntry(); + + return NULL; +} + +void OMX::CallbackDispatcher::threadEntry() { + for (;;) { + omx_message msg; + + { + Mutex::Autolock autoLock(mLock); + while (!mDone && mQueue.empty()) { + mQueueChanged.wait(mLock); + } + + if (mDone) { + break; + } + + msg = *mQueue.begin(); + mQueue.erase(mQueue.begin()); + } + + dispatch(msg); + } +} + +//////////////////////////////////////////////////////////////////////////////// + +class BufferMeta { +public: + BufferMeta(OMX *owner, const sp<IMemory> &mem, bool is_backup = false) + : mOwner(owner), + mMem(mem), + mIsBackup(is_backup) { + } + + BufferMeta(OMX *owner, size_t size) + : mOwner(owner), + mSize(size), + mIsBackup(false) { + } + + void CopyFromOMX(const OMX_BUFFERHEADERTYPE *header) { + if (!mIsBackup) { + return; + } + + memcpy((OMX_U8 *)mMem->pointer() + header->nOffset, + header->pBuffer + header->nOffset, + header->nFilledLen); + } + + void CopyToOMX(const OMX_BUFFERHEADERTYPE *header) { + if (!mIsBackup) { + return; + } + + memcpy(header->pBuffer + header->nOffset, + (const OMX_U8 *)mMem->pointer() + header->nOffset, + header->nFilledLen); + } + +private: + OMX *mOwner; + sp<IMemory> mMem; + size_t mSize; + bool mIsBackup; + + BufferMeta(const BufferMeta &); + BufferMeta &operator=(const BufferMeta &); +}; + +OMX::OMX() + : mDispatcher(new CallbackDispatcher(this)), + mNodeCounter(0) { +} + +void OMX::binderDied(const wp<IBinder> &the_late_who) { + OMXNodeInstance *instance; 
+ + { + Mutex::Autolock autoLock(mLock); + + ssize_t index = mLiveNodes.indexOfKey(the_late_who); + CHECK(index >= 0); + + instance = mLiveNodes.editValueAt(index); + mLiveNodes.removeItemsAt(index); + + invalidateNodeID_l(instance->nodeID()); + } + + instance->onObserverDied(); +} + +status_t OMX::listNodes(List<String8> *list) { + OMX_MasterInit(); // XXX Put this somewhere else. + + list->clear(); + + OMX_U32 index = 0; + char componentName[256]; + while (OMX_MasterComponentNameEnum(componentName, sizeof(componentName), index) + == OMX_ErrorNone) { + list->push_back(String8(componentName)); + + ++index; + } + + return OK; +} + +status_t OMX::allocateNode( + const char *name, const sp<IOMXObserver> &observer, node_id *node) { + Mutex::Autolock autoLock(mLock); + + *node = 0; + + OMX_MasterInit(); // XXX Put this somewhere else. + + OMXNodeInstance *instance = new OMXNodeInstance(this, observer); + + OMX_HANDLETYPE handle; + OMX_ERRORTYPE err = OMX_MasterGetHandle( + &handle, const_cast<char *>(name), instance, + &OMXNodeInstance::kCallbacks); + + if (err != OMX_ErrorNone) { + LOGE("FAILED to allocate omx component '%s'", name); + + instance->onGetHandleFailed(); + + return UNKNOWN_ERROR; + } + + *node = makeNodeID(instance); + + instance->setHandle(*node, handle); + + mLiveNodes.add(observer->asBinder(), instance); + observer->asBinder()->linkToDeath(this); + + return OK; +} + +status_t OMX::freeNode(node_id node) { + OMXNodeInstance *instance = findInstance(node); + + ssize_t index = mLiveNodes.indexOfKey(instance->observer()->asBinder()); + CHECK(index >= 0); + mLiveNodes.removeItemsAt(index); + instance->observer()->asBinder()->unlinkToDeath(this); + + return instance->freeNode(); +} + +status_t OMX::sendCommand( + node_id node, OMX_COMMANDTYPE cmd, OMX_S32 param) { + return findInstance(node)->sendCommand(cmd, param); +} + +status_t OMX::getParameter( + node_id node, OMX_INDEXTYPE index, + void *params, size_t size) { + return 
findInstance(node)->getParameter( + index, params, size); +} + +status_t OMX::setParameter( + node_id node, OMX_INDEXTYPE index, + const void *params, size_t size) { + return findInstance(node)->setParameter( + index, params, size); +} + +status_t OMX::getConfig( + node_id node, OMX_INDEXTYPE index, + void *params, size_t size) { + return findInstance(node)->getConfig( + index, params, size); +} + +status_t OMX::setConfig( + node_id node, OMX_INDEXTYPE index, + const void *params, size_t size) { + return findInstance(node)->setConfig( + index, params, size); +} + +status_t OMX::useBuffer( + node_id node, OMX_U32 port_index, const sp<IMemory> ¶ms, + buffer_id *buffer) { + return findInstance(node)->useBuffer( + port_index, params, buffer); +} + +status_t OMX::allocateBuffer( + node_id node, OMX_U32 port_index, size_t size, + buffer_id *buffer) { + return findInstance(node)->allocateBuffer( + port_index, size, buffer); +} + +status_t OMX::allocateBufferWithBackup( + node_id node, OMX_U32 port_index, const sp<IMemory> ¶ms, + buffer_id *buffer) { + return findInstance(node)->allocateBufferWithBackup( + port_index, params, buffer); +} + +status_t OMX::freeBuffer(node_id node, OMX_U32 port_index, buffer_id buffer) { + return findInstance(node)->freeBuffer( + port_index, buffer); +} + +status_t OMX::fillBuffer(node_id node, buffer_id buffer) { + return findInstance(node)->fillBuffer(buffer); +} + +status_t OMX::emptyBuffer( + node_id node, + buffer_id buffer, + OMX_U32 range_offset, OMX_U32 range_length, + OMX_U32 flags, OMX_TICKS timestamp) { + return findInstance(node)->emptyBuffer( + buffer, range_offset, range_length, flags, timestamp); +} + +status_t OMX::getExtensionIndex( + node_id node, + const char *parameter_name, + OMX_INDEXTYPE *index) { + return findInstance(node)->getExtensionIndex( + parameter_name, index); +} + +OMX_ERRORTYPE OMX::OnEvent( + node_id node, + OMX_IN OMX_EVENTTYPE eEvent, + OMX_IN OMX_U32 nData1, + OMX_IN OMX_U32 nData2, + OMX_IN OMX_PTR 
pEventData) { + LOGV("OnEvent(%d, %ld, %ld)", eEvent, nData1, nData2); + + omx_message msg; + msg.type = omx_message::EVENT; + msg.node = node; + msg.u.event_data.event = eEvent; + msg.u.event_data.data1 = nData1; + msg.u.event_data.data2 = nData2; + + mDispatcher->post(msg); + + return OMX_ErrorNone; +} + +OMX_ERRORTYPE OMX::OnEmptyBufferDone( + node_id node, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer) { + LOGV("OnEmptyBufferDone buffer=%p", pBuffer); + + omx_message msg; + msg.type = omx_message::EMPTY_BUFFER_DONE; + msg.node = node; + msg.u.buffer_data.buffer = pBuffer; + + mDispatcher->post(msg); + + return OMX_ErrorNone; +} + +OMX_ERRORTYPE OMX::OnFillBufferDone( + node_id node, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer) { + LOGV("OnFillBufferDone buffer=%p", pBuffer); + + omx_message msg; + msg.type = omx_message::FILL_BUFFER_DONE; + msg.node = node; + msg.u.extended_buffer_data.buffer = pBuffer; + msg.u.extended_buffer_data.range_offset = pBuffer->nOffset; + msg.u.extended_buffer_data.range_length = pBuffer->nFilledLen; + msg.u.extended_buffer_data.flags = pBuffer->nFlags; + msg.u.extended_buffer_data.timestamp = pBuffer->nTimeStamp; + msg.u.extended_buffer_data.platform_private = pBuffer->pPlatformPrivate; + + mDispatcher->post(msg); + + return OMX_ErrorNone; +} + +OMX::node_id OMX::makeNodeID(OMXNodeInstance *instance) { + // mLock is already held. + + node_id node = (node_id)++mNodeCounter; + mNodeIDToInstance.add(node, instance); + + return node; +} + +OMXNodeInstance *OMX::findInstance(node_id node) { + Mutex::Autolock autoLock(mLock); + + ssize_t index = mNodeIDToInstance.indexOfKey(node); + + return index < 0 ? NULL : mNodeIDToInstance.valueAt(index); +} + +void OMX::invalidateNodeID(node_id node) { + Mutex::Autolock autoLock(mLock); + invalidateNodeID_l(node); +} + +void OMX::invalidateNodeID_l(node_id node) { + // mLock is held. 
+ mNodeIDToInstance.removeItem(node); +} + +//////////////////////////////////////////////////////////////////////////////// + +sp<IOMXRenderer> OMX::createRenderer( + const sp<ISurface> &surface, + const char *componentName, + OMX_COLOR_FORMATTYPE colorFormat, + size_t encodedWidth, size_t encodedHeight, + size_t displayWidth, size_t displayHeight) { + Mutex::Autolock autoLock(mLock); + + VideoRenderer *impl = NULL; + + static void *libHandle = NULL; + + if (!libHandle) { + libHandle = dlopen("libstagefrighthw.so", RTLD_NOW); + } + + if (libHandle) { + typedef VideoRenderer *(*CreateRendererFunc)( + const sp<ISurface> &surface, + const char *componentName, + OMX_COLOR_FORMATTYPE colorFormat, + size_t displayWidth, size_t displayHeight, + size_t decodedWidth, size_t decodedHeight); + + CreateRendererFunc func = + (CreateRendererFunc)dlsym( + libHandle, + "_Z14createRendererRKN7android2spINS_8ISurfaceEEEPKc20" + "OMX_COLOR_FORMATTYPEjjjj"); + + if (func) { + impl = (*func)(surface, componentName, colorFormat, + displayWidth, displayHeight, encodedWidth, encodedHeight); + } + } + + if (!impl) { + LOGW("Using software renderer."); + impl = new SoftwareRenderer( + colorFormat, + surface, + displayWidth, displayHeight, + encodedWidth, encodedHeight); + } + + return new OMXRenderer(impl); +} + +OMXRenderer::OMXRenderer(VideoRenderer *impl) + : mImpl(impl) { +} + +OMXRenderer::~OMXRenderer() { + delete mImpl; + mImpl = NULL; +} + +void OMXRenderer::render(IOMX::buffer_id buffer) { + OMX_BUFFERHEADERTYPE *header = (OMX_BUFFERHEADERTYPE *)buffer; + + mImpl->render( + header->pBuffer + header->nOffset, + header->nFilledLen, + header->pPlatformPrivate); +} + +} // namespace android + diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp new file mode 100644 index 000000000000..821891862c4b --- /dev/null +++ b/media/libstagefright/omx/OMXNodeInstance.cpp @@ -0,0 +1,461 @@ +/* + * Copyright (C) 2009 The Android Open Source 
Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "OMXNodeInstance" +#include <utils/Log.h> + +#include "../include/OMXNodeInstance.h" + +#include "pv_omxcore.h" + +#include <binder/IMemory.h> +#include <media/stagefright/MediaDebug.h> + +namespace android { + +struct BufferMeta { + BufferMeta(const sp<IMemory> &mem, bool is_backup = false) + : mMem(mem), + mIsBackup(is_backup) { + } + + BufferMeta(size_t size) + : mSize(size), + mIsBackup(false) { + } + + void CopyFromOMX(const OMX_BUFFERHEADERTYPE *header) { + if (!mIsBackup) { + return; + } + + memcpy((OMX_U8 *)mMem->pointer() + header->nOffset, + header->pBuffer + header->nOffset, + header->nFilledLen); + } + + void CopyToOMX(const OMX_BUFFERHEADERTYPE *header) { + if (!mIsBackup) { + return; + } + + memcpy(header->pBuffer + header->nOffset, + (const OMX_U8 *)mMem->pointer() + header->nOffset, + header->nFilledLen); + } + +private: + sp<IMemory> mMem; + size_t mSize; + bool mIsBackup; + + BufferMeta(const BufferMeta &); + BufferMeta &operator=(const BufferMeta &); +}; + +// static +OMX_CALLBACKTYPE OMXNodeInstance::kCallbacks = { + &OnEvent, &OnEmptyBufferDone, &OnFillBufferDone +}; + +OMXNodeInstance::OMXNodeInstance( + OMX *owner, const sp<IOMXObserver> &observer) + : mOwner(owner), + mNodeID(NULL), + mHandle(NULL), + mObserver(observer) { +} + +OMXNodeInstance::~OMXNodeInstance() { + CHECK_EQ(mHandle, NULL); +} + +void 
OMXNodeInstance::setHandle(OMX::node_id node_id, OMX_HANDLETYPE handle) { + CHECK_EQ(mHandle, NULL); + mNodeID = node_id; + mHandle = handle; +} + +OMX *OMXNodeInstance::owner() { + return mOwner; +} + +sp<IOMXObserver> OMXNodeInstance::observer() { + return mObserver; +} + +OMX::node_id OMXNodeInstance::nodeID() { + return mNodeID; +} + +static status_t StatusFromOMXError(OMX_ERRORTYPE err) { + return (err == OMX_ErrorNone) ? OK : UNKNOWN_ERROR; +} + +status_t OMXNodeInstance::freeNode() { + // Transition the node from its current state all the way down + // to "Loaded". + // This ensures that all active buffers are properly freed even + // for components that don't do this themselves on a call to + // "FreeHandle". + + OMX_STATETYPE state; + CHECK_EQ(OMX_GetState(mHandle, &state), OMX_ErrorNone); + switch (state) { + case OMX_StateExecuting: + { + LOGV("forcing Executing->Idle"); + sendCommand(OMX_CommandStateSet, OMX_StateIdle); + OMX_ERRORTYPE err; + while ((err = OMX_GetState(mHandle, &state)) == OMX_ErrorNone + && state != OMX_StateIdle) { + usleep(100000); + } + CHECK_EQ(err, OMX_ErrorNone); + + // fall through + } + + case OMX_StateIdle: + { + LOGV("forcing Idle->Loaded"); + sendCommand(OMX_CommandStateSet, OMX_StateLoaded); + + freeActiveBuffers(); + + OMX_ERRORTYPE err; + while ((err = OMX_GetState(mHandle, &state)) == OMX_ErrorNone + && state != OMX_StateLoaded) { + LOGV("waiting for Loaded state..."); + usleep(100000); + } + CHECK_EQ(err, OMX_ErrorNone); + + // fall through + } + + case OMX_StateLoaded: + case OMX_StateInvalid: + break; + + default: + CHECK(!"should not be here, unknown state."); + break; + } + + OMX_ERRORTYPE err = OMX_MasterFreeHandle(mHandle); + mHandle = NULL; + + if (err != OMX_ErrorNone) { + LOGE("FreeHandle FAILED with error 0x%08x.", err); + } + + mOwner->invalidateNodeID(mNodeID); + mNodeID = NULL; + + LOGV("OMXNodeInstance going away."); + delete this; + + return StatusFromOMXError(err); +} + +status_t 
OMXNodeInstance::sendCommand( + OMX_COMMANDTYPE cmd, OMX_S32 param) { + Mutex::Autolock autoLock(mLock); + + OMX_ERRORTYPE err = OMX_SendCommand(mHandle, cmd, param, NULL); + return StatusFromOMXError(err); +} + +status_t OMXNodeInstance::getParameter( + OMX_INDEXTYPE index, void *params, size_t size) { + Mutex::Autolock autoLock(mLock); + + OMX_ERRORTYPE err = OMX_GetParameter(mHandle, index, params); + return StatusFromOMXError(err); +} + +status_t OMXNodeInstance::setParameter( + OMX_INDEXTYPE index, const void *params, size_t size) { + Mutex::Autolock autoLock(mLock); + + OMX_ERRORTYPE err = OMX_SetParameter( + mHandle, index, const_cast<void *>(params)); + + return StatusFromOMXError(err); +} + +status_t OMXNodeInstance::getConfig( + OMX_INDEXTYPE index, void *params, size_t size) { + Mutex::Autolock autoLock(mLock); + + OMX_ERRORTYPE err = OMX_GetConfig(mHandle, index, params); + return StatusFromOMXError(err); +} + +status_t OMXNodeInstance::setConfig( + OMX_INDEXTYPE index, const void *params, size_t size) { + Mutex::Autolock autoLock(mLock); + + OMX_ERRORTYPE err = OMX_SetConfig( + mHandle, index, const_cast<void *>(params)); + + return StatusFromOMXError(err); +} + +status_t OMXNodeInstance::useBuffer( + OMX_U32 portIndex, const sp<IMemory> ¶ms, + OMX::buffer_id *buffer) { + Mutex::Autolock autoLock(mLock); + + BufferMeta *buffer_meta = new BufferMeta(params); + + OMX_BUFFERHEADERTYPE *header; + + OMX_ERRORTYPE err = OMX_UseBuffer( + mHandle, &header, portIndex, buffer_meta, + params->size(), static_cast<OMX_U8 *>(params->pointer())); + + if (err != OMX_ErrorNone) { + LOGE("OMX_UseBuffer failed with error %d (0x%08x)", err, err); + + delete buffer_meta; + buffer_meta = NULL; + + *buffer = 0; + + return UNKNOWN_ERROR; + } + + *buffer = header; + + addActiveBuffer(portIndex, *buffer); + + return OK; +} + +status_t OMXNodeInstance::allocateBuffer( + OMX_U32 portIndex, size_t size, OMX::buffer_id *buffer) { + Mutex::Autolock autoLock(mLock); + + BufferMeta 
*buffer_meta = new BufferMeta(size); + + OMX_BUFFERHEADERTYPE *header; + + OMX_ERRORTYPE err = OMX_AllocateBuffer( + mHandle, &header, portIndex, buffer_meta, size); + + if (err != OMX_ErrorNone) { + LOGE("OMX_AllocateBuffer failed with error %d (0x%08x)", err, err); + + delete buffer_meta; + buffer_meta = NULL; + + *buffer = 0; + + return UNKNOWN_ERROR; + } + + *buffer = header; + + addActiveBuffer(portIndex, *buffer); + + return OK; +} + +status_t OMXNodeInstance::allocateBufferWithBackup( + OMX_U32 portIndex, const sp<IMemory> ¶ms, + OMX::buffer_id *buffer) { + Mutex::Autolock autoLock(mLock); + + BufferMeta *buffer_meta = new BufferMeta(params, true); + + OMX_BUFFERHEADERTYPE *header; + + OMX_ERRORTYPE err = OMX_AllocateBuffer( + mHandle, &header, portIndex, buffer_meta, params->size()); + + if (err != OMX_ErrorNone) { + LOGE("OMX_AllocateBuffer failed with error %d (0x%08x)", err, err); + + delete buffer_meta; + buffer_meta = NULL; + + *buffer = 0; + + return UNKNOWN_ERROR; + } + + *buffer = header; + + addActiveBuffer(portIndex, *buffer); + + return OK; +} + +status_t OMXNodeInstance::freeBuffer( + OMX_U32 portIndex, OMX::buffer_id buffer) { + Mutex::Autolock autoLock(mLock); + + removeActiveBuffer(portIndex, buffer); + + OMX_BUFFERHEADERTYPE *header = (OMX_BUFFERHEADERTYPE *)buffer; + BufferMeta *buffer_meta = static_cast<BufferMeta *>(header->pAppPrivate); + + OMX_ERRORTYPE err = OMX_FreeBuffer(mHandle, portIndex, header); + + delete buffer_meta; + buffer_meta = NULL; + + return StatusFromOMXError(err); +} + +status_t OMXNodeInstance::fillBuffer(OMX::buffer_id buffer) { + Mutex::Autolock autoLock(mLock); + + OMX_BUFFERHEADERTYPE *header = (OMX_BUFFERHEADERTYPE *)buffer; + header->nFilledLen = 0; + header->nOffset = 0; + header->nFlags = 0; + + OMX_ERRORTYPE err = OMX_FillThisBuffer(mHandle, header); + + return StatusFromOMXError(err); +} + +status_t OMXNodeInstance::emptyBuffer( + OMX::buffer_id buffer, + OMX_U32 rangeOffset, OMX_U32 rangeLength, + OMX_U32 
flags, OMX_TICKS timestamp) { + Mutex::Autolock autoLock(mLock); + + OMX_BUFFERHEADERTYPE *header = (OMX_BUFFERHEADERTYPE *)buffer; + header->nFilledLen = rangeLength; + header->nOffset = rangeOffset; + header->nFlags = flags; + header->nTimeStamp = timestamp; + + BufferMeta *buffer_meta = + static_cast<BufferMeta *>(header->pAppPrivate); + buffer_meta->CopyToOMX(header); + + OMX_ERRORTYPE err = OMX_EmptyThisBuffer(mHandle, header); + + return StatusFromOMXError(err); +} + +status_t OMXNodeInstance::getExtensionIndex( + const char *parameterName, OMX_INDEXTYPE *index) { + Mutex::Autolock autoLock(mLock); + + OMX_ERRORTYPE err = OMX_GetExtensionIndex( + mHandle, const_cast<char *>(parameterName), index); + + return StatusFromOMXError(err); +} + +void OMXNodeInstance::onMessage(const omx_message &msg) { + if (msg.type == omx_message::FILL_BUFFER_DONE) { + OMX_BUFFERHEADERTYPE *buffer = + static_cast<OMX_BUFFERHEADERTYPE *>( + msg.u.extended_buffer_data.buffer); + + BufferMeta *buffer_meta = + static_cast<BufferMeta *>(buffer->pAppPrivate); + + buffer_meta->CopyFromOMX(buffer); + } + + mObserver->onMessage(msg); +} + +void OMXNodeInstance::onObserverDied() { + LOGE("!!! Observer died. Quickly, do something, ... anything..."); + + // Try to force shutdown of the node and hope for the best. 
+ freeNode(); +} + +void OMXNodeInstance::onGetHandleFailed() { + delete this; +} + +// static +OMX_ERRORTYPE OMXNodeInstance::OnEvent( + OMX_IN OMX_HANDLETYPE hComponent, + OMX_IN OMX_PTR pAppData, + OMX_IN OMX_EVENTTYPE eEvent, + OMX_IN OMX_U32 nData1, + OMX_IN OMX_U32 nData2, + OMX_IN OMX_PTR pEventData) { + OMXNodeInstance *instance = static_cast<OMXNodeInstance *>(pAppData); + return instance->owner()->OnEvent( + instance->nodeID(), eEvent, nData1, nData2, pEventData); +} + +// static +OMX_ERRORTYPE OMXNodeInstance::OnEmptyBufferDone( + OMX_IN OMX_HANDLETYPE hComponent, + OMX_IN OMX_PTR pAppData, + OMX_IN OMX_BUFFERHEADERTYPE* pBuffer) { + OMXNodeInstance *instance = static_cast<OMXNodeInstance *>(pAppData); + return instance->owner()->OnEmptyBufferDone(instance->nodeID(), pBuffer); +} + +// static +OMX_ERRORTYPE OMXNodeInstance::OnFillBufferDone( + OMX_IN OMX_HANDLETYPE hComponent, + OMX_IN OMX_PTR pAppData, + OMX_IN OMX_BUFFERHEADERTYPE* pBuffer) { + OMXNodeInstance *instance = static_cast<OMXNodeInstance *>(pAppData); + return instance->owner()->OnFillBufferDone(instance->nodeID(), pBuffer); +} + +void OMXNodeInstance::addActiveBuffer(OMX_U32 portIndex, OMX::buffer_id id) { + ActiveBuffer active; + active.mPortIndex = portIndex; + active.mID = id; + mActiveBuffers.push(active); +} + +void OMXNodeInstance::removeActiveBuffer( + OMX_U32 portIndex, OMX::buffer_id id) { + bool found = false; + for (size_t i = 0; i < mActiveBuffers.size(); ++i) { + if (mActiveBuffers[i].mPortIndex == portIndex + && mActiveBuffers[i].mID == id) { + found = true; + mActiveBuffers.removeItemsAt(i); + break; + } + } + + if (!found) { + LOGW("Attempt to remove an active buffer we know nothing about..."); + } +} + +void OMXNodeInstance::freeActiveBuffers() { + // Make sure to count down here, as freeBuffer will in turn remove + // the active buffer from the vector... 
+ for (size_t i = mActiveBuffers.size(); i--;) { + freeBuffer(mActiveBuffers[i].mPortIndex, mActiveBuffers[i].mID); + } +} + +} // namespace android + diff --git a/media/libstagefright/omx/OMXRenderer.h b/media/libstagefright/omx/OMXRenderer.h new file mode 100644 index 000000000000..4d194ce73440 --- /dev/null +++ b/media/libstagefright/omx/OMXRenderer.h @@ -0,0 +1,44 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef OMX_RENDERER_H_ + +#define OMX_RENDERER_H_ + +#include <media/IOMX.h> + +namespace android { + +class VideoRenderer; + +class OMXRenderer : public BnOMXRenderer { +public: + // Assumes ownership of "impl". + OMXRenderer(VideoRenderer *impl); + virtual ~OMXRenderer(); + + virtual void render(IOMX::buffer_id buffer); + +private: + VideoRenderer *mImpl; + + OMXRenderer(const OMXRenderer &); + OMXRenderer &operator=(const OMXRenderer &); +}; + +} // namespace android + +#endif // OMX_RENDERER_H_ diff --git a/media/libstagefright/omx/SoftwareRenderer.cpp b/media/libstagefright/omx/SoftwareRenderer.cpp new file mode 100644 index 000000000000..39de50492dc4 --- /dev/null +++ b/media/libstagefright/omx/SoftwareRenderer.cpp @@ -0,0 +1,335 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "SoftwareRenderer" +#include <utils/Log.h> + +#include <binder/MemoryHeapBase.h> +#include <media/stagefright/MediaDebug.h> +#include <media/stagefright/SoftwareRenderer.h> +#include <ui/ISurface.h> + +namespace android { + +#define QCOM_YUV 0 + +SoftwareRenderer::SoftwareRenderer( + OMX_COLOR_FORMATTYPE colorFormat, + const sp<ISurface> &surface, + size_t displayWidth, size_t displayHeight, + size_t decodedWidth, size_t decodedHeight) + : mColorFormat(colorFormat), + mISurface(surface), + mDisplayWidth(displayWidth), + mDisplayHeight(displayHeight), + mDecodedWidth(decodedWidth), + mDecodedHeight(decodedHeight), + mFrameSize(mDecodedWidth * mDecodedHeight * 2), // RGB565 + mMemoryHeap(new MemoryHeapBase(2 * mFrameSize)), + mIndex(0), + mClip(NULL) { + CHECK(mISurface.get() != NULL); + CHECK(mDecodedWidth > 0); + CHECK(mDecodedHeight > 0); + CHECK(mMemoryHeap->heapID() >= 0); + + ISurface::BufferHeap bufferHeap( + mDisplayWidth, mDisplayHeight, + mDecodedWidth, mDecodedHeight, + PIXEL_FORMAT_RGB_565, + mMemoryHeap); + + status_t err = mISurface->registerBuffers(bufferHeap); + CHECK_EQ(err, OK); +} + +SoftwareRenderer::~SoftwareRenderer() { + mISurface->unregisterBuffers(); + + delete[] mClip; + mClip = NULL; +} + +void SoftwareRenderer::render( + const void *data, size_t size, void *platformPrivate) { + static const int OMX_QCOM_COLOR_FormatYVU420SemiPlanar = 0x7FA30C00; + + switch (mColorFormat) { + case OMX_COLOR_FormatYUV420Planar: + return renderYUV420Planar(data, size); + + case OMX_COLOR_FormatCbYCrY: + return 
renderCbYCrY(data, size); + + case OMX_QCOM_COLOR_FormatYVU420SemiPlanar: + return renderQCOMYUV420SemiPlanar(data, size); + + default: + { + LOGW("Cannot render color format %d", mColorFormat); + break; + } + } +} + +void SoftwareRenderer::renderYUV420Planar( + const void *data, size_t size) { + if (size != (mDecodedHeight * mDecodedWidth * 3) / 2) { + LOGE("size is %d, expected %d", + size, (mDecodedHeight * mDecodedWidth * 3) / 2); + } + CHECK(size >= (mDecodedWidth * mDecodedHeight * 3) / 2); + + uint8_t *kAdjustedClip = initClip(); + + size_t offset = mIndex * mFrameSize; + + void *dst = (uint8_t *)mMemoryHeap->getBase() + offset; + + uint32_t *dst_ptr = (uint32_t *)dst; + + const uint8_t *src_y = (const uint8_t *)data; + + const uint8_t *src_u = + (const uint8_t *)src_y + mDecodedWidth * mDecodedHeight; + +#if !QCOM_YUV + const uint8_t *src_v = + (const uint8_t *)src_u + (mDecodedWidth / 2) * (mDecodedHeight / 2); +#endif + + for (size_t y = 0; y < mDecodedHeight; ++y) { + for (size_t x = 0; x < mDecodedWidth; x += 2) { + // B = 1.164 * (Y - 16) + 2.018 * (U - 128) + // G = 1.164 * (Y - 16) - 0.813 * (V - 128) - 0.391 * (U - 128) + // R = 1.164 * (Y - 16) + 1.596 * (V - 128) + + // B = 298/256 * (Y - 16) + 517/256 * (U - 128) + // G = .................. - 208/256 * (V - 128) - 100/256 * (U - 128) + // R = .................. + 409/256 * (V - 128) + + // min_B = (298 * (- 16) + 517 * (- 128)) / 256 = -277 + // min_G = (298 * (- 16) - 208 * (255 - 128) - 100 * (255 - 128)) / 256 = -172 + // min_R = (298 * (- 16) + 409 * (- 128)) / 256 = -223 + + // max_B = (298 * (255 - 16) + 517 * (255 - 128)) / 256 = 534 + // max_G = (298 * (255 - 16) - 208 * (- 128) - 100 * (- 128)) / 256 = 432 + // max_R = (298 * (255 - 16) + 409 * (255 - 128)) / 256 = 481 + + // clip range -278 .. 
535 + + signed y1 = (signed)src_y[x] - 16; + signed y2 = (signed)src_y[x + 1] - 16; + +#if QCOM_YUV + signed u = (signed)src_u[x & ~1] - 128; + signed v = (signed)src_u[(x & ~1) + 1] - 128; +#else + signed u = (signed)src_u[x / 2] - 128; + signed v = (signed)src_v[x / 2] - 128; +#endif + + signed u_b = u * 517; + signed u_g = -u * 100; + signed v_g = -v * 208; + signed v_r = v * 409; + + signed tmp1 = y1 * 298; + signed b1 = (tmp1 + u_b) / 256; + signed g1 = (tmp1 + v_g + u_g) / 256; + signed r1 = (tmp1 + v_r) / 256; + + signed tmp2 = y2 * 298; + signed b2 = (tmp2 + u_b) / 256; + signed g2 = (tmp2 + v_g + u_g) / 256; + signed r2 = (tmp2 + v_r) / 256; + + uint32_t rgb1 = + ((kAdjustedClip[r1] >> 3) << 11) + | ((kAdjustedClip[g1] >> 2) << 5) + | (kAdjustedClip[b1] >> 3); + + uint32_t rgb2 = + ((kAdjustedClip[r2] >> 3) << 11) + | ((kAdjustedClip[g2] >> 2) << 5) + | (kAdjustedClip[b2] >> 3); + + dst_ptr[x / 2] = (rgb2 << 16) | rgb1; + } + + src_y += mDecodedWidth; + + if (y & 1) { +#if QCOM_YUV + src_u += mDecodedWidth; +#else + src_u += mDecodedWidth / 2; + src_v += mDecodedWidth / 2; +#endif + } + + dst_ptr += mDecodedWidth / 2; + } + + mISurface->postBuffer(offset); + mIndex = 1 - mIndex; +} + +void SoftwareRenderer::renderCbYCrY( + const void *data, size_t size) { + if (size != (mDecodedHeight * mDecodedWidth * 2)) { + LOGE("size is %d, expected %d", + size, (mDecodedHeight * mDecodedWidth * 2)); + } + CHECK(size >= (mDecodedWidth * mDecodedHeight * 2)); + + uint8_t *kAdjustedClip = initClip(); + + size_t offset = mIndex * mFrameSize; + void *dst = (uint8_t *)mMemoryHeap->getBase() + offset; + uint32_t *dst_ptr = (uint32_t *)dst; + + const uint8_t *src = (const uint8_t *)data; + + for (size_t y = 0; y < mDecodedHeight; ++y) { + for (size_t x = 0; x < mDecodedWidth; x += 2) { + signed y1 = (signed)src[2 * x + 1] - 16; + signed y2 = (signed)src[2 * x + 3] - 16; + signed u = (signed)src[2 * x] - 128; + signed v = (signed)src[2 * x + 2] - 128; + + signed u_b = u * 517; 
+ signed u_g = -u * 100; + signed v_g = -v * 208; + signed v_r = v * 409; + + signed tmp1 = y1 * 298; + signed b1 = (tmp1 + u_b) / 256; + signed g1 = (tmp1 + v_g + u_g) / 256; + signed r1 = (tmp1 + v_r) / 256; + + signed tmp2 = y2 * 298; + signed b2 = (tmp2 + u_b) / 256; + signed g2 = (tmp2 + v_g + u_g) / 256; + signed r2 = (tmp2 + v_r) / 256; + + uint32_t rgb1 = + ((kAdjustedClip[r1] >> 3) << 11) + | ((kAdjustedClip[g1] >> 2) << 5) + | (kAdjustedClip[b1] >> 3); + + uint32_t rgb2 = + ((kAdjustedClip[r2] >> 3) << 11) + | ((kAdjustedClip[g2] >> 2) << 5) + | (kAdjustedClip[b2] >> 3); + + dst_ptr[x / 2] = (rgb2 << 16) | rgb1; + } + + src += mDecodedWidth * 2; + dst_ptr += mDecodedWidth / 2; + } + + mISurface->postBuffer(offset); + mIndex = 1 - mIndex; +} + +void SoftwareRenderer::renderQCOMYUV420SemiPlanar( + const void *data, size_t size) { + if (size != (mDecodedHeight * mDecodedWidth * 3) / 2) { + LOGE("size is %d, expected %d", + size, (mDecodedHeight * mDecodedWidth * 3) / 2); + } + CHECK(size >= (mDecodedWidth * mDecodedHeight * 3) / 2); + + uint8_t *kAdjustedClip = initClip(); + + size_t offset = mIndex * mFrameSize; + + void *dst = (uint8_t *)mMemoryHeap->getBase() + offset; + + uint32_t *dst_ptr = (uint32_t *)dst; + + const uint8_t *src_y = (const uint8_t *)data; + + const uint8_t *src_u = + (const uint8_t *)src_y + mDecodedWidth * mDecodedHeight; + + for (size_t y = 0; y < mDecodedHeight; ++y) { + for (size_t x = 0; x < mDecodedWidth; x += 2) { + signed y1 = (signed)src_y[x] - 16; + signed y2 = (signed)src_y[x + 1] - 16; + + signed u = (signed)src_u[x & ~1] - 128; + signed v = (signed)src_u[(x & ~1) + 1] - 128; + + signed u_b = u * 517; + signed u_g = -u * 100; + signed v_g = -v * 208; + signed v_r = v * 409; + + signed tmp1 = y1 * 298; + signed b1 = (tmp1 + u_b) / 256; + signed g1 = (tmp1 + v_g + u_g) / 256; + signed r1 = (tmp1 + v_r) / 256; + + signed tmp2 = y2 * 298; + signed b2 = (tmp2 + u_b) / 256; + signed g2 = (tmp2 + v_g + u_g) / 256; + signed r2 = 
(tmp2 + v_r) / 256; + + uint32_t rgb1 = + ((kAdjustedClip[b1] >> 3) << 11) + | ((kAdjustedClip[g1] >> 2) << 5) + | (kAdjustedClip[r1] >> 3); + + uint32_t rgb2 = + ((kAdjustedClip[b2] >> 3) << 11) + | ((kAdjustedClip[g2] >> 2) << 5) + | (kAdjustedClip[r2] >> 3); + + dst_ptr[x / 2] = (rgb2 << 16) | rgb1; + } + + src_y += mDecodedWidth; + + if (y & 1) { + src_u += mDecodedWidth; + } + + dst_ptr += mDecodedWidth / 2; + } + + mISurface->postBuffer(offset); + mIndex = 1 - mIndex; +} + +uint8_t *SoftwareRenderer::initClip() { + static const signed kClipMin = -278; + static const signed kClipMax = 535; + + if (mClip == NULL) { + mClip = new uint8_t[kClipMax - kClipMin + 1]; + + for (signed i = kClipMin; i <= kClipMax; ++i) { + mClip[i - kClipMin] = (i < 0) ? 0 : (i > 255) ? 255 : (uint8_t)i; + } + } + + return &mClip[-kClipMin]; +} + +} // namespace android diff --git a/media/libstagefright/stagefright_string.cpp b/media/libstagefright/stagefright_string.cpp new file mode 100644 index 000000000000..2aedb80739e2 --- /dev/null +++ b/media/libstagefright/stagefright_string.cpp @@ -0,0 +1,83 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include <media/stagefright/stagefright_string.h> + +namespace android { + +// static +string::size_type string::npos = (string::size_type)-1; + +string::string() { +} + +string::string(const char *s, size_t length) + : mString(s, length) { +} + +string::string(const string &from, size_type start, size_type length) + : mString(from.c_str() + start, length) { +} + +string::string(const char *s) + : mString(s) { +} + +const char *string::c_str() const { + return mString.string(); +} + +string::size_type string::size() const { + return mString.length(); +} + +void string::clear() { + mString = String8(); +} + +string::size_type string::find(char c) const { + char s[2]; + s[0] = c; + s[1] = '\0'; + + ssize_t index = mString.find(s); + + return index < 0 ? npos : (size_type)index; +} + +bool string::operator<(const string &other) const { + return mString < other.mString; +} + +bool string::operator==(const string &other) const { + return mString == other.mString; +} + +string &string::operator+=(char c) { + mString.append(&c, 1); + + return *this; +} + +void string::erase(size_t from, size_t length) { + String8 s(mString.string(), from); + s.append(mString.string() + from + length); + + mString = s; +} + +} // namespace android + diff --git a/media/mediaserver/Android.mk b/media/mediaserver/Android.mk index c6816985357f..a92cea896902 100644 --- a/media/mediaserver/Android.mk +++ b/media/mediaserver/Android.mk @@ -8,7 +8,8 @@ LOCAL_SHARED_LIBRARIES := \ libaudioflinger \ libcameraservice \ libmediaplayerservice \ - libutils + libutils \ + libbinder base := $(LOCAL_PATH)/../.. 
diff --git a/media/mediaserver/main_mediaserver.cpp b/media/mediaserver/main_mediaserver.cpp index 6954b63680c0..7094cfa3a1f2 100644 --- a/media/mediaserver/main_mediaserver.cpp +++ b/media/mediaserver/main_mediaserver.cpp @@ -20,14 +20,15 @@ #include <unistd.h> #include <grp.h> -#include <utils/IPCThreadState.h> -#include <utils/ProcessState.h> -#include <utils/IServiceManager.h> +#include <binder/IPCThreadState.h> +#include <binder/ProcessState.h> +#include <binder/IServiceManager.h> #include <utils/Log.h> #include <AudioFlinger.h> #include <CameraService.h> #include <MediaPlayerService.h> +#include <AudioPolicyService.h> #include <private/android_filesystem_config.h> using namespace android; @@ -40,6 +41,7 @@ int main(int argc, char** argv) AudioFlinger::instantiate(); MediaPlayerService::instantiate(); CameraService::instantiate(); + AudioPolicyService::instantiate(); ProcessState::self()->startThreadPool(); IPCThreadState::self()->joinThreadPool(); } diff --git a/media/sdutils/sdutil.cpp b/media/sdutils/sdutil.cpp index 06120f57431b..fe1187897126 100644 --- a/media/sdutils/sdutil.cpp +++ b/media/sdutils/sdutil.cpp @@ -15,8 +15,8 @@ */ #include <hardware_legacy/IMountService.h> -#include <utils/BpBinder.h> -#include <utils/IServiceManager.h> +#include <binder/BpBinder.h> +#include <binder/IServiceManager.h> #include <stdio.h> #include <stdlib.h> diff --git a/media/tests/MediaFrameworkTest/AndroidManifest.xml b/media/tests/MediaFrameworkTest/AndroidManifest.xml index 690822013a68..246f9fc528ab 100644 --- a/media/tests/MediaFrameworkTest/AndroidManifest.xml +++ b/media/tests/MediaFrameworkTest/AndroidManifest.xml @@ -19,6 +19,7 @@ <uses-permission android:name="android.permission.RECORD_AUDIO" /> <uses-permission android:name="android.permission.CAMERA" /> + <uses-permission android:name="android.permission.INTERNET" /> <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" /> <application> <uses-library android:name="android.test.runner" /> 
diff --git a/media/tests/MediaFrameworkTest/res/drawable-hdpi/icon.png b/media/tests/MediaFrameworkTest/res/drawable-hdpi/icon.png Binary files differnew file mode 100644 index 000000000000..a02138ef0b9f --- /dev/null +++ b/media/tests/MediaFrameworkTest/res/drawable-hdpi/icon.png diff --git a/media/tests/MediaFrameworkTest/res/drawable/icon.png b/media/tests/MediaFrameworkTest/res/drawable-mdpi/icon.png Binary files differindex 64e3601c234d..64e3601c234d 100644 --- a/media/tests/MediaFrameworkTest/res/drawable/icon.png +++ b/media/tests/MediaFrameworkTest/res/drawable-mdpi/icon.png diff --git a/media/tests/MediaFrameworkTest/res/layout/surface_view.xml b/media/tests/MediaFrameworkTest/res/layout/surface_view.xml index c25e4760744d..cbd1ff811751 100644 --- a/media/tests/MediaFrameworkTest/res/layout/surface_view.xml +++ b/media/tests/MediaFrameworkTest/res/layout/surface_view.xml @@ -21,13 +21,12 @@ <FrameLayout android:layout_width="fill_parent" - android:layout_height="0px" - android:layout_weight="1"> + android:layout_height="fill_parent"> <SurfaceView android:id="@+id/surface_view" - android:layout_width="320dip" - android:layout_height="240dip" + android:layout_width="fill_parent" + android:layout_height="fill_parent" android:layout_centerInParent="true" /> diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaFrameworkTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaFrameworkTest.java index e65cf41975fe..9fb49b12a5bb 100644 --- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaFrameworkTest.java +++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaFrameworkTest.java @@ -42,6 +42,7 @@ import com.android.mediaframeworktest.MediaNames; import java.io.File; import java.io.FileDescriptor; +import java.net.InetAddress; public class MediaFrameworkTest extends Activity { @@ -69,10 +70,6 @@ public class MediaFrameworkTest extends Activity { 
setContentView(R.layout.surface_view); mSurfaceView = (SurfaceView)findViewById(R.id.surface_view); ViewGroup.LayoutParams lp = mSurfaceView.getLayoutParams(); - lp.width = 320; - lp.height = 240; - mSurfaceView.setLayoutParams(lp); - mSurfaceView.getHolder().setFixedSize(320, 240); mSurfaceView.getHolder().setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); //Get the midi fd @@ -146,4 +143,9 @@ public class MediaFrameworkTest extends Activity { return super.onKeyDown(keyCode, event); } + + public static boolean checkStreamingServer() throws Exception { + InetAddress address = InetAddress.getByAddress(MediaNames.STREAM_SERVER); + return address.isReachable(10000); + } } diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaFrameworkTestRunner.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaFrameworkTestRunner.java index 6edc2cc0d61b..2a4e9a04af62 100755 --- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaFrameworkTestRunner.java +++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaFrameworkTestRunner.java @@ -23,6 +23,7 @@ import com.android.mediaframeworktest.functional.MediaMimeTest; import com.android.mediaframeworktest.functional.MediaPlayerApiTest; import com.android.mediaframeworktest.functional.MediaRecorderTest; import com.android.mediaframeworktest.functional.SimTonesTest; +import com.android.mediaframeworktest.functional.MediaPlayerInvokeTest; import junit.framework.TestSuite; @@ -32,7 +33,7 @@ import android.test.InstrumentationTestSuite; /** * Instrumentation Test Runner for all MediaPlayer tests. 
- * + * * Running all tests: * * adb shell am instrument \ @@ -52,6 +53,7 @@ public class MediaFrameworkTestRunner extends InstrumentationTestRunner { suite.addTestSuite(MediaRecorderTest.class); suite.addTestSuite(MediaAudioTrackTest.class); suite.addTestSuite(MediaMimeTest.class); + suite.addTestSuite(MediaPlayerInvokeTest.class); return suite; } @@ -60,5 +62,3 @@ public class MediaFrameworkTestRunner extends InstrumentationTestRunner { return MediaFrameworkTestRunner.class.getClassLoader(); } } - - diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaFrameworkUnitTestRunner.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaFrameworkUnitTestRunner.java index 81d59da5c492..a203adc71c31 100755 --- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaFrameworkUnitTestRunner.java +++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaFrameworkUnitTestRunner.java @@ -24,16 +24,16 @@ import junit.framework.TestSuite; /** * Instrumentation Test Runner for all media framework unit tests. - * + * * Make sure that MediaFrameworkUnitTestRunner has been added to * AndroidManifest.xml file, and then "make -j4 mediaframeworktest; adb sync" * to build and upload mediaframeworktest to the phone or emulator. 
- * + * * Example on running all unit tests for a single class: * adb shell am instrument -e class \ - * com.android.mediaframeworktest.unit.MediaMetadataRetrieverUnitTest \ + * com.android.mediaframeworktest.unit.MediaMetadataRetrieverUnitTest \ * -w com.android.mediaframeworktest/.MediaFrameworkUnitTestRunner - * + * * Example on running all unit tests for the media framework: * adb shell am instrument \ * -w com.android.mediaframeworktest/.MediaFrameworkUnitTestRunner @@ -54,12 +54,12 @@ public class MediaFrameworkUnitTestRunner extends InstrumentationTestRunner { public ClassLoader getLoader() { return MediaFrameworkUnitTestRunner.class.getClassLoader(); } - + // Running all unit tests checking the state machine may be time-consuming. private void addMediaMetadataRetrieverStateUnitTests(TestSuite suite) { suite.addTestSuite(MediaMetadataRetrieverTest.class); } - + // Running all unit tests checking the state machine may be time-consuming. private void addMediaRecorderStateUnitTests(TestSuite suite) { suite.addTestSuite(MediaRecorderPrepareStateUnitTest.class); @@ -87,5 +87,6 @@ public class MediaFrameworkUnitTestRunner extends InstrumentationTestRunner { suite.addTestSuite(MediaPlayerSetLoopingStateUnitTest.class); suite.addTestSuite(MediaPlayerSetAudioStreamTypeStateUnitTest.class); suite.addTestSuite(MediaPlayerSetVolumeStateUnitTest.class); + suite.addTestSuite(MediaPlayerMetadataParserTest.class); } } diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaNames.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaNames.java index e76967d921e6..5127255d8779 100755 --- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaNames.java +++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaNames.java @@ -372,81 +372,81 @@ public class MediaNames { public static final String META_DATA_MP3 [][] = { {"/sdcard/media_api/metaDataTestMedias/MP3/ID3V1_ID3V2.mp3", "1/10", 
"ID3V2.3 Album", "ID3V2.3 Artist", "ID3V2.3 Lyricist", "ID3V2.3 Composer", null, "Blues", - "ID3V2.3 Title", "1234", "295", "1"}, + "ID3V2.3 Title", "1234", "295", "1", null}, {"/sdcard/media_api/metaDataTestMedias/MP3/ID3V2.mp3", "1/10", "ID3V2.3 Album", "ID3V2.3 Artist", "ID3V2.3 Lyricist", "ID3V2.3 Composer", null, "Blues", - "ID3V2.3 Title", "1234", "287", "1"}, + "ID3V2.3 Title", "1234", "287", "1", null}, {"/sdcard/media_api/metaDataTestMedias/MP3/ID3V1.mp3", "1", "test ID3V1 Album", "test ID3V1 Artist", - null, null, null, "255", "test ID3V1 Title", "1234", "231332", "1"}, + null, null, null, "255", "test ID3V1 Title", "1234", "231332", "1", null}, {"/sdcard/media_api/metaDataTestMedias/MP3/Corrupted_ID3V1.mp3" , null, null, null, - null, null, null, null, null, null, "231330", "1"}, + null, null, null, null, null, null, "231330", "1", null}, //The corrupted TALB field in id3v2 would not switch to id3v1 tag automatically {"/sdcard/media_api/metaDataTestMedias/MP3/Corrupted_ID3V2_TALB.mp3", "01", null, "ID3V2.3 Artist", "ID3V2.3 Lyricist", "ID3V2.3 Composer", null, - "Blues", "ID3V2.3 Title", "1234", "295", "1"}, + "Blues", "ID3V2.3 Title", "1234", "295", "1", null}, {"/sdcard/media_api/metaDataTestMedias/MP3/Corrupted_ID3V2_TCOM.mp3", "01", "ID3V2.3 Album", "ID3V2.3 Artist", "ID3V2.3 Lyricist", null, null, - "Blues", "ID3V2.3 Title", "1234", "295", "1"}, + "Blues", "ID3V2.3 Title", "1234", "295", "1", null}, {"/sdcard/media_api/metaDataTestMedias/MP3/Corrupted_ID3V2_TCOM_2.mp3", "01", "ID3V2.3 Album", - "ID3V2.3 Artist", null, null, null, "Blues", "ID3V2.3 Title", "1234", "295", "1"}, + "ID3V2.3 Artist", null, null, null, "Blues", "ID3V2.3 Title", "1234", "295", "1", null}, {"/sdcard/media_api/metaDataTestMedias/MP3/Corrupted_ID3V2_TRCK.mp3", "dd", "ID3V2.3 Album", "ID3V2.3 Artist", "ID3V2.3 Lyricist", "ID3V2.3 Composer", null, - "Blues", "ID3V2.3 Title", "1234", "295", "1"}, + "Blues", "ID3V2.3 Title", "1234", "295", "1", null}, 
{"/sdcard/media_api/metaDataTestMedias/MP3/Corrupted_ID3V2_TRCK_2.mp3", "01", "ID3V2.3 Album", - "ID3V2.3 Artist", null, null, null, "255", "ID3V2.3 Title", "1234", "295", "1"}, + "ID3V2.3 Artist", null, null, null, "255", "ID3V2.3 Title", "1234", "295", "1", null}, {"/sdcard/media_api/metaDataTestMedias/MP3/Corrupted_ID3V2_TYER.mp3", "01", "ID3V2.3 Album", - "ID3V2.3 Artist", null, null, null, null, "ID3V2.3 Title", "9999", "295", "1"}, + "ID3V2.3 Artist", null, null, null, null, "ID3V2.3 Title", "9999", "295", "1", null}, {"/sdcard/media_api/metaDataTestMedias/MP3/Corrupted_ID3V2_TYER_2.mp3", "01", "ID3V2.3 Album", "ID3V2.3 Artist", "ID3V2.3 Lyricist", "ID3V2.3 Composer", null, - "Blues", "ID3V2.3 Title", null, "295", "1"}, + "Blues", "ID3V2.3 Title", null, "295", "1", null}, {"/sdcard/media_api/metaDataTestMedias/MP3/Corrupted_ID3V2_TIT.mp3", null, null, null, - null, null, null, null, null, null, "295", "1"} + null, null, null, null, null, null, "295", "1", null} }; public static final String META_DATA_OTHERS [][] = { {"/sdcard/media_api/metaDataTestMedias/3GP/cat.3gp", null, null, null, null, null, "20080309T002415.000Z", null, - null, null, "1404928", "2"}, + null, null, "63916", "2", null}, {"/sdcard/media_api/metaDataTestMedias/AMR/AMR_NB.amr", null, null, null, null, null, null, null, - null, null, "126540", "1"}, + null, null, "126540", "1", null}, {"/sdcard/media_api/metaDataTestMedias/AMRWB/AMR_WB.amr", null, null, null, null, null, null, null, - null, null, "231180", "1"}, - {"/sdcard/media_api/metaDataTestMedias/M4A/Jaws Of Life_ver1.m4a", null, "Suspended Animation", + null, null, "231180", "1", null}, + {"/sdcard/media_api/metaDataTestMedias/M4A/Jaws Of Life_ver1.m4a", "1/8", "Suspended Animation", "John Petrucci", null, null, "20070510T125223.000Z", - null, null, "2005", "231180", "1"}, + "13", "Jaws Of Life", "2005", "449329", "1", "m4a composer"}, {"/sdcard/media_api/metaDataTestMedias/M4V/sample_iPod.m4v", null, null, null, null, null, 
"20051220T202015.000Z", - null, null, null, "3771392", "2"}, + null, null, null, "85500", "2", null}, {"/sdcard/media_api/metaDataTestMedias/MIDI/MIDI.mid", null, "Suspended Animation", "John Petrucci", null, null, "20070510T125223.000Z", - null, null, "2005", "231180", "1"}, - {"/sdcard/media_api/metaDataTestMedias/MP4/kung_fu_panda_h264.mp4", null, "mp4 album Kung Fu Panda", + null, null, "2005", "231180", "1", null}, + {"/sdcard/media_api/metaDataTestMedias/MP4/kung_fu_panda_h264.mp4", "2/0", "mp4 album Kung Fu Panda", "mp4 artist Kung Fu Panda", null, null, "20080517T091451.000Z", - "41", "Kung Fu Panda", "2008", "5667840", "2"}, + "41", "Kung Fu Panda", "2008", "128521", "2", "mp4 composer"}, {"/sdcard/media_api/metaDataTestMedias/OGG/Ring_Classic_02.ogg", null, "Suspended Animation", "John Petrucci", null, null, "20070510T125223.000Z", - null, null, "2005", "231180", "1"}, + null, null, "2005", "231180", "1", null}, {"/sdcard/media_api/metaDataTestMedias/OGG/When You Say Nothing At All.ogg", null, "Suspended Animation", "John Petrucci", - null, null, "20070510T125223.000Z", null, null, "2005", "231180", "1"}, + null, null, "20070510T125223.000Z", null, null, "2005", "231180", "1", null}, {"/sdcard/media_api/metaDataTestMedias/WAV/Im With You.wav", null, null, null, null, null, null, - null, null, null, "224000", "1"}, + null, null, null, "224000", "1", null}, {"/sdcard/media_api/metaDataTestMedias/WMA/WMA9.wma", "6", "Ten Songs in the Key of Betrayal", "Alien Crime Syndicate", "Alien Crime Syndicate", "wma 9 Composer", "20040521T175729.483Z", - "Rock", "Run for the Money", "2004", "134479", "1"}, + "Rock", "Run for the Money", "2004", "134479", "1", null}, {"/sdcard/media_api/metaDataTestMedias/WMA/WMA10.wma", "09", "wma 10 Album", "wma 10 Album Artist", "wma 10 Artist", "wma 10 Composer", "20070705T063625.097Z", - "Acid Jazz", "wma 10 Title", "2010", "126574", "1"}, + "Acid Jazz", "wma 10 Title", "2010", "126574", "1", null}, 
{"/sdcard/media_api/metaDataTestMedias/WMV/bugs.wmv", "8", "wmv 9 Album", null, "wmv 9 Artist ", null, "20051122T155247.540Z", - null, "Looney Tunes - Hare-Breadth Hurry", "2005", "193482", "2"}, + null, "Looney Tunes - Hare-Breadth Hurry", "2005", "193482", "2", null}, {"/sdcard/media_api/metaDataTestMedias/WMV/clips_ver7.wmv", "50", "wmv 7 Album", null, "Hallau Shoots & Company", null, "20020226T170045.891Z", - null, "CODEC Shootout", "1986", "43709", "2"} + null, "CODEC Shootout", "1986", "43709", "2", null} }; //output recorded video @@ -489,18 +489,20 @@ public class MediaNames { }; //Streaming test files + public static final byte [] STREAM_SERVER = new byte[] {(byte)75,(byte)17,(byte)48,(byte)204}; public static final String STREAM_H264_480_360_1411k = - "http://sridharg.googlejunta.com/yslau/stress_media/h264_regular.mp4"; + "http://75.17.48.204:10088/yslau/stress_media/h264_regular.mp4"; public static final String STREAM_WMV = - "http://sridharg.googlejunta.com/yslau/stress_media/bugs.wmv"; + "http://75.17.48.204:10088/yslau/stress_media/bugs.wmv"; public static final String STREAM_H263_176x144_325k = - "http://sridharg.googlejunta.com/yslau/stress_media/h263_regular.3gp"; + "http://75.17.48.204:10088/yslau/stress_media/h263_regular.3gp"; public static final String STREAM_H264_352x288_1536k = - "http://sridharg.googlejunta.com/yslau/stress_media/h264_highBitRate.mp4"; + "http://75.17.48.204:10088/yslau/stress_media/h264_highBitRate.mp4"; public static final String STREAM_MP3= - "http://sridharg.googlejunta.com/yslau/stress_media/mp3_regular.mp3"; + "http://75.17.48.204:10088/yslau/stress_media/mp3_regular.mp3"; public static final String STREAM_MPEG4_QVGA_128k = - "http://sridharg.googlejunta.com/yslau/stress_media/mpeg4_qvga_24fps.3gp"; + "http://75.17.48.204:10088/yslau/stress_media/mpeg4_qvga_24fps.3gp"; public static final int STREAM_H264_480_360_1411k_DURATION = 46000; public static final int VIDEO_H263_AAC_DURATION = 501000; + public static final int 
VIDEO_H263_AMR_DURATION = 502000; } diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaProfileReader.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaProfileReader.java new file mode 100644 index 000000000000..717f7bac5c0d --- /dev/null +++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/MediaProfileReader.java @@ -0,0 +1,110 @@ +package com.android.mediaframeworktest; + +import android.media.MediaRecorder; +import android.os.SystemProperties; +import java.util.HashMap; + +public class MediaProfileReader { + + public static final HashMap<String, Integer> + OUTPUT_FORMAT_TABLE = new HashMap<String, Integer>(); + public static String MEDIA_ENC_VID = "ro.media.enc.vid."; + public static String MEDIA_AUD_VID = "ro.media.enc.aud."; + public static String[] VIDEO_ENCODER_PROPERTY = {".width", ".height", ".bps", ".fps",}; + public static String[] AUDIO_ENCODER_PROPERTY = {".bps", ".hz", ".ch",}; + + public static String getVideoCodecProperty() { + String s; + s = SystemProperties.get("ro.media.enc.vid.codec"); + return s; + } + + public static String getAudioCodecProperty() { + String s; + s = SystemProperties.get("ro.media.enc.aud.codec"); + return s; + } + + public static String getDeviceType() { + // push all the property into one big table + String s; + s = SystemProperties.get("ro.product.name"); + return s; + } + + public static boolean getWMAEnable() { + // push all the property into one big table + int wmaEnable = 1; + wmaEnable = SystemProperties.getInt("ro.media.dec.aud.wma.enabled", + wmaEnable); + if (wmaEnable == 1) { + return true; + } else { + return false; + } + } + + public static boolean getWMVEnable(){ + int wmvEnable = 1; + wmvEnable = SystemProperties.getInt("ro.media.dec.vid.wmv.enabled", + wmvEnable); + if (wmvEnable == 1) { + return true; + } else { + return false; + } + } + + public static void createVideoProfileTable() { + // push all the property into one big table 
+ String encoderType = getVideoCodecProperty(); + if (encoderType.length() != 0) { + String encoder[] = encoderType.split(","); + for (int i = 0; i < encoder.length; i++) { + for (int j = 0; j < VIDEO_ENCODER_PROPERTY.length; j++) { + String propertyName = MEDIA_ENC_VID + encoder[i] + VIDEO_ENCODER_PROPERTY[j]; + String prop = SystemProperties.get(propertyName); + // push to the table + String propRange[] = prop.split(","); + OUTPUT_FORMAT_TABLE.put((encoder[i] + VIDEO_ENCODER_PROPERTY[j] + "_low"), + Integer.parseInt(propRange[0])); + OUTPUT_FORMAT_TABLE.put((encoder[i] + VIDEO_ENCODER_PROPERTY[j] + "_high"), + Integer.parseInt(propRange[1])); + } + + } + } + } + + public static void createAudioProfileTable() { + // push all the property into one big table + String audioType = getAudioCodecProperty(); + String encoder[] = audioType.split(","); + if (audioType.length() != 0) { + for (int i = 0; i < encoder.length; i++) { + for (int j = 0; j < AUDIO_ENCODER_PROPERTY.length; j++) { + String propertyName = MEDIA_AUD_VID + encoder[i] + AUDIO_ENCODER_PROPERTY[j]; + String prop = SystemProperties.get(propertyName); + // push to the table + String propRange[] = prop.split(","); + OUTPUT_FORMAT_TABLE.put((encoder[i] + AUDIO_ENCODER_PROPERTY[j] + "_low"), + Integer.parseInt(propRange[0])); + OUTPUT_FORMAT_TABLE.put((encoder[i] + AUDIO_ENCODER_PROPERTY[j] + "_high"), + Integer.parseInt(propRange[1])); + } + } + } + } + + public static void createEncoderTable(){ + OUTPUT_FORMAT_TABLE.put("h263", MediaRecorder.VideoEncoder.H263); + OUTPUT_FORMAT_TABLE.put("h264", MediaRecorder.VideoEncoder.H264); + OUTPUT_FORMAT_TABLE.put("m4v", MediaRecorder.VideoEncoder.MPEG_4_SP); + OUTPUT_FORMAT_TABLE.put("amrnb", MediaRecorder.AudioEncoder.AMR_NB); + OUTPUT_FORMAT_TABLE.put("amrwb", MediaRecorder.AudioEncoder.AMR_WB); + OUTPUT_FORMAT_TABLE.put("aac", MediaRecorder.AudioEncoder.AAC); + OUTPUT_FORMAT_TABLE.put("aacplus", MediaRecorder.AudioEncoder.AAC_PLUS); + 
OUTPUT_FORMAT_TABLE.put("eaacplus", + MediaRecorder.AudioEncoder.EAAC_PLUS); + } +} diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/CameraTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/CameraTest.java index fa0986ab68d8..e66e560cc506 100644 --- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/CameraTest.java +++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/CameraTest.java @@ -226,7 +226,8 @@ public class CameraTest extends ActivityInstrumentationTestCase<MediaFrameworkTe * Test case 1: Take a picture and verify all the callback * functions are called properly. */ - @LargeTest + // TODO: add this back to LargeTest once bug 2141755 is fixed + // @LargeTest public void testTakePicture() throws Exception { synchronized (lock) { initializeMessageLooper(); diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaAudioTrackTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaAudioTrackTest.java index aefedc3bff41..cea3a5a797a5 100644 --- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaAudioTrackTest.java +++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaAudioTrackTest.java @@ -140,7 +140,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF TestResults res = constructorTestMultiSampleRate( AudioManager.STREAM_MUSIC, AudioTrack.MODE_STREAM, - AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT, + AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, AudioTrack.STATE_INITIALIZED); assertTrue("testConstructorMono16MusicStream: " + res.mResultLog, res.mResult); @@ -153,7 +153,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF TestResults res = constructorTestMultiSampleRate( 
AudioManager.STREAM_MUSIC, AudioTrack.MODE_STREAM, - AudioFormat.CHANNEL_CONFIGURATION_STEREO, AudioFormat.ENCODING_PCM_16BIT, + AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT, AudioTrack.STATE_INITIALIZED); assertTrue("testConstructorStereo16MusicStream: " + res.mResultLog, res.mResult); @@ -166,7 +166,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF TestResults res = constructorTestMultiSampleRate( AudioManager.STREAM_MUSIC, AudioTrack.MODE_STATIC, - AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT, + AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, AudioTrack.STATE_NO_STATIC_DATA); assertTrue("testConstructorMono16MusicStatic: " + res.mResultLog, res.mResult); @@ -179,7 +179,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF TestResults res = constructorTestMultiSampleRate( AudioManager.STREAM_MUSIC, AudioTrack.MODE_STATIC, - AudioFormat.CHANNEL_CONFIGURATION_STEREO, AudioFormat.ENCODING_PCM_16BIT, + AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT, AudioTrack.STATE_NO_STATIC_DATA); assertTrue("testConstructorStereo16MusicStatic: " + res.mResultLog, res.mResult); @@ -196,7 +196,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF TestResults res = constructorTestMultiSampleRate( AudioManager.STREAM_MUSIC, AudioTrack.MODE_STREAM, - AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_8BIT, + AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_8BIT, AudioTrack.STATE_INITIALIZED); assertTrue("testConstructorMono8MusicStream: " + res.mResultLog, res.mResult); @@ -208,7 +208,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF TestResults res = constructorTestMultiSampleRate( AudioManager.STREAM_MUSIC, AudioTrack.MODE_STREAM, - AudioFormat.CHANNEL_CONFIGURATION_STEREO, AudioFormat.ENCODING_PCM_8BIT, + AudioFormat.CHANNEL_OUT_STEREO, 
AudioFormat.ENCODING_PCM_8BIT, AudioTrack.STATE_INITIALIZED); assertTrue("testConstructorStereo8MusicStream: " + res.mResultLog, res.mResult); @@ -220,7 +220,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF TestResults res = constructorTestMultiSampleRate( AudioManager.STREAM_MUSIC, AudioTrack.MODE_STATIC, - AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_8BIT, + AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_8BIT, AudioTrack.STATE_NO_STATIC_DATA); assertTrue("testConstructorMono8MusicStatic: " + res.mResultLog, res.mResult); @@ -232,7 +232,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF TestResults res = constructorTestMultiSampleRate( AudioManager.STREAM_MUSIC, AudioTrack.MODE_STATIC, - AudioFormat.CHANNEL_CONFIGURATION_STEREO, AudioFormat.ENCODING_PCM_8BIT, + AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_8BIT, AudioTrack.STATE_NO_STATIC_DATA); assertTrue("testConstructorStereo8MusicStatic: " + res.mResultLog, res.mResult); @@ -248,15 +248,15 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF public void testConstructorStreamType() throws Exception { // constants for test final int TYPE_TEST_SR = 22050; - final int TYPE_TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO; + final int TYPE_TEST_CONF = AudioFormat.CHANNEL_OUT_STEREO; final int TYPE_TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT; final int TYPE_TEST_MODE = AudioTrack.MODE_STREAM; final int[] STREAM_TYPES = { AudioManager.STREAM_ALARM, AudioManager.STREAM_BLUETOOTH_SCO, AudioManager.STREAM_MUSIC, AudioManager.STREAM_NOTIFICATION, AudioManager.STREAM_RING, AudioManager.STREAM_SYSTEM, - AudioManager.STREAM_VOICE_CALL }; + AudioManager.STREAM_VOICE_CALL, AudioManager.STREAM_DTMF, }; final String[] STREAM_NAMES = { "STREAM_ALARM", "STREAM_BLUETOOTH_SCO", "STREAM_MUSIC", - "STREAM_NOTIFICATION", "STREAM_RING", "STREAM_SYSTEM", "STREAM_VOICE_CALL" }; + 
"STREAM_NOTIFICATION", "STREAM_RING", "STREAM_SYSTEM", "STREAM_VOICE_CALL", "STREAM_DTMF" }; boolean localTestRes = true; AudioTrack track = null; @@ -303,7 +303,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF // constants for test final String TEST_NAME = "testPlaybackHeadPositionAfterInit"; final int TEST_SR = 22050; - final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO; + final int TEST_CONF = AudioFormat.CHANNEL_OUT_STEREO; final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT; final int TEST_MODE = AudioTrack.MODE_STREAM; final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC; @@ -324,7 +324,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF // constants for test final String TEST_NAME = "testPlaybackHeadPositionIncrease"; final int TEST_SR = 22050; - final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO; + final int TEST_CONF = AudioFormat.CHANNEL_OUT_STEREO; final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT; final int TEST_MODE = AudioTrack.MODE_STREAM; final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC; @@ -352,7 +352,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF // constants for test final String TEST_NAME = "testPlaybackHeadPositionAfterFlush"; final int TEST_SR = 22050; - final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO; + final int TEST_CONF = AudioFormat.CHANNEL_OUT_STEREO; final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT; final int TEST_MODE = AudioTrack.MODE_STREAM; final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC; @@ -382,7 +382,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF // constants for test final String TEST_NAME = "testPlaybackHeadPositionAfterStop"; final int TEST_SR = 22050; - final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO; + final int TEST_CONF = AudioFormat.CHANNEL_OUT_STEREO; final int TEST_FORMAT = 
AudioFormat.ENCODING_PCM_16BIT; final int TEST_MODE = AudioTrack.MODE_STREAM; final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC; @@ -413,7 +413,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF // constants for test final String TEST_NAME = "testPlaybackHeadPositionAfterPause"; final int TEST_SR = 22050; - final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO; + final int TEST_CONF = AudioFormat.CHANNEL_OUT_STEREO; final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT; final int TEST_MODE = AudioTrack.MODE_STREAM; final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC; @@ -448,7 +448,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF // constants for test final String TEST_NAME = "testSetStereoVolumeMax"; final int TEST_SR = 22050; - final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO; + final int TEST_CONF = AudioFormat.CHANNEL_OUT_STEREO; final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT; final int TEST_MODE = AudioTrack.MODE_STREAM; final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC; @@ -474,7 +474,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF // constants for test final String TEST_NAME = "testSetStereoVolumeMin"; final int TEST_SR = 22050; - final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO; + final int TEST_CONF = AudioFormat.CHANNEL_OUT_STEREO; final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT; final int TEST_MODE = AudioTrack.MODE_STREAM; final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC; @@ -500,7 +500,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF // constants for test final String TEST_NAME = "testSetStereoVolumeMid"; final int TEST_SR = 22050; - final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO; + final int TEST_CONF = AudioFormat.CHANNEL_OUT_STEREO; final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT; final int TEST_MODE = 
AudioTrack.MODE_STREAM; final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC; @@ -526,7 +526,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF // constants for test final String TEST_NAME = "testSetPlaybackRate"; final int TEST_SR = 22050; - final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO; + final int TEST_CONF = AudioFormat.CHANNEL_OUT_STEREO; final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT; final int TEST_MODE = AudioTrack.MODE_STREAM; final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC; @@ -552,7 +552,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF // constants for test final String TEST_NAME = "testSetPlaybackRateZero"; final int TEST_SR = 22050; - final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO; + final int TEST_CONF = AudioFormat.CHANNEL_OUT_STEREO; final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT; final int TEST_MODE = AudioTrack.MODE_STREAM; final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC; @@ -574,7 +574,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF // constants for test final String TEST_NAME = "testSetPlaybackRateTwiceOutputSR"; final int TEST_SR = 22050; - final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO; + final int TEST_CONF = AudioFormat.CHANNEL_OUT_STEREO; final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT; final int TEST_MODE = AudioTrack.MODE_STREAM; final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC; @@ -601,7 +601,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF // constants for test final String TEST_NAME = "testSetGetPlaybackRate"; final int TEST_SR = 22050; - final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_STEREO; + final int TEST_CONF = AudioFormat.CHANNEL_OUT_STEREO; final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT; final int TEST_MODE = AudioTrack.MODE_STREAM; final int TEST_STREAM_TYPE = 
AudioManager.STREAM_MUSIC; @@ -628,7 +628,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF // constants for test final String TEST_NAME = "testSetPlaybackRateUninit"; final int TEST_SR = 22050; - final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO; + final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO; final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT; final int TEST_MODE = AudioTrack.MODE_STATIC; final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC; @@ -655,7 +655,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF // constants for test final String TEST_NAME = "testSetPlaybackHeadPositionPlaying"; final int TEST_SR = 22050; - final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO; + final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO; final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT; final int TEST_MODE = AudioTrack.MODE_STREAM; final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC; @@ -682,7 +682,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF // constants for test final String TEST_NAME = "testSetPlaybackHeadPositionStopped"; final int TEST_SR = 22050; - final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO; + final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO; final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT; final int TEST_MODE = AudioTrack.MODE_STREAM; final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC; @@ -710,7 +710,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF // constants for test final String TEST_NAME = "testSetPlaybackHeadPositionPaused"; final int TEST_SR = 22050; - final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO; + final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO; final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT; final int TEST_MODE = AudioTrack.MODE_STREAM; final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC; @@ -738,7 +738,7 @@ public 
class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF // constants for test final String TEST_NAME = "testSetPlaybackHeadPositionTooFar"; final int TEST_SR = 22050; - final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO; + final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO; final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT; final int TEST_MODE = AudioTrack.MODE_STREAM; final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC; @@ -770,7 +770,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF // constants for test final String TEST_NAME = "testSetLoopPointsStream"; final int TEST_SR = 22050; - final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO; + final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO; final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT; final int TEST_MODE = AudioTrack.MODE_STREAM; final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC; @@ -794,7 +794,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF // constants for test final String TEST_NAME = "testSetLoopPointsStartAfterEnd"; final int TEST_SR = 22050; - final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO; + final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO; final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT; final int TEST_MODE = AudioTrack.MODE_STATIC; final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC; @@ -818,7 +818,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF // constants for test final String TEST_NAME = "testSetLoopPointsSuccess"; final int TEST_SR = 22050; - final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO; + final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO; final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT; final int TEST_MODE = AudioTrack.MODE_STATIC; final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC; @@ -842,7 +842,7 @@ public class MediaAudioTrackTest extends 
ActivityInstrumentationTestCase2<MediaF // constants for test final String TEST_NAME = "testSetLoopPointsLoopTooLong"; final int TEST_SR = 22050; - final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO; + final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO; final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT; final int TEST_MODE = AudioTrack.MODE_STATIC; final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC; @@ -868,7 +868,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF // constants for test final String TEST_NAME = "testSetLoopPointsStartTooFar"; final int TEST_SR = 22050; - final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO; + final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO; final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT; final int TEST_MODE = AudioTrack.MODE_STATIC; final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC; @@ -896,7 +896,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF // constants for test final String TEST_NAME = "testSetLoopPointsEndTooFar"; final int TEST_SR = 22050; - final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO; + final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO; final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT; final int TEST_MODE = AudioTrack.MODE_STATIC; final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC; @@ -929,7 +929,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF // constants for test final String TEST_NAME = "testWriteByteOffsetTooBig"; final int TEST_SR = 22050; - final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO; + final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO; final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT; final int TEST_MODE = AudioTrack.MODE_STREAM; final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC; @@ -953,7 +953,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF // constants for test final 
String TEST_NAME = "testWriteShortOffsetTooBig"; final int TEST_SR = 22050; - final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO; + final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO; final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT; final int TEST_MODE = AudioTrack.MODE_STREAM; final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC; @@ -977,7 +977,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF // constants for test final String TEST_NAME = "testWriteByteSizeTooBig"; final int TEST_SR = 22050; - final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO; + final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO; final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT; final int TEST_MODE = AudioTrack.MODE_STREAM; final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC; @@ -1001,7 +1001,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF // constants for test final String TEST_NAME = "testWriteShortSizeTooBig"; final int TEST_SR = 22050; - final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO; + final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO; final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT; final int TEST_MODE = AudioTrack.MODE_STREAM; final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC; @@ -1025,7 +1025,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF // constants for test final String TEST_NAME = "testWriteByteNegativeOffset"; final int TEST_SR = 22050; - final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO; + final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO; final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT; final int TEST_MODE = AudioTrack.MODE_STREAM; final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC; @@ -1049,7 +1049,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF // constants for test final String TEST_NAME = "testWriteShortNegativeOffset"; final int TEST_SR = 
22050; - final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO; + final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO; final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT; final int TEST_MODE = AudioTrack.MODE_STREAM; final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC; @@ -1073,7 +1073,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF // constants for test final String TEST_NAME = "testWriteByteNegativeSize"; final int TEST_SR = 22050; - final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO; + final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO; final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT; final int TEST_MODE = AudioTrack.MODE_STREAM; final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC; @@ -1097,7 +1097,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF // constants for test final String TEST_NAME = "testWriteShortNegativeSize"; final int TEST_SR = 22050; - final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO; + final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO; final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT; final int TEST_MODE = AudioTrack.MODE_STREAM; final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC; @@ -1121,7 +1121,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF // constants for test final String TEST_NAME = "testWriteByte"; final int TEST_SR = 22050; - final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO; + final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO; final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT; final int TEST_MODE = AudioTrack.MODE_STREAM; final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC; @@ -1145,7 +1145,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF // constants for test final String TEST_NAME = "testWriteShort"; final int TEST_SR = 22050; - final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO; + final int TEST_CONF 
= AudioFormat.CHANNEL_OUT_MONO; final int TEST_FORMAT = AudioFormat.ENCODING_PCM_16BIT; final int TEST_MODE = AudioTrack.MODE_STREAM; final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC; @@ -1169,7 +1169,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF // constants for test final String TEST_NAME = "testWriteByte8bit"; final int TEST_SR = 22050; - final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO; + final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO; final int TEST_FORMAT = AudioFormat.ENCODING_PCM_8BIT; final int TEST_MODE = AudioTrack.MODE_STREAM; final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC; @@ -1193,7 +1193,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF // constants for test final String TEST_NAME = "testWriteShort8bit"; final int TEST_SR = 22050; - final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO; + final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO; final int TEST_FORMAT = AudioFormat.ENCODING_PCM_8BIT; final int TEST_MODE = AudioTrack.MODE_STREAM; final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC; @@ -1221,7 +1221,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF // constant for test final String TEST_NAME = "testGetMinBufferSizeTooLowSR"; final int TEST_SR = 3999; - final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO; + final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO; final int TEST_FORMAT = AudioFormat.ENCODING_PCM_8BIT; final int TEST_MODE = AudioTrack.MODE_STREAM; final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC; @@ -1238,7 +1238,7 @@ public class MediaAudioTrackTest extends ActivityInstrumentationTestCase2<MediaF // constant for testg final String TEST_NAME = "testGetMinBufferSizeTooHighSR"; final int TEST_SR = 48001; - final int TEST_CONF = AudioFormat.CHANNEL_CONFIGURATION_MONO; + final int TEST_CONF = AudioFormat.CHANNEL_OUT_MONO; final int TEST_FORMAT = 
AudioFormat.ENCODING_PCM_8BIT; final int TEST_MODE = AudioTrack.MODE_STREAM; final int TEST_STREAM_TYPE = AudioManager.STREAM_MUSIC; diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaMetadataTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaMetadataTest.java index 3715913877e4..ca60e8cfa4c1 100644 --- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaMetadataTest.java +++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaMetadataTest.java @@ -23,6 +23,7 @@ import android.test.suitebuilder.annotation.Suppress; import android.util.Log; import com.android.mediaframeworktest.MediaNames; +import com.android.mediaframeworktest.MediaProfileReader; /** * This metadata test suite test the basic functionality of the * MediaMetadataRetriever @@ -31,12 +32,12 @@ import com.android.mediaframeworktest.MediaNames; public class MediaMetadataTest extends AndroidTestCase { private static final String TAG = "MediaMetadataTest"; - + public static enum METADATA_EXPECTEDRESULT{ FILE_PATH,CD_TRACK, ALBUM, ARTIST, AUTHOR, COMPOSER, DATE, GENRE, TITLE, - YEAR, DURATION, NUM_TRACKS + YEAR, DURATION, NUM_TRACKS, WRITER } public static enum MP3_TEST_FILE{ @@ -130,8 +131,6 @@ public class MediaMetadataTest extends AndroidTestCase { validateMetatData(non_mp3_test_file.AMRWB.ordinal(), MediaNames.META_DATA_OTHERS); } - //Bug# 1440173 - skip this test case now - @Suppress @MediumTest public static void testM4A1_Metadata() throws Exception { validateMetatData(non_mp3_test_file.M4A1.ordinal(), MediaNames.META_DATA_OTHERS); @@ -195,12 +194,17 @@ public class MediaMetadataTest extends AndroidTestCase { } private static void validateMetatData(int fileIndex, String meta_data_file[][]) { + Log.v(TAG, "filePath = "+ meta_data_file[fileIndex][0]); + if ((meta_data_file[fileIndex][0].endsWith("wma") && !MediaProfileReader.getWMAEnable()) || + 
(meta_data_file[fileIndex][0].endsWith("wmv") && !MediaProfileReader.getWMVEnable())) { + Log.v(TAG, "Skip test since windows media is not supported"); + return; + } String value = null; MediaMetadataRetriever retriever = new MediaMetadataRetriever(); retriever.setMode(MediaMetadataRetriever.MODE_GET_METADATA_ONLY); try { retriever.setDataSource(meta_data_file[fileIndex][0]); - Log.v(TAG, "filePath = "+ meta_data_file[fileIndex][0]); } catch(Exception e) { Log.v(TAG, "Failed: "+meta_data_file[fileIndex][0] + " " + e.toString()); //Set the test case failure whenever it failed to setDataSource @@ -254,6 +258,10 @@ public class MediaMetadataTest extends AndroidTestCase { Log.v(TAG, "Track : "+ value); assertEquals(TAG,meta_data_file[fileIndex][meta.NUM_TRACKS.ordinal()], value); + value = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_WRITER); + Log.v(TAG, "Writer : "+ value); + assertEquals(TAG,meta_data_file[fileIndex][meta.WRITER.ordinal()], value); + retriever.release(); } } diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaMimeTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaMimeTest.java index d2809c1a4bea..ddf5e0bfaf0e 100644 --- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaMimeTest.java +++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaMimeTest.java @@ -83,7 +83,8 @@ public class MediaMimeTest extends ActivityInstrumentationTestCase2<MediaFramewo assertMediaPlaybackActivityHandles("audio/*"); } - @MediumTest + // TODO: temporarily remove from medium suite because it hangs whole suite + // @MediumTest // Checks the MediaPlaybackActivity handles application/itunes. Some servers // set the Content-type header to application/iTunes (with capital T, but // the download manager downcasts it) for their MP3 podcasts. 
This is non diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaPlayerApiTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaPlayerApiTest.java index ea42f53155df..4e30f913c935 100644 --- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaPlayerApiTest.java +++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaPlayerApiTest.java @@ -18,6 +18,7 @@ package com.android.mediaframeworktest.functional; import com.android.mediaframeworktest.MediaFrameworkTest; import com.android.mediaframeworktest.MediaNames; +import com.android.mediaframeworktest.MediaProfileReader; import android.content.Context; import android.test.ActivityInstrumentationTestCase; @@ -35,11 +36,15 @@ import java.io.File; public class MediaPlayerApiTest extends ActivityInstrumentationTestCase<MediaFrameworkTest> { private boolean duratoinWithinTolerence = false; private String TAG = "MediaPlayerApiTest"; + private boolean isWMAEnable = false; + private boolean isWMVEnable = false; Context mContext; public MediaPlayerApiTest() { super("com.android.mediaframeworktest", MediaFrameworkTest.class); + isWMAEnable = MediaProfileReader.getWMAEnable(); + isWMVEnable = MediaProfileReader.getWMVEnable(); } protected void setUp() throws Exception { @@ -82,9 +87,11 @@ public class MediaPlayerApiTest extends ActivityInstrumentationTestCase<MediaFra @MediumTest public void testWMA9GetDuration() throws Exception { - int duration = CodecTest.getDuration(MediaNames.WMA9); - duratoinWithinTolerence = verifyDuration(duration, MediaNames.WMA9_LENGTH); - assertTrue("WMA9 getDuration", duratoinWithinTolerence); + if (isWMAEnable) { + int duration = CodecTest.getDuration(MediaNames.WMA9); + duratoinWithinTolerence = verifyDuration(duration, MediaNames.WMA9_LENGTH); + assertTrue("WMA9 getDuration", duratoinWithinTolerence); + } } @MediumTest @@ -123,8 +130,10 @@ public class 
MediaPlayerApiTest extends ActivityInstrumentationTestCase<MediaFra @LargeTest public void testWMA9GetCurrentPosition() throws Exception { - boolean currentPosition = CodecTest.getCurrentPosition(MediaNames.WMA9); - assertTrue("WMA9 GetCurrentPosition", currentPosition); + if (isWMAEnable) { + boolean currentPosition = CodecTest.getCurrentPosition(MediaNames.WMA9); + assertTrue("WMA9 GetCurrentPosition", currentPosition); + } } @LargeTest @@ -160,8 +169,10 @@ public class MediaPlayerApiTest extends ActivityInstrumentationTestCase<MediaFra @LargeTest public void testWMA9Pause() throws Exception { - boolean isPaused = CodecTest.pause(MediaNames.WMA9); - assertTrue("WMA9 Pause", isPaused); + if (isWMAEnable) { + boolean isPaused = CodecTest.pause(MediaNames.WMA9); + assertTrue("WMA9 Pause", isPaused); + } } @LargeTest @@ -231,8 +242,10 @@ public class MediaPlayerApiTest extends ActivityInstrumentationTestCase<MediaFra @LargeTest public void testWMA9SetLooping() throws Exception { - boolean isLoop = CodecTest.setLooping(MediaNames.WMA9); - assertTrue("WMA9 setLooping", isLoop); + if (isWMAEnable) { + boolean isLoop = CodecTest.setLooping(MediaNames.WMA9); + assertTrue("WMA9 setLooping", isLoop); + } } @LargeTest @@ -269,8 +282,10 @@ public class MediaPlayerApiTest extends ActivityInstrumentationTestCase<MediaFra @LargeTest public void testWMA9SeekTo() throws Exception { - boolean isLoop = CodecTest.seekTo(MediaNames.WMA9); - assertTrue("WMA9 seekTo", isLoop); + if (isWMAEnable) { + boolean isLoop = CodecTest.seekTo(MediaNames.WMA9); + assertTrue("WMA9 seekTo", isLoop); + } } @LargeTest @@ -309,8 +324,10 @@ public class MediaPlayerApiTest extends ActivityInstrumentationTestCase<MediaFra @Suppress @LargeTest public void testWMA9SeekToEnd() throws Exception { - boolean isEnd = CodecTest.seekToEnd(MediaNames.WMA9); - assertTrue("WMA9 seekToEnd", isEnd); + if (isWMAEnable) { + boolean isEnd = CodecTest.seekToEnd(MediaNames.WMA9); + assertTrue("WMA9 seekToEnd", isEnd); + } } 
@LargeTest @@ -327,8 +344,10 @@ public class MediaPlayerApiTest extends ActivityInstrumentationTestCase<MediaFra @LargeTest public void testWAVSeekToEnd() throws Exception { - boolean isEnd = CodecTest.seekToEnd(MediaNames.WAV); - assertTrue("WAV seekToEnd", isEnd); + if (isWMVEnable) { + boolean isEnd = CodecTest.seekToEnd(MediaNames.WAV); + assertTrue("WAV seekToEnd", isEnd); + } } @MediumTest @@ -385,8 +404,12 @@ public class MediaPlayerApiTest extends ActivityInstrumentationTestCase<MediaFra @LargeTest public void testVideoWMVSeekTo() throws Exception { - boolean isSeek = CodecTest.videoSeekTo(MediaNames.VIDEO_WMV); - assertTrue("WMV SeekTo", isSeek); + Log.v(TAG, "wmv not enable"); + if (isWMVEnable) { + Log.v(TAG, "wmv enable"); + boolean isSeek = CodecTest.videoSeekTo(MediaNames.VIDEO_WMV); + assertTrue("WMV SeekTo", isSeek); + } } @LargeTest @@ -416,6 +439,7 @@ public class MediaPlayerApiTest extends ActivityInstrumentationTestCase<MediaFra @MediumTest public void testPrepareAsyncReset() throws Exception { + assertTrue(MediaFrameworkTest.checkStreamingServer()); boolean isReset = CodecTest.prepareAsyncReset(MediaNames.STREAM_MP3); assertTrue("PrepareAsync Reset", isReset); } @@ -435,12 +459,20 @@ public class MediaPlayerApiTest extends ActivityInstrumentationTestCase<MediaFra @LargeTest public void testLocalMp3PrepareAsyncCallback() throws Exception { boolean onPrepareSuccess = - CodecTest.prepareAsyncCallback(MediaNames.VIDEO_H263_AMR, false); + CodecTest.prepareAsyncCallback(MediaNames.MP3CBR, false); assertTrue("LocalMp3prepareAsyncCallback", onPrepareSuccess); } + + @LargeTest + public void testLocalH263AMRPrepareAsyncCallback() throws Exception { + boolean onPrepareSuccess = + CodecTest.prepareAsyncCallback(MediaNames.VIDEO_H263_AMR, false); + assertTrue("testLocalH263AMRPrepareAsyncCallback", onPrepareSuccess); + } @LargeTest public void testStreamPrepareAsyncCallback() throws Exception { + assertTrue(MediaFrameworkTest.checkStreamingServer()); 
boolean onPrepareSuccess = CodecTest.prepareAsyncCallback(MediaNames.STREAM_H264_480_360_1411k, false); assertTrue("StreamH264PrepareAsyncCallback", onPrepareSuccess); @@ -448,6 +480,7 @@ public class MediaPlayerApiTest extends ActivityInstrumentationTestCase<MediaFra @LargeTest public void testStreamPrepareAsyncCallbackReset() throws Exception { + assertTrue(MediaFrameworkTest.checkStreamingServer()); boolean onPrepareSuccess = CodecTest.prepareAsyncCallback(MediaNames.STREAM_H264_480_360_1411k, true); assertTrue("StreamH264PrepareAsyncCallback", onPrepareSuccess); diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaPlayerInvokeTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaPlayerInvokeTest.java new file mode 100644 index 000000000000..0b96bae4029c --- /dev/null +++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaPlayerInvokeTest.java @@ -0,0 +1,71 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.android.mediaframeworktest.functional; + +import com.android.mediaframeworktest.MediaFrameworkTest; +import com.android.mediaframeworktest.MediaNames; + +import android.test.ActivityInstrumentationTestCase2; +import android.util.Log; +import android.test.suitebuilder.annotation.LargeTest; +import android.test.suitebuilder.annotation.MediumTest; +import android.test.suitebuilder.annotation.Suppress; + +import android.media.MediaPlayer; +import android.os.Parcel; + +import java.util.Calendar; +import java.util.Random; + +// Tests for the invoke method in the MediaPlayer. +public class MediaPlayerInvokeTest extends ActivityInstrumentationTestCase2<MediaFrameworkTest> { + private static final String TAG = "MediaPlayerInvokeTest"; + private MediaPlayer mPlayer; + private Random rnd; + + public MediaPlayerInvokeTest() { + super("com.android.mediaframeworktest", MediaFrameworkTest.class); + rnd = new Random(Calendar.getInstance().getTimeInMillis()); + } + + @Override + protected void setUp() throws Exception { + super.setUp(); + mPlayer = new MediaPlayer(); + } + + @Override + protected void tearDown() throws Exception { + mPlayer.release(); + super.tearDown(); + } + + // Generate a random number, sends it to the ping test player. 
+ @Suppress + @MediumTest + public void testPing() throws Exception { + mPlayer.setDataSource("test:invoke_mock_media_player.so?url=ping"); + + Parcel request = mPlayer.newRequest(); + Parcel reply = Parcel.obtain(); + + int val = rnd.nextInt(); + request.writeInt(val); + assertEquals(0, mPlayer.invoke(request, reply)); + assertEquals(val, reply.readInt()); + } +} diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaRecorderTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaRecorderTest.java index ef0a3b1f1fe1..fdc5970df92f 100644 --- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaRecorderTest.java +++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaRecorderTest.java @@ -29,7 +29,7 @@ import android.test.ActivityInstrumentationTestCase; import android.util.Log; import android.view.SurfaceHolder; import android.view.SurfaceView; - +import com.android.mediaframeworktest.MediaProfileReader; import android.test.suitebuilder.annotation.LargeTest; import android.test.suitebuilder.annotation.Suppress; @@ -46,6 +46,9 @@ public class MediaRecorderTest extends ActivityInstrumentationTestCase<MediaFram private SurfaceHolder mSurfaceHolder = null; private MediaRecorder mRecorder; + + private int MIN_VIDEO_FPS = 5; + Context mContext; Camera mCamera; @@ -96,7 +99,70 @@ public class MediaRecorderTest extends ActivityInstrumentationTestCase<MediaFram } } - + private boolean recordVideoWithPara(String encoder, String audio, String quality){ + boolean recordSuccess = false; + int videoEncoder = MediaProfileReader.OUTPUT_FORMAT_TABLE.get(encoder); + int audioEncoder = MediaProfileReader.OUTPUT_FORMAT_TABLE.get(audio); + int videoWidth = MediaProfileReader.OUTPUT_FORMAT_TABLE.get(encoder + ".width_" + quality); + int videoHeight = + MediaProfileReader.OUTPUT_FORMAT_TABLE.get(encoder + ".height_" + quality); + int videoFps = 
MediaProfileReader.OUTPUT_FORMAT_TABLE.get(encoder + ".fps_" + quality); + int videoBitrate = MediaProfileReader.OUTPUT_FORMAT_TABLE.get(encoder + ".bps_" + quality); + int audioBitrate = MediaProfileReader.OUTPUT_FORMAT_TABLE.get(audio + ".bps_" + quality); + int audioChannels = MediaProfileReader.OUTPUT_FORMAT_TABLE.get(audio + ".ch_" + quality); + int audioSamplingRate = + MediaProfileReader.OUTPUT_FORMAT_TABLE.get(audio + ".hz_" + quality); + + if (videoFps < MIN_VIDEO_FPS) { + videoFps = MIN_VIDEO_FPS; + } + mSurfaceHolder = MediaFrameworkTest.mSurfaceView.getHolder(); + String filename = ("/sdcard/" + encoder + "_" + audio + "_" + quality + ".3gp"); + try { + Log.v(TAG, "video encoder :" + videoEncoder); + Log.v(TAG, "audio encoder :" + audioEncoder); + Log.v(TAG, "quality : " + quality); + Log.v(TAG, "encoder : " + encoder); + Log.v(TAG, "audio : " + audio); + Log.v(TAG, "videoWidth : " + videoWidth); + Log.v(TAG, "videoHeight : " + videoHeight); + Log.v(TAG, "videoFPS : " + videoFps); + Log.v(TAG, "videobitrate : " + videoBitrate); + Log.v(TAG, "audioBitrate : " + audioBitrate); + Log.v(TAG, "audioChannel : " + audioChannels); + Log.v(TAG, "AudioSampleRate : " + audioSamplingRate); + + MediaRecorder mMediaRecorder = new MediaRecorder(); + mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC); + mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA); + mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP); + mMediaRecorder.setOutputFile(filename); + mMediaRecorder.setVideoFrameRate(videoFps); + mMediaRecorder.setVideoSize(videoWidth, videoHeight); + mMediaRecorder.setParameters(String.format("video-param-encoding-bitrate=%d", + videoBitrate)); + mMediaRecorder.setParameters(String.format("audio-param-encoding-bitrate=%d", + audioBitrate)); + mMediaRecorder.setParameters(String.format("audio-param-number-of-channels=%d", + audioChannels)); + mMediaRecorder.setParameters(String.format("audio-param-sampling-rate=%d", + 
audioSamplingRate)); + mMediaRecorder.setVideoEncoder(videoEncoder); + mMediaRecorder.setAudioEncoder(audioEncoder); + mMediaRecorder.setPreviewDisplay(mSurfaceHolder.getSurface()); + mMediaRecorder.prepare(); + mMediaRecorder.start(); + Thread.sleep(MediaNames.RECORDED_TIME); + mMediaRecorder.stop(); + mMediaRecorder.release(); + recordSuccess = validateVideo(filename, videoWidth, videoHeight); + } catch (Exception e) { + Log.v(TAG, e.toString()); + return false; + } + return recordSuccess; + } + private boolean invalidRecordSetting(int frameRate, int width, int height, int videoFormat, int outFormat, String outFile, boolean videoOnly) { try { @@ -148,8 +214,11 @@ public class MediaRecorderTest extends ActivityInstrumentationTestCase<MediaFram Log.v(TAG, "before getduration"); mOutputDuration = mediaPlayer.getDuration(); Log.v(TAG, "get video dimension"); - mOutputVideoHeight = CodecTest.videoHeight(outputFilePath); - mOutputVideoWidth = CodecTest.videoWidth(outputFilePath); + Thread.sleep(1000); + mOutputVideoHeight = mediaPlayer.getVideoHeight(); + mOutputVideoWidth = mediaPlayer.getVideoWidth(); + //mOutputVideoHeight = CodecTest.videoHeight(outputFilePath); + //mOutputVideoWidth = CodecTest.videoWidth(outputFilePath); mediaPlayer.release(); } catch (Exception e) { Log.v(TAG, e.toString()); @@ -170,7 +239,7 @@ public class MediaRecorderTest extends ActivityInstrumentationTestCase<MediaFram validVideo = true; } Log.v(TAG, "width = " + mOutputVideoWidth + " height = " + mOutputVideoHeight + " Duration = " + mOutputDuration); - removeFile(filePath); + //removeFile(filePath); return validVideo; } @@ -255,10 +324,10 @@ public class MediaRecorderTest extends ActivityInstrumentationTestCase<MediaFram recordVideo(15, 352, 288, MediaRecorder.VideoEncoder.H263, MediaRecorder.OutputFormat.THREE_GPP, MediaNames.RECORDED_PORTRAIT_H263, true); - videoRecordedResult = - validateVideo(MediaNames.RECORDED_PORTRAIT_H263, 352, 288); mCamera.lock(); mCamera.release(); + 
videoRecordedResult = + validateVideo(MediaNames.RECORDED_PORTRAIT_H263, 352, 288); } catch (Exception e) { Log.v(TAG, e.toString()); } @@ -348,7 +417,8 @@ public class MediaRecorderTest extends ActivityInstrumentationTestCase<MediaFram MediaRecorder.OutputFormat.THREE_GPP, MediaNames.RECORDED_VIDEO_3GP, false); assertTrue("Invalid video Size", isTestInvalidVideoSizeSuccessful); } - + + @Suppress @LargeTest public void testInvalidFrameRate() throws Exception { boolean isTestInvalidFrameRateSuccessful = false; @@ -356,6 +426,43 @@ public class MediaRecorderTest extends ActivityInstrumentationTestCase<MediaFram MediaRecorder.OutputFormat.THREE_GPP, MediaNames.RECORDED_VIDEO_3GP, false); assertTrue("Invalid FrameRate", isTestInvalidFrameRateSuccessful); } - -} + @LargeTest + //test cases for the new codec + public void testDeviceSpecificCodec() throws Exception { + int noOfFailure = 0; + boolean recordSuccess = false; + String deviceType = MediaProfileReader.getDeviceType(); + Log.v(TAG, "deviceType = " + deviceType); + // Test cases are device specified + MediaProfileReader.createVideoProfileTable(); + MediaProfileReader.createAudioProfileTable(); + MediaProfileReader.createEncoderTable(); + String encoderType = MediaProfileReader.getVideoCodecProperty(); + String audioType = MediaProfileReader.getAudioCodecProperty(); + if ((encoderType.length() != 0) || (audioType.length() != 0)) { + String audio[] = audioType.split(","); + String encoder[] = encoderType.split(","); + for (int k = 0; k < 2; k++) { + for (int i = 0; i < encoder.length; i++) { + for (int j = 0; j < audio.length; j++) { + if (k == 0) { + recordSuccess = recordVideoWithPara(encoder[i], audio[j], "high"); + } else { + recordSuccess = recordVideoWithPara(encoder[i], audio[j], "low"); + } + if (!recordSuccess) { + Log.v(TAG, "testDeviceSpecificCodec failed"); + Log.v(TAG, "Encoder = " + encoder[i] + "Audio Encoder = " + audio[j]); + noOfFailure++; + } + // assertTrue((encoder[i] + audio[j]), 
recordSuccess); + } + } + } + if (noOfFailure != 0) { + assertTrue("testDeviceSpecificCodec", false); + } + } + } +} diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/performance/MediaPlayerPerformance.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/performance/MediaPlayerPerformance.java index 442c35be1a26..6e3f5e34853d 100644 --- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/performance/MediaPlayerPerformance.java +++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/performance/MediaPlayerPerformance.java @@ -40,6 +40,7 @@ import java.io.FileWriter; import java.io.BufferedWriter; import android.media.MediaMetadataRetriever; +import com.android.mediaframeworktest.MediaProfileReader; /** * Junit / Instrumentation - performance measurement for media player and @@ -47,7 +48,7 @@ import android.media.MediaMetadataRetriever; */ public class MediaPlayerPerformance extends ActivityInstrumentationTestCase<MediaFrameworkTest> { - private String TAG = "MediaFrameworkPerformance"; + private String TAG = "MediaPlayerPerformance"; private SQLiteDatabase mDB; private SurfaceHolder mSurfaceHolder = null; @@ -76,9 +77,11 @@ public class MediaPlayerPerformance extends ActivityInstrumentationTestCase<Medi public void createDB() { mDB = SQLiteDatabase.openOrCreateDatabase("/sdcard/perf.db", null); - mDB.execSQL("CREATE TABLE perfdata (_id INTEGER PRIMARY KEY," + + mDB.execSQL("CREATE TABLE IF NOT EXISTS perfdata (_id INTEGER PRIMARY KEY," + "file TEXT," + "setdatatime LONG," + "preparetime LONG," + "playtime LONG" + ");"); + //clean the table before adding new data + mDB.execSQL("DELETE FROM perfdata"); } public void audioPlaybackStartupTime(String[] testFile) { @@ -137,6 +140,10 @@ public class MediaPlayerPerformance extends ActivityInstrumentationTestCase<Medi audioPlaybackStartupTime(MediaNames.MP3FILES); audioPlaybackStartupTime(MediaNames.AACFILES); + //close the database after all 
transactions + if (mDB.isOpen()) { + mDB.close(); + } } public void wmametadatautility(String[] testFile) { @@ -302,6 +309,8 @@ public class MediaPlayerPerformance extends ActivityInstrumentationTestCase<Medi } public boolean validateMemoryResult (int startPid, int startMemory, Writer output) throws Exception { + //Wait for 10 seconds to make sure the memory settle. + Thread.sleep(10000); mEndPid = getMediaserverPid(); mEndMemory = getMediaserverVsize(); Log.v(TAG, "End Memory " + mEndMemory); @@ -378,23 +387,24 @@ public class MediaPlayerPerformance extends ActivityInstrumentationTestCase<Medi @LargeTest public void testWMVVideoPlaybackMemoryUsage() throws Exception { boolean memoryResult = false; - mStartPid = getMediaserverPid(); - - File wmvMemoryOut = new File(MEDIA_MEMORY_OUTPUT); - Writer output = new BufferedWriter(new FileWriter(wmvMemoryOut, true)); - output.write("WMV video playback only\n"); - for (int i = 0; i < NUM_STRESS_LOOP; i++) { - mediaStressPlayback(MediaNames.VIDEO_WMV); - if (i == 0) { - mStartMemory = getMediaserverVsize(); - output.write("Start memory : " + mStartMemory + "\n"); + if (MediaProfileReader.getWMVEnable()){ + mStartPid = getMediaserverPid(); + File wmvMemoryOut = new File(MEDIA_MEMORY_OUTPUT); + Writer output = new BufferedWriter(new FileWriter(wmvMemoryOut, true)); + output.write("WMV video playback only\n"); + for (int i = 0; i < NUM_STRESS_LOOP; i++) { + mediaStressPlayback(MediaNames.VIDEO_WMV); + if (i == 0) { + mStartMemory = getMediaserverVsize(); + output.write("Start memory : " + mStartMemory + "\n"); + } + getMemoryWriteToLog(output); } - getMemoryWriteToLog(output); + output.write("\n"); + memoryResult = validateMemoryResult(mStartPid, mStartMemory, output); + output.close(); + assertTrue("wmv playback memory test", memoryResult); } - output.write("\n"); - memoryResult = validateMemoryResult(mStartPid, mStartMemory, output); - output.close(); - assertTrue("wmv playback memory test", memoryResult); } // Test case 4: 
Capture the memory usage after every 20 video only recorded diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/stress/MediaPlayerStressTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/stress/MediaPlayerStressTest.java index 5e213d704bbe..b476e01a5408 100644 --- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/stress/MediaPlayerStressTest.java +++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/stress/MediaPlayerStressTest.java @@ -40,8 +40,8 @@ public class MediaPlayerStressTest extends ActivityInstrumentationTestCase2<Medi private static final int NUMBER_OF_RANDOM_REPOSITION_AND_PLAY = 10; private static final int NUMBER_OF_RANDOM_REPOSITION_AND_PLAY_SHORT = 5; - private static final int NUMBER_OF_STRESS_LOOPS = 1000; - private static final int PLAYBACK_END_TOLERANCE = 5000; + private static final int NUMBER_OF_STRESS_LOOPS = 500; + private static final int PLAYBACK_END_TOLERANCE = 30000; private static final int WAIT_UNTIL_PLAYBACK_FINISH = 515000 ; public MediaPlayerStressTest() { @@ -64,6 +64,7 @@ public class MediaPlayerStressTest extends ActivityInstrumentationTestCase2<Medi mSurfaceHolder = MediaFrameworkTest.mSurfaceView.getHolder(); try { + assertTrue(MediaFrameworkTest.checkStreamingServer()); for (int i = 0; i < NUMBER_OF_STRESS_LOOPS; i++) { MediaPlayer mp = new MediaPlayer(); mp.setDataSource(MediaNames.STREAM_H264_480_360_1411k); @@ -94,9 +95,10 @@ public class MediaPlayerStressTest extends ActivityInstrumentationTestCase2<Medi long randomseed = System.currentTimeMillis(); Random generator = new Random(randomseed); Log.v(TAG, "Random seed: " + randomseed); - int video_duration = MediaNames.VIDEO_H263_AAC_DURATION; + int video_duration = MediaNames.VIDEO_H263_AMR_DURATION; int random_play_time = 0; int random_seek_time = 0; + int random_no_of_seek = 0; mSurfaceHolder = MediaFrameworkTest.mSurfaceView.getHolder(); try { @@ -106,10 +108,16 @@ public class 
MediaPlayerStressTest extends ActivityInstrumentationTestCase2<Medi mp.setDisplay(MediaFrameworkTest.mSurfaceView.getHolder()); mp.prepare(); mp.start(); - // Random seek and play - for (int j = 0; j < generator.nextInt(10); j++) { + random_no_of_seek = generator.nextInt(10); + // make sure the seek at least run once. + if (random_no_of_seek == 0) { + random_no_of_seek = 1; + } + Log.v(TAG, "random_seek = " + random_no_of_seek); + // Play for 10 seconds then random seekTo + for (int j = 0; j < random_no_of_seek; j++) { random_play_time = - generator.nextInt(video_duration / 2); + generator.nextInt(video_duration / 100); Log.v(TAG, "Play time = " + random_play_time); Thread.sleep(random_play_time); random_seek_time = @@ -117,12 +125,13 @@ public class MediaPlayerStressTest extends ActivityInstrumentationTestCase2<Medi Log.v(TAG, "Seek time = " + random_seek_time); mp.seekTo(random_seek_time); } - //wait until the movie finish and check the current position - //Make sure the wait time is long enough - long wait_until_playback_finish = video_duration - random_seek_time + PLAYBACK_END_TOLERANCE * 2; - Thread.sleep(wait_until_playback_finish); + //Seek to 10s from the end of the video + mp.seekTo(video_duration - 10000); + //After reposition, play 30 seconds the video should be finished. 
+ Thread.sleep(PLAYBACK_END_TOLERANCE); Log.v(TAG, "CurrentPosition = " + mp.getCurrentPosition()); - if ( mp.isPlaying() || mp.getCurrentPosition() > (video_duration + PLAYBACK_END_TOLERANCE)){ + if ( mp.isPlaying() || mp.getCurrentPosition() + > (video_duration)){ assertTrue("Current PlayTime greater than duration", false); } mp.release(); diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/stress/MediaRecorderStressTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/stress/MediaRecorderStressTest.java index 69e93a1bc164..b6a1bfab6c8b 100644 --- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/stress/MediaRecorderStressTest.java +++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/stress/MediaRecorderStressTest.java @@ -16,6 +16,7 @@ package com.android.mediaframeworktest.stress; + import com.android.mediaframeworktest.MediaFrameworkTest; import java.io.BufferedWriter; @@ -26,6 +27,7 @@ import java.io.Writer; import android.hardware.Camera; import android.media.MediaPlayer; import android.media.MediaRecorder; +import android.os.Looper; import android.test.ActivityInstrumentationTestCase2; import android.test.suitebuilder.annotation.LargeTest; import android.util.Log; @@ -47,14 +49,21 @@ public class MediaRecorderStressTest extends ActivityInstrumentationTestCase2<Me private static final int NUMBER_OF_RECORDERANDPLAY_STRESS_LOOPS = 50; private static final int NUMBER_OF_SWTICHING_LOOPS_BW_CAMERA_AND_RECORDER = 200; private static final long WAIT_TIME_CAMERA_TEST = 3000; // 3 second - private static final long WAIT_TIME_RECORDER_TEST = 60000; // 6 second - private static final long WAIT_TIME_RECORD = 100000; // 10 seconds - private static final long WAIT_TIME_PLAYBACK = 60000; // 6 second + private static final long WAIT_TIME_RECORDER_TEST = 6000; // 6 second + private static final long WAIT_TIME_RECORD = 10000; // 10 seconds + private static final long WAIT_TIME_PLAYBACK = 
6000; // 6 second private static final String OUTPUT_FILE = "/sdcard/temp"; private static final String OUTPUT_FILE_EXT = ".3gp"; private static final String MEDIA_STRESS_OUTPUT = "/sdcard/mediaStressOutput.txt"; - + private Looper mCameraLooper = null; + private Looper mRecorderLooper = null; + private final Object lock = new Object(); + private final Object recorderlock = new Object(); + private static int WAIT_FOR_COMMAND_TO_COMPLETE = 10000; // Milliseconds. + private final CameraErrorCallback mCameraErrorCallback = new CameraErrorCallback(); + private final RecorderErrorCallback mRecorderErrorCallback = new RecorderErrorCallback(); + public MediaRecorderStressTest() { super("com.android.mediaframeworktest", MediaFrameworkTest.class); } @@ -63,41 +72,129 @@ public class MediaRecorderStressTest extends ActivityInstrumentationTestCase2<Me getActivity(); super.setUp(); } - + + private final class CameraErrorCallback implements android.hardware.Camera.ErrorCallback { + public void onError(int error, android.hardware.Camera camera) { + if (error == android.hardware.Camera.CAMERA_ERROR_SERVER_DIED) { + assertTrue("Camera test mediaserver died", false); + } + } + } + + private final class RecorderErrorCallback implements MediaRecorder.OnErrorListener { + public void onError(MediaRecorder mr, int what, int extra) { + // fail the test case no matter what error come up + assertTrue("mediaRecorder error", false); + } + } + + private void initializeCameraMessageLooper() { + Log.v(TAG, "start looper"); + new Thread() { + @Override + public void run() { + // Set up a looper to be used by camera. 
+ Looper.prepare(); + Log.v(TAG, "start loopRun"); + mCameraLooper = Looper.myLooper(); + mCamera = Camera.open(); + synchronized (lock) { + lock.notify(); + } + Looper.loop(); + Log.v(TAG, "initializeMessageLooper: quit."); + } + }.start(); + } + + private void initializeRecorderMessageLooper() { + Log.v(TAG, "start looper"); + new Thread() { + @Override + public void run() { + Looper.prepare(); + Log.v(TAG, "start loopRun"); + mRecorderLooper = Looper.myLooper(); + mRecorder = new MediaRecorder(); + synchronized (recorderlock) { + recorderlock.notify(); + } + Looper.loop(); // Blocks forever until Looper.quit() is called. + Log.v(TAG, "initializeMessageLooper: quit."); + } + }.start(); + } + + /* + * Terminates the message looper thread. + */ + private void terminateCameraMessageLooper() { + mCameraLooper.quit(); + try { + Thread.sleep(1000); + } catch (Exception e){ + Log.v(TAG, e.toString()); + } + mCamera.release(); + } + + /* + * Terminates the message looper thread. + */ + private void terminateRecorderMessageLooper() { + mRecorderLooper.quit(); + try { + Thread.sleep(1000); + } catch (Exception e){ + Log.v(TAG, e.toString()); + } + mRecorder.release(); + } + //Test case for stressing the camera preview. 
@LargeTest public void testStressCamera() throws Exception { - SurfaceHolder mSurfaceHolder; + SurfaceHolder mSurfaceHolder; mSurfaceHolder = MediaFrameworkTest.mSurfaceView.getHolder(); File stressOutFile = new File(MEDIA_STRESS_OUTPUT); Writer output = new BufferedWriter(new FileWriter(stressOutFile, true)); output.write("Camera start preview stress:\n"); - output.write("Total number of loops:" + + output.write("Total number of loops:" + NUMBER_OF_CAMERA_STRESS_LOOPS + "\n"); - try { + try { Log.v(TAG, "Start preview"); output.write("No of loop: "); + for (int i = 0; i< NUMBER_OF_CAMERA_STRESS_LOOPS; i++){ - mCamera = Camera.open(); + synchronized (lock) { + initializeCameraMessageLooper(); + try { + lock.wait(WAIT_FOR_COMMAND_TO_COMPLETE); + } catch(Exception e) { + Log.v(TAG, "wait was interrupted."); + } + } + mCamera.setErrorCallback(mCameraErrorCallback); mCamera.setPreviewDisplay(mSurfaceHolder); mCamera.startPreview(); Thread.sleep(WAIT_TIME_CAMERA_TEST); mCamera.stopPreview(); - mCamera.release(); + terminateCameraMessageLooper(); output.write(" ," + i); } } catch (Exception e) { - Log.v(TAG, e.toString()); + assertTrue("CameraStressTest", false); + Log.v(TAG, e.toString()); } output.write("\n\n"); output.close(); } - + //Test case for stressing the camera preview. 
@LargeTest public void testStressRecorder() throws Exception { String filename; - SurfaceHolder mSurfaceHolder; + SurfaceHolder mSurfaceHolder; mSurfaceHolder = MediaFrameworkTest.mSurfaceView.getHolder(); File stressOutFile = new File(MEDIA_STRESS_OUTPUT); Writer output = new BufferedWriter(new FileWriter(stressOutFile, true)); @@ -108,12 +205,20 @@ public class MediaRecorderStressTest extends ActivityInstrumentationTestCase2<Me output.write("No of loop: "); Log.v(TAG, "Start preview"); for (int i = 0; i < NUMBER_OF_RECORDER_STRESS_LOOPS; i++){ + synchronized (recorderlock) { + initializeRecorderMessageLooper(); + try { + recorderlock.wait(WAIT_FOR_COMMAND_TO_COMPLETE); + } catch(Exception e) { + Log.v(TAG, "wait was interrupted."); + } + } Log.v(TAG, "counter = " + i); filename = OUTPUT_FILE + i + OUTPUT_FILE_EXT; Log.v(TAG, filename); - mRecorder = new MediaRecorder(); + mRecorder.setOnErrorListener(mRecorderErrorCallback); mRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA); - mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP); + mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP); mRecorder.setOutputFile(filename); mRecorder.setVideoFrameRate(20); mRecorder.setVideoSize(176,144); @@ -125,47 +230,63 @@ public class MediaRecorderStressTest extends ActivityInstrumentationTestCase2<Me Log.v(TAG, "prepare"); mRecorder.prepare(); Log.v(TAG, "before release"); - Thread.sleep(WAIT_TIME_RECORDER_TEST); + Thread.sleep(WAIT_TIME_RECORDER_TEST); mRecorder.reset(); - mRecorder.release(); + terminateRecorderMessageLooper(); output.write(", " + i); } } catch (Exception e) { - Log.v(TAG, e.toString()); + assertTrue("Recorder Stress test", false); + Log.v(TAG, e.toString()); } output.write("\n\n"); output.close(); } - - + //Stress test case for switching camera and video recorder preview. 
@LargeTest public void testStressCameraSwitchRecorder() throws Exception { String filename; - SurfaceHolder mSurfaceHolder; + SurfaceHolder mSurfaceHolder; mSurfaceHolder = MediaFrameworkTest.mSurfaceView.getHolder(); File stressOutFile = new File(MEDIA_STRESS_OUTPUT); Writer output = new BufferedWriter(new FileWriter(stressOutFile, true)); output.write("Camera and video recorder preview switching\n"); output.write("Total number of loops:" + NUMBER_OF_SWTICHING_LOOPS_BW_CAMERA_AND_RECORDER + "\n"); - try { + try { Log.v(TAG, "Start preview"); output.write("No of loop: "); for (int i = 0; i < NUMBER_OF_SWTICHING_LOOPS_BW_CAMERA_AND_RECORDER; i++){ - mCamera = Camera.open(); + synchronized (lock) { + initializeCameraMessageLooper(); + try { + lock.wait(WAIT_FOR_COMMAND_TO_COMPLETE); + } catch(Exception e) { + Log.v(TAG, "wait was interrupted."); + } + } + mCamera.setErrorCallback(mCameraErrorCallback); mCamera.setPreviewDisplay(mSurfaceHolder); mCamera.startPreview(); Thread.sleep(WAIT_TIME_CAMERA_TEST); mCamera.stopPreview(); - mCamera.release(); + terminateCameraMessageLooper(); mCamera = null; Log.v(TAG, "release camera"); filename = OUTPUT_FILE + i + OUTPUT_FILE_EXT; Log.v(TAG, filename); - mRecorder = new MediaRecorder(); + synchronized (recorderlock) { + initializeRecorderMessageLooper(); + try { + recorderlock.wait(WAIT_FOR_COMMAND_TO_COMPLETE); + } catch(Exception e) { + Log.v(TAG, "wait was interrupted."); + } + } + mRecorder.setOnErrorListener(mRecorderErrorCallback); mRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA); - mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP); + mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP); mRecorder.setOutputFile(filename); mRecorder.setVideoFrameRate(20); mRecorder.setVideoSize(176,144); @@ -176,23 +297,24 @@ public class MediaRecorderStressTest extends ActivityInstrumentationTestCase2<Me Log.v(TAG, "prepare"); mRecorder.prepare(); Log.v(TAG, "before release"); - 
Thread.sleep(WAIT_TIME_CAMERA_TEST); - mRecorder.release(); + Thread.sleep(WAIT_TIME_CAMERA_TEST); + terminateRecorderMessageLooper(); Log.v(TAG, "release video recorder"); output.write(", " + i); } } catch (Exception e) { + assertTrue("Camer and recorder switch mode", false); Log.v(TAG, e.toString()); } output.write("\n\n"); output.close(); } - + //Stress test case for record a video and play right away. @LargeTest public void testStressRecordVideoAndPlayback() throws Exception { String filename; - SurfaceHolder mSurfaceHolder; + SurfaceHolder mSurfaceHolder; mSurfaceHolder = MediaFrameworkTest.mSurfaceView.getHolder(); File stressOutFile = new File(MEDIA_STRESS_OUTPUT); Writer output = new BufferedWriter(new FileWriter(stressOutFile, true)); @@ -204,10 +326,18 @@ public class MediaRecorderStressTest extends ActivityInstrumentationTestCase2<Me for (int i = 0; i < NUMBER_OF_RECORDERANDPLAY_STRESS_LOOPS; i++){ filename = OUTPUT_FILE + i + OUTPUT_FILE_EXT; Log.v(TAG, filename); - mRecorder = new MediaRecorder(); + synchronized (recorderlock) { + initializeRecorderMessageLooper(); + try { + recorderlock.wait(WAIT_FOR_COMMAND_TO_COMPLETE); + } catch(Exception e) { + Log.v(TAG, "wait was interrupted."); + } + } + mRecorder.setOnErrorListener(mRecorderErrorCallback); mRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA); mRecorder.setAudioSource(MediaRecorder.AudioSource.MIC); - mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP); + mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP); mRecorder.setOutputFile(filename); mRecorder.setVideoFrameRate(20); mRecorder.setVideoSize(352,288); @@ -216,11 +346,11 @@ public class MediaRecorderStressTest extends ActivityInstrumentationTestCase2<Me Log.v(TAG, "mediaRecorder setPreview"); mRecorder.setPreviewDisplay(mSurfaceHolder.getSurface()); mRecorder.prepare(); - mRecorder.start(); + mRecorder.start(); Thread.sleep(WAIT_TIME_RECORD); Log.v(TAG, "Before stop"); mRecorder.stop(); - mRecorder.release(); + 
terminateRecorderMessageLooper(); //start the playback MediaPlayer mp = new MediaPlayer(); mp.setDataSource(filename); @@ -232,10 +362,10 @@ public class MediaRecorderStressTest extends ActivityInstrumentationTestCase2<Me output.write(", " + i); } } catch (Exception e) { + assertTrue("record and playback", false); Log.v(TAG, e.toString()); } output.write("\n\n"); output.close(); - } + } } - diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/unit/MediaMetadataRetrieverTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/unit/MediaMetadataRetrieverTest.java index 61a8a296d318..87500982bd67 100644 --- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/unit/MediaMetadataRetrieverTest.java +++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/unit/MediaMetadataRetrieverTest.java @@ -22,6 +22,7 @@ import android.graphics.Bitmap; import java.io.FileOutputStream; import android.test.AndroidTestCase; import com.android.mediaframeworktest.MediaNames; +import com.android.mediaframeworktest.MediaProfileReader; import android.test.suitebuilder.annotation.*; /** @@ -38,10 +39,19 @@ public class MediaMetadataRetrieverTest extends AndroidTestCase { public static void testAlbumArt() throws Exception { Log.v(TAG, "testAlbumArt starts."); MediaMetadataRetriever retriever = new MediaMetadataRetriever(); + MediaProfileReader reader = new MediaProfileReader(); + boolean supportWMA = reader.getWMAEnable(); + boolean supportWMV = reader.getWMVEnable(); retriever.setMode(MediaMetadataRetriever.MODE_GET_METADATA_ONLY); for (int i = 0, n = MediaNames.ALBUMART_TEST_FILES.length; i < n; ++i) { try { Log.v(TAG, "File " + i + ": " + MediaNames.ALBUMART_TEST_FILES[i]); + if ((MediaNames.ALBUMART_TEST_FILES[i].endsWith(".wma") && !supportWMA) || + (MediaNames.ALBUMART_TEST_FILES[i].endsWith(".wmv") && !supportWMV) + ) { + Log.v(TAG, "windows media is not supported and thus we will skip the test for this file"); 
+ continue; + } retriever.setDataSource(MediaNames.ALBUMART_TEST_FILES[i]); byte[] albumArt = retriever.extractAlbumArt(); @@ -61,15 +71,23 @@ public class MediaMetadataRetrieverTest extends AndroidTestCase { } // Test frame capture - // Suppressing until 1259652 is fixed. - @Suppress - public static void disableTestThumbnailCapture() throws Exception { + @LargeTest + public static void testThumbnailCapture() throws Exception { MediaMetadataRetriever retriever = new MediaMetadataRetriever(); + MediaProfileReader reader = new MediaProfileReader(); + boolean supportWMA = reader.getWMAEnable(); + boolean supportWMV = reader.getWMVEnable(); Log.v(TAG, "Thumbnail processing starts"); long startedAt = System.currentTimeMillis(); for(int i = 0, n = MediaNames.THUMBNAIL_CAPTURE_TEST_FILES.length; i < n; ++i) { try { Log.v(TAG, "File " + i + ": " + MediaNames.THUMBNAIL_CAPTURE_TEST_FILES[i]); + if ((MediaNames.THUMBNAIL_CAPTURE_TEST_FILES[i].endsWith(".wma") && !supportWMA) || + (MediaNames.THUMBNAIL_CAPTURE_TEST_FILES[i].endsWith(".wmv") && !supportWMV) + ) { + Log.v(TAG, "windows media is not supported and thus we will skip the test for this file"); + continue; + } retriever.setDataSource(MediaNames.THUMBNAIL_CAPTURE_TEST_FILES[i]); Bitmap bitmap = retriever.captureFrame(); assertTrue(bitmap != null); @@ -92,10 +110,20 @@ public class MediaMetadataRetrieverTest extends AndroidTestCase { @LargeTest public static void testMetadataRetrieval() throws Exception { + MediaProfileReader reader = new MediaProfileReader(); + boolean supportWMA = reader.getWMAEnable(); + boolean supportWMV = reader.getWMVEnable(); MediaMetadataRetriever retriever = new MediaMetadataRetriever(); retriever.setMode(MediaMetadataRetriever.MODE_GET_METADATA_ONLY); for(int i = 0, n = MediaNames.METADATA_RETRIEVAL_TEST_FILES.length; i < n; ++i) { try { + Log.v(TAG, "File " + i + ": " + MediaNames.METADATA_RETRIEVAL_TEST_FILES[i]); + if ((MediaNames.METADATA_RETRIEVAL_TEST_FILES[i].endsWith(".wma") && 
!supportWMA) || + (MediaNames.METADATA_RETRIEVAL_TEST_FILES[i].endsWith(".wmv") && !supportWMV) + ) { + Log.v(TAG, "windows media is not supported and thus we will skip the test for this file"); + continue; + } retriever.setDataSource(MediaNames.METADATA_RETRIEVAL_TEST_FILES[i]); extractAllSupportedMetadataValues(retriever); } catch(Exception e) { diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/unit/MediaPlayerMetadataParserTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/unit/MediaPlayerMetadataParserTest.java new file mode 100644 index 000000000000..38f598a6abc9 --- /dev/null +++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/unit/MediaPlayerMetadataParserTest.java @@ -0,0 +1,432 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.android.mediaframeworktest.unit; +import android.media.Metadata; +import android.os.Parcel; +import android.test.AndroidTestCase; +import android.test.suitebuilder.annotation.SmallTest; +import android.util.Log; + +import java.util.Calendar; +import java.util.Date; + +/* + * Check the Java layer that parses serialized metadata in Parcel + * works as expected. 
+ * + */ + +public class MediaPlayerMetadataParserTest extends AndroidTestCase { + private static final String TAG = "MediaPlayerMetadataTest"; + private static final int kMarker = 0x4d455441; // 'M' 'E' 'T' 'A' + private static final int kHeaderSize = 8; + + private Metadata mMetadata = null; + private Parcel mParcel = null; + + @Override + protected void setUp() throws Exception { + super.setUp(); + mMetadata = new Metadata(); + mParcel = Parcel.obtain(); + + resetParcel(); + } + + // Check parsing of the parcel fails. Make sure the parser rewind + // the parcel properly. + private void assertParseFail() throws Exception { + mParcel.setDataPosition(0); + assertFalse(mMetadata.parse(mParcel)); + assertEquals(0, mParcel.dataPosition()); + } + + // Check parsing of the parcel is successful. + private void assertParse() throws Exception { + mParcel.setDataPosition(0); + assertTrue(mMetadata.parse(mParcel)); + } + + // Write the number of bytes from the start of the parcel to the + // current position at the beginning of the parcel (offset 0). + private void adjustSize() { + adjustSize(0); + } + + // Write the number of bytes from the offset to the current + // position at position pointed by offset. + private void adjustSize(int offset) { + final int pos = mParcel.dataPosition(); + + mParcel.setDataPosition(offset); + mParcel.writeInt(pos - offset); + mParcel.setDataPosition(pos); + } + + // Rewind the parcel and insert the header. + private void resetParcel() { + mParcel.setDataPosition(0); + // Most tests will use a properly formed parcel with a size + // and the meta marker so we add them by default. + mParcel.writeInt(-1); // Placeholder for the size + mParcel.writeInt(kMarker); + } + + // ---------------------------------------------------------------------- + // START OF THE TESTS + + + // There should be at least 8 bytes in the parcel, 4 for the size + // and 4 for the 'M' 'E' 'T' 'A' marker. 
+ @SmallTest + public void testMissingSizeAndMarker() throws Exception { + for (int i = 0; i < kHeaderSize; ++i) { + mParcel.setDataPosition(0); + mParcel.setDataSize(i); + + assertEquals(i, mParcel.dataAvail()); + assertParseFail(); + } + } + + // There should be at least 'size' bytes in the parcel. + @SmallTest + public void testMissingData() throws Exception { + final int size = 20; + + mParcel.writeInt(size); + mParcel.setDataSize(size - 1); + assertParseFail(); + } + + // Empty parcel is fine + @SmallTest + public void testEmptyIsOk() throws Exception { + adjustSize(); + assertParse(); + } + + // ---------------------------------------------------------------------- + // RECORDS + // ---------------------------------------------------------------------- + + // A record header should be at least 12 bytes long + @SmallTest + public void testRecordMissingId() throws Exception { + mParcel.writeInt(13); // record length + // misses metadata id and metadata type. + adjustSize(); + assertParseFail(); + } + + @SmallTest + public void testRecordMissingType() throws Exception { + mParcel.writeInt(13); // record length lies + mParcel.writeInt(Metadata.TITLE); + // misses metadata type + adjustSize(); + assertParseFail(); + } + + @SmallTest + public void testRecordWithZeroPayload() throws Exception { + mParcel.writeInt(0); + adjustSize(); + assertParseFail(); + } + + // A record cannot be empty. + @SmallTest + public void testRecordMissingPayload() throws Exception { + mParcel.writeInt(12); + mParcel.writeInt(Metadata.TITLE); + mParcel.writeInt(Metadata.STRING_VAL); + // misses payload + adjustSize(); + assertParseFail(); + } + + // Check records can be found. 
+ @SmallTest + public void testRecordsFound() throws Exception { + writeStringRecord(Metadata.TITLE, "a title"); + writeStringRecord(Metadata.GENRE, "comedy"); + writeStringRecord(Metadata.firstCustomId(), "custom"); + adjustSize(); + assertParse(); + assertTrue(mMetadata.has(Metadata.TITLE)); + assertTrue(mMetadata.has(Metadata.GENRE)); + assertTrue(mMetadata.has(Metadata.firstCustomId())); + assertFalse(mMetadata.has(Metadata.DRM_CRIPPLED)); + assertEquals(3, mMetadata.keySet().size()); + } + + // Detects bad metadata type + @SmallTest + public void testBadMetadataType() throws Exception { + final int start = mParcel.dataPosition(); + mParcel.writeInt(-1); // Placeholder for the length + mParcel.writeInt(Metadata.TITLE); + mParcel.writeInt(0); // Invalid type. + mParcel.writeString("dummy"); + adjustSize(start); + + adjustSize(); + assertParseFail(); + } + + // Check a Metadata instance can be reused, i.e the parse method + // wipes out the existing states/keys. + @SmallTest + public void testParseClearState() throws Exception { + writeStringRecord(Metadata.TITLE, "a title"); + writeStringRecord(Metadata.GENRE, "comedy"); + writeStringRecord(Metadata.firstCustomId(), "custom"); + adjustSize(); + assertParse(); + + resetParcel(); + writeStringRecord(Metadata.MIME_TYPE, "audio/mpg"); + adjustSize(); + assertParse(); + + // Only the mime type metadata should be present. 
+ assertEquals(1, mMetadata.keySet().size()); + assertTrue(mMetadata.has(Metadata.MIME_TYPE)); + + assertFalse(mMetadata.has(Metadata.TITLE)); + assertFalse(mMetadata.has(Metadata.GENRE)); + assertFalse(mMetadata.has(Metadata.firstCustomId())); + } + + // ---------------------------------------------------------------------- + // GETTERS + // ---------------------------------------------------------------------- + + // getString + @SmallTest + public void testGetString() throws Exception { + writeStringRecord(Metadata.TITLE, "a title"); + writeStringRecord(Metadata.GENRE, "comedy"); + adjustSize(); + assertParse(); + + assertEquals("a title", mMetadata.getString(Metadata.TITLE)); + assertEquals("comedy", mMetadata.getString(Metadata.GENRE)); + } + + // get an empty string. + @SmallTest + public void testGetEmptyString() throws Exception { + writeStringRecord(Metadata.TITLE, ""); + adjustSize(); + assertParse(); + + assertEquals("", mMetadata.getString(Metadata.TITLE)); + } + + // get a string when a NULL value was in the parcel + @SmallTest + public void testGetNullString() throws Exception { + writeStringRecord(Metadata.TITLE, null); + adjustSize(); + assertParse(); + + assertEquals(null, mMetadata.getString(Metadata.TITLE)); + } + + // get a string when an integer is actually present + @SmallTest + public void testWrongType() throws Exception { + writeIntRecord(Metadata.DURATION, 5); + adjustSize(); + assertParse(); + + try { + mMetadata.getString(Metadata.DURATION); + } catch (IllegalStateException ise) { + return; + } + fail("Exception was not thrown"); + } + + // getInt + @SmallTest + public void testGetInt() throws Exception { + writeIntRecord(Metadata.CD_TRACK_NUM, 1); + adjustSize(); + assertParse(); + + assertEquals(1, mMetadata.getInt(Metadata.CD_TRACK_NUM)); + } + + // getBoolean + @SmallTest + public void testGetBoolean() throws Exception { + writeBooleanRecord(Metadata.DRM_CRIPPLED, true); + adjustSize(); + assertParse(); + + assertEquals(true, 
mMetadata.getBoolean(Metadata.DRM_CRIPPLED)); + } + + // getLong + @SmallTest + public void testGetLong() throws Exception { + writeLongRecord(Metadata.DURATION, 1L); + adjustSize(); + assertParse(); + + assertEquals(1L, mMetadata.getLong(Metadata.DURATION)); + } + + // getDouble + @SmallTest + public void testGetDouble() throws Exception { + writeDoubleRecord(Metadata.VIDEO_FRAME_RATE, 29.97); + adjustSize(); + assertParse(); + + assertEquals(29.97, mMetadata.getDouble(Metadata.VIDEO_FRAME_RATE)); + } + + // getByteArray + @SmallTest + public void testGetByteArray() throws Exception { + byte data[] = new byte[]{1,2,3,4,5}; + + writeByteArrayRecord(Metadata.ALBUM_ART, data); + adjustSize(); + assertParse(); + + byte res[] = mMetadata.getByteArray(Metadata.ALBUM_ART); + for (int i = 0; i < data.length; ++i) { + assertEquals(data[i], res[i]); + } + } + + // getDate + @SmallTest + public void testGetDate() throws Exception { + writeDateRecord(Metadata.DATE, 0, "PST"); + adjustSize(); + assertParse(); + + assertEquals(new Date(0), mMetadata.getDate(Metadata.DATE)); + } + + // getTimedText + @SmallTest + public void testGetTimedText() throws Exception { + Date now = Calendar.getInstance().getTime(); + writeTimedTextRecord(Metadata.CAPTION, now.getTime(), + 10, "Some caption"); + adjustSize(); + assertParse(); + + Metadata.TimedText caption = mMetadata.getTimedText(Metadata.CAPTION); + assertEquals("" + now + "-" + 10 + ":Some caption", caption.toString()); + } + + // ---------------------------------------------------------------------- + // HELPERS TO APPEND RECORDS + // ---------------------------------------------------------------------- + + // Insert a string record at the current position. 
+ private void writeStringRecord(int metadataId, String val) { + final int start = mParcel.dataPosition(); + mParcel.writeInt(-1); // Placeholder for the length + mParcel.writeInt(metadataId); + mParcel.writeInt(Metadata.STRING_VAL); + mParcel.writeString(val); + adjustSize(start); + } + + // Insert an int record at the current position. + private void writeIntRecord(int metadataId, int val) { + final int start = mParcel.dataPosition(); + mParcel.writeInt(-1); // Placeholder for the length + mParcel.writeInt(metadataId); + mParcel.writeInt(Metadata.INTEGER_VAL); + mParcel.writeInt(val); + adjustSize(start); + } + + // Insert a boolean record at the current position. + private void writeBooleanRecord(int metadataId, boolean val) { + final int start = mParcel.dataPosition(); + mParcel.writeInt(-1); // Placeholder for the length + mParcel.writeInt(metadataId); + mParcel.writeInt(Metadata.BOOLEAN_VAL); + mParcel.writeInt(val ? 1 : 0); + adjustSize(start); + } + + // Insert a Long record at the current position. + private void writeLongRecord(int metadataId, long val) { + final int start = mParcel.dataPosition(); + mParcel.writeInt(-1); // Placeholder for the length + mParcel.writeInt(metadataId); + mParcel.writeInt(Metadata.LONG_VAL); + mParcel.writeLong(val); + adjustSize(start); + } + + // Insert a Double record at the current position. + private void writeDoubleRecord(int metadataId, double val) { + final int start = mParcel.dataPosition(); + mParcel.writeInt(-1); // Placeholder for the length + mParcel.writeInt(metadataId); + mParcel.writeInt(Metadata.DOUBLE_VAL); + mParcel.writeDouble(val); + adjustSize(start); + } + + // Insert a ByteArray record at the current position. 
+ private void writeByteArrayRecord(int metadataId, byte[] val) { + final int start = mParcel.dataPosition(); + mParcel.writeInt(-1); // Placeholder for the length + mParcel.writeInt(metadataId); + mParcel.writeInt(Metadata.BYTE_ARRAY_VAL); + mParcel.writeByteArray(val); + adjustSize(start); + } + + // Insert a Date record at the current position. + private void writeDateRecord(int metadataId, long time, String tz) { + final int start = mParcel.dataPosition(); + mParcel.writeInt(-1); // Placeholder for the length + mParcel.writeInt(metadataId); + mParcel.writeInt(Metadata.DATE_VAL); + mParcel.writeLong(time); + mParcel.writeString(tz); + adjustSize(start); + } + + // Insert a TimedText record at the current position. + private void writeTimedTextRecord(int metadataId, long begin, + int duration, String text) { + final int start = mParcel.dataPosition(); + mParcel.writeInt(-1); // Placeholder for the length + mParcel.writeInt(metadataId); + mParcel.writeInt(Metadata.TIMED_TEXT_VAL); + mParcel.writeLong(begin); + mParcel.writeInt(duration); + mParcel.writeString(text); + adjustSize(start); + } +} diff --git a/media/tests/players/Android.mk b/media/tests/players/Android.mk new file mode 100644 index 000000000000..eb50a514cba9 --- /dev/null +++ b/media/tests/players/Android.mk @@ -0,0 +1,29 @@ +# Copyright (C) 2009 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +LOCAL_PATH:= $(call my-dir) + +include $(CLEAR_VARS) + +LOCAL_SRC_FILES:= invoke_mock_media_player.cpp + +LOCAL_SHARED_LIBRARIES:= \ + libbinder \ + libutils + +LOCAL_MODULE:= invoke_mock_media_player +LOCAL_MODULE_TAGS := test eng +LOCAL_PRELINK_MODULE:= false + +include $(BUILD_SHARED_LIBRARY) diff --git a/media/tests/players/README b/media/tests/players/README new file mode 100644 index 000000000000..edf9bd634286 --- /dev/null +++ b/media/tests/players/README @@ -0,0 +1,8 @@ +Native test players for system tests. + +For functional/system/performance tests, a native test player can be used. +This directory contains the sources of such players. +The class TestPlayerStub uses the dynamic loader to load any of them. + + + diff --git a/media/tests/players/invoke_mock_media_player.cpp b/media/tests/players/invoke_mock_media_player.cpp new file mode 100644 index 000000000000..77bb5b291710 --- /dev/null +++ b/media/tests/players/invoke_mock_media_player.cpp @@ -0,0 +1,121 @@ +/* + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "TestPlayerStub" +#include "utils/Log.h" + +#include <string.h> + +#include <binder/Parcel.h> +#include <media/MediaPlayerInterface.h> +#include <utils/Errors.h> + +using android::INVALID_OPERATION; +using android::ISurface; +using android::MediaPlayerBase; +using android::OK; +using android::Parcel; +using android::SortedVector; +using android::TEST_PLAYER; +using android::UNKNOWN_ERROR; +using android::player_type; +using android::sp; +using android::status_t; + +// This file contains a test player that is loaded via the +// TestPlayerStub class. The player contains various implementation +// of the invoke method that java tests can use. + +namespace { +const char *kPing = "ping"; + +class Player: public MediaPlayerBase +{ + public: + enum TestType {TEST_UNKNOWN, PING}; + Player() {} + virtual ~Player() {} + + virtual status_t initCheck() {return OK;} + virtual bool hardwareOutput() {return true;} + + virtual status_t setDataSource(const char *url) { + LOGV("setDataSource %s", url); + mTest = TEST_UNKNOWN; + if (strncmp(url, kPing, strlen(kPing)) == 0) { + mTest = PING; + } + return OK; + } + + virtual status_t setDataSource(int fd, int64_t offset, int64_t length) {return OK;} + virtual status_t setVideoSurface(const sp<ISurface>& surface) {return OK;} + virtual status_t prepare() {return OK;} + virtual status_t prepareAsync() {return OK;} + virtual status_t start() {return OK;} + virtual status_t stop() {return OK;} + virtual status_t pause() {return OK;} + virtual bool isPlaying() {return true;} + virtual status_t seekTo(int msec) {return OK;} + virtual status_t getCurrentPosition(int *msec) {return OK;} + virtual status_t getDuration(int *msec) {return OK;} + virtual status_t reset() {return OK;} + virtual status_t setLooping(int loop) {return OK;} + virtual player_type playerType() {return TEST_PLAYER;} + virtual status_t invoke(const Parcel& request, Parcel *reply); + + private: + // Take a request, copy it to 
the reply. + void ping(const Parcel& request, Parcel *reply); + + status_t mStatus; + TestType mTest; +}; + +status_t Player::invoke(const Parcel& request, Parcel *reply) +{ + switch (mTest) { + case PING: + ping(request, reply); + break; + default: mStatus = UNKNOWN_ERROR; + } + return mStatus; +} + +void Player::ping(const Parcel& request, Parcel *reply) +{ + const size_t len = request.dataAvail(); + + reply->setData(static_cast<const uint8_t*>(request.readInplace(len)), len); + mStatus = OK; +} + +} + +extern "C" android::MediaPlayerBase* newPlayer() +{ + LOGD("New invoke test player"); + return new Player(); +} + +extern "C" android::status_t deletePlayer(android::MediaPlayerBase *player) +{ + LOGD("Delete invoke test player"); + delete player; + return OK; +} |