-rw-r--r--  core/java/android/app/LoaderManager.java | 2
-rw-r--r--  core/java/android/preference/Preference.java | 2
-rw-r--r--  core/java/android/widget/DatePicker.java | 2
-rw-r--r--  core/java/android/widget/ExpandableListView.java | 7
-rw-r--r--  core/res/res/layout/preference.xml | 27
-rw-r--r--  core/res/res/layout/preference_category.xml | 1
-rw-r--r--  core/res/res/layout/preference_category_holo.xml | 22
-rw-r--r--  core/res/res/layout/preference_child_holo.xml | 75
-rw-r--r--  core/res/res/layout/preference_holo.xml | 76
-rw-r--r--  core/res/res/layout/preference_information.xml | 11
-rw-r--r--  core/res/res/layout/preference_information_holo.xml | 64
-rw-r--r--  core/res/res/values/styles.xml | 50
-rw-r--r--  core/res/res/values/themes.xml | 42
-rw-r--r--  include/media/stagefright/AudioPlayer.h | 1
-rw-r--r--  include/surfaceflinger/Surface.h | 6
-rw-r--r--  libs/hwui/Patch.cpp | 27
-rw-r--r--  libs/hwui/Patch.h | 6
-rw-r--r--  media/java/android/media/AudioService.java | 42
-rwxr-xr-x  media/java/android/media/videoeditor/AudioTrack.java | 1128
-rwxr-xr-x  media/java/android/media/videoeditor/Effect.java | 370
-rwxr-xr-x  media/java/android/media/videoeditor/EffectColor.java | 261
-rwxr-xr-x  media/java/android/media/videoeditor/EffectKenBurns.java | 216
-rwxr-xr-x [-rw-r--r--]  media/java/android/media/videoeditor/ExtractAudioWaveformProgressListener.java | 1
-rwxr-xr-x  media/java/android/media/videoeditor/MediaArtistNativeHelper.java | 4028
-rwxr-xr-x  media/java/android/media/videoeditor/MediaImageItem.java | 1525
-rwxr-xr-x  media/java/android/media/videoeditor/MediaItem.java | 1285
-rwxr-xr-x  media/java/android/media/videoeditor/MediaProperties.java | 571
-rwxr-xr-x  media/java/android/media/videoeditor/MediaVideoItem.java | 1168
-rwxr-xr-x  media/java/android/media/videoeditor/Overlay.java | 412
-rwxr-xr-x  media/java/android/media/videoeditor/OverlayFrame.java | 397
-rwxr-xr-x  media/java/android/media/videoeditor/Transition.java | 693
-rwxr-xr-x  media/java/android/media/videoeditor/TransitionAlpha.java | 334
-rwxr-xr-x  media/java/android/media/videoeditor/TransitionCrossfade.java | 122
-rwxr-xr-x  media/java/android/media/videoeditor/TransitionFadeBlack.java | 122
-rwxr-xr-x  media/java/android/media/videoeditor/TransitionSliding.java | 177
-rwxr-xr-x  media/java/android/media/videoeditor/VideoEditor.java | 1139
-rwxr-xr-x  media/java/android/media/videoeditor/VideoEditorFactory.java | 20
-rwxr-xr-x [-rw-r--r--]  media/java/android/media/videoeditor/VideoEditorImpl.java | 1796
-rwxr-xr-x [-rw-r--r--]  media/java/android/media/videoeditor/WaveformData.java | 85
-rwxr-xr-x  media/jni/mediaeditor/Android.mk | 92
-rwxr-xr-x  media/jni/mediaeditor/VideoBrowserInternal.h | 131
-rwxr-xr-x  media/jni/mediaeditor/VideoBrowserMain.c | 593
-rwxr-xr-x  media/jni/mediaeditor/VideoBrowserMain.h | 163
-rwxr-xr-x  media/jni/mediaeditor/VideoEditorClasses.cpp | 3174
-rwxr-xr-x  media/jni/mediaeditor/VideoEditorClasses.h | 589
-rwxr-xr-x  media/jni/mediaeditor/VideoEditorJava.cpp | 885
-rwxr-xr-x  media/jni/mediaeditor/VideoEditorJava.h | 506
-rwxr-xr-x  media/jni/mediaeditor/VideoEditorLogging.h | 55
-rwxr-xr-x  media/jni/mediaeditor/VideoEditorMain.cpp | 3056
-rwxr-xr-x  media/jni/mediaeditor/VideoEditorMain.h | 69
-rwxr-xr-x  media/jni/mediaeditor/VideoEditorOsal.cpp | 359
-rwxr-xr-x  media/jni/mediaeditor/VideoEditorOsal.h | 65
-rwxr-xr-x  media/jni/mediaeditor/VideoEditorPropertiesMain.cpp | 502
-rwxr-xr-x  media/jni/mediaeditor/VideoEditorThumbnailMain.cpp | 330
-rwxr-xr-x  media/jni/mediaeditor/VideoEditorThumbnailMain.h | 76
-rw-r--r--  media/libstagefright/include/AwesomePlayer.h | 1
56 files changed, 22213 insertions, 4746 deletions
diff --git a/core/java/android/app/LoaderManager.java b/core/java/android/app/LoaderManager.java
index 5f8c0984982d..1ef011c31646 100644
--- a/core/java/android/app/LoaderManager.java
+++ b/core/java/android/app/LoaderManager.java
@@ -353,7 +353,7 @@ class LoaderManagerImpl extends LoaderManager {
// Notify of the new data so the app can switch out the old data before
// we try to destroy it.
- if (mData != data) {
+ if (data == null || mData != data) {
mData = data;
if (mStarted) {
callOnLoadFinished(loader, data);
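With this change, onLoadFinished() is also delivered when the loader returns null, so client callbacks get a chance to clear out stale data. A minimal, hypothetical LoaderCallbacks sketch for illustration (the class and adapter names are not from this patch) that tolerates a null result:

import android.app.Activity;
import android.app.LoaderManager;
import android.content.CursorLoader;
import android.content.Loader;
import android.database.Cursor;
import android.os.Bundle;
import android.provider.ContactsContract;
import android.widget.CursorAdapter;

// Illustrative callbacks for a CursorLoader-backed list; with the fix above,
// onLoadFinished() also runs when the loader delivers null, so the old cursor
// is swapped out either way.
class ContactsCallbacks implements LoaderManager.LoaderCallbacks<Cursor> {
    private final Activity mActivity;
    private final CursorAdapter mAdapter;

    ContactsCallbacks(Activity activity, CursorAdapter adapter) {
        mActivity = activity;
        mAdapter = adapter;
    }

    public Loader<Cursor> onCreateLoader(int id, Bundle args) {
        return new CursorLoader(mActivity, ContactsContract.Contacts.CONTENT_URI,
                null, null, null, null);
    }

    public void onLoadFinished(Loader<Cursor> loader, Cursor data) {
        mAdapter.swapCursor(data);   // data may be null: clears the previous result
    }

    public void onLoaderReset(Loader<Cursor> loader) {
        mAdapter.swapCursor(null);
    }
}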
diff --git a/core/java/android/preference/Preference.java b/core/java/android/preference/Preference.java
index e869f3f8cbc5..7d37e5bdf11d 100644
--- a/core/java/android/preference/Preference.java
+++ b/core/java/android/preference/Preference.java
@@ -285,7 +285,7 @@ public class Preference implements Comparable<Preference>, OnDependencyChangeLis
* @see #Preference(Context, AttributeSet, int)
*/
public Preference(Context context, AttributeSet attrs) {
- this(context, attrs, 0);
+ this(context, attrs, com.android.internal.R.attr.preferenceStyle);
}
/**
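The two-argument constructor now forwards the preferenceStyle theme attribute instead of 0, following the standard constructor-chaining idiom, so XML-inflated preferences pick up themed defaults. A hypothetical app-side subclass showing the same pattern with the public android.R.attr.preferenceStyle (the framework class itself uses the internal resource id):

import android.content.Context;
import android.preference.Preference;
import android.util.AttributeSet;

// Illustrative subclass following the same chaining idiom: each shorter
// constructor forwards to the next, ending with a themed default style
// attribute rather than 0.
public class IconPreference extends Preference {
    public IconPreference(Context context) {
        this(context, null);
    }

    public IconPreference(Context context, AttributeSet attrs) {
        // Apps use the public android.R.attr.preferenceStyle; the framework
        // class references com.android.internal.R.attr.preferenceStyle.
        this(context, attrs, android.R.attr.preferenceStyle);
    }

    public IconPreference(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
    }
}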
diff --git a/core/java/android/widget/DatePicker.java b/core/java/android/widget/DatePicker.java
index dbcf1e90cfd6..4d63cf4e30a1 100644
--- a/core/java/android/widget/DatePicker.java
+++ b/core/java/android/widget/DatePicker.java
@@ -471,6 +471,8 @@ public class DatePicker extends FrameLayout {
*/
public void init(int year, int monthOfYear, int dayOfMonth,
OnDateChangedListener onDateChangedListener) {
+ // make sure there is no callback
+ mOnDateChangedListener = null;
updateDate(year, monthOfYear, dayOfMonth);
// register the callback after updating the date
mOnDateChangedListener = onDateChangedListener;
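init() now clears any previously registered listener before resetting the date, so the programmatic updateDate() call cannot fire a callback registered by an earlier init(). A hypothetical usage sketch (the activity and dates are illustrative):

import android.app.Activity;
import android.os.Bundle;
import android.widget.DatePicker;

// Illustrative re-initialization: the listener registered by the first init()
// is no longer fired while the second init() resets the date programmatically.
public class PickerActivity extends Activity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        DatePicker picker = new DatePicker(this);
        setContentView(picker);

        DatePicker.OnDateChangedListener listener = new DatePicker.OnDateChangedListener() {
            public void onDateChanged(DatePicker view, int year, int monthOfYear, int dayOfMonth) {
                // Only genuine date changes should land here.
            }
        };

        picker.init(2011, 0, 15, listener);   // first initialization
        picker.init(2011, 5, 1, listener);    // re-init without a spurious callback
    }
}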
diff --git a/core/java/android/widget/ExpandableListView.java b/core/java/android/widget/ExpandableListView.java
index 472a33521b53..8279ee5e9ccd 100644
--- a/core/java/android/widget/ExpandableListView.java
+++ b/core/java/android/widget/ExpandableListView.java
@@ -211,7 +211,7 @@ public class ExpandableListView extends ListView {
.getDimensionPixelSize(com.android.internal.R.styleable.ExpandableListView_indicatorLeft, 0);
mIndicatorRight = a
.getDimensionPixelSize(com.android.internal.R.styleable.ExpandableListView_indicatorRight, 0);
- if (mIndicatorRight == 0) {
+ if (mIndicatorRight == 0 && mGroupIndicator != null) {
mIndicatorRight = mIndicatorLeft + mGroupIndicator.getIntrinsicWidth();
}
mChildIndicatorLeft = a.getDimensionPixelSize(
@@ -1022,8 +1022,11 @@ public class ExpandableListView extends ListView {
*/
public void setGroupIndicator(Drawable groupIndicator) {
mGroupIndicator = groupIndicator;
+ if (mIndicatorRight == 0 && mGroupIndicator != null) {
+ mIndicatorRight = mIndicatorLeft + mGroupIndicator.getIntrinsicWidth();
+ }
}
-
+
/**
* Sets the drawing bounds for the indicators (at minimum, the group indicator
* is affected by this; the child indicator is affected by this if the
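setGroupIndicator() now recomputes the right indicator bound from the drawable's intrinsic width when no explicit bound was set, so indicators assigned from code are sized correctly. A hypothetical snippet for illustration (the drawable is a placeholder):

import android.app.Activity;
import android.graphics.drawable.Drawable;
import android.widget.ExpandableListView;

// Illustrative runtime assignment of a group indicator; the right bound is
// derived from the drawable's intrinsic width when it was not set in XML.
public class GroupsActivity extends Activity {
    void applyIndicator(ExpandableListView list) {
        Drawable indicator =
                getResources().getDrawable(android.R.drawable.arrow_down_float);
        list.setGroupIndicator(indicator);
    }
}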
diff --git a/core/res/res/layout/preference.xml b/core/res/res/layout/preference.xml
index 6bd5efafdb6c..1f92252b2d8d 100644
--- a/core/res/res/layout/preference.xml
+++ b/core/res/res/layout/preference.xml
@@ -24,36 +24,30 @@
android:gravity="center_vertical"
android:paddingRight="?android:attr/scrollbarSize">
- <LinearLayout
+ <ImageView
+ android:id="@+android:id/icon"
android:layout_width="wrap_content"
- android:layout_height="match_parent"
- android:minWidth="@dimen/preference_widget_width"
- android:gravity="center"
- android:orientation="horizontal">
- <ImageView
- android:id="@+android:id/icon"
- android:layout_width="wrap_content"
- android:layout_height="wrap_content"
- android:layout_gravity="center"
- />
- </LinearLayout>
+ android:layout_height="wrap_content"
+ android:layout_gravity="center"
+ />
<RelativeLayout
android:layout_width="wrap_content"
android:layout_height="wrap_content"
+ android:layout_marginLeft="15dip"
android:layout_marginRight="6dip"
android:layout_marginTop="6dip"
android:layout_marginBottom="6dip"
android:layout_weight="1">
-
+
<TextView android:id="@+android:id/title"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:singleLine="true"
- android:textAppearance="?android:attr/textAppearanceMedium"
+ android:textAppearance="?android:attr/textAppearanceLarge"
android:ellipsize="marquee"
android:fadingEdge="horizontal" />
-
+
<TextView android:id="@+android:id/summary"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
@@ -69,8 +63,7 @@
<LinearLayout android:id="@+android:id/widget_frame"
android:layout_width="wrap_content"
android:layout_height="match_parent"
- android:minWidth="@dimen/preference_widget_width"
- android:gravity="center"
+ android:gravity="center_vertical"
android:orientation="vertical" />
</LinearLayout>
diff --git a/core/res/res/layout/preference_category.xml b/core/res/res/layout/preference_category.xml
index 7ffdc9af1f7a..280d952ac512 100644
--- a/core/res/res/layout/preference_category.xml
+++ b/core/res/res/layout/preference_category.xml
@@ -18,5 +18,4 @@
<TextView xmlns:android="http://schemas.android.com/apk/res/android"
style="?android:attr/listSeparatorTextViewStyle"
android:id="@+android:id/title"
- android:paddingLeft="32dp"
/>
diff --git a/core/res/res/layout/preference_category_holo.xml b/core/res/res/layout/preference_category_holo.xml
new file mode 100644
index 000000000000..5fe8b28f18bc
--- /dev/null
+++ b/core/res/res/layout/preference_category_holo.xml
@@ -0,0 +1,22 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2011 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<!-- Layout used for PreferenceCategory in a PreferenceActivity. -->
+<TextView xmlns:android="http://schemas.android.com/apk/res/android"
+ style="?android:attr/listSeparatorTextViewStyle"
+ android:id="@+android:id/title"
+ android:paddingLeft="32dp"
+/>
diff --git a/core/res/res/layout/preference_child_holo.xml b/core/res/res/layout/preference_child_holo.xml
new file mode 100644
index 000000000000..2e70d77cdc03
--- /dev/null
+++ b/core/res/res/layout/preference_child_holo.xml
@@ -0,0 +1,75 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2011 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<!-- Layout for a visually child-like Preference in a PreferenceActivity. -->
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+ android:layout_width="match_parent"
+ android:layout_height="wrap_content"
+ android:minHeight="?android:attr/listPreferredItemHeight"
+ android:gravity="center_vertical"
+ android:paddingLeft="16dip"
+ android:paddingRight="?android:attr/scrollbarSize">
+
+ <LinearLayout
+ android:layout_width="wrap_content"
+ android:layout_height="match_parent"
+ android:minWidth="@dimen/preference_widget_width"
+ android:gravity="center"
+ android:orientation="horizontal">
+ <ImageView
+ android:id="@+android:id/icon"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_gravity="center"
+ />
+ </LinearLayout>
+
+ <RelativeLayout
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_marginRight="6dip"
+ android:layout_marginTop="6dip"
+ android:layout_marginBottom="6dip"
+ android:layout_weight="1">
+
+ <TextView android:id="@+android:id/title"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:singleLine="true"
+ android:textAppearance="?android:attr/textAppearanceMedium"
+ android:ellipsize="marquee"
+ android:fadingEdge="horizontal" />
+
+ <TextView android:id="@+android:id/summary"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_below="@android:id/title"
+ android:layout_alignLeft="@android:id/title"
+ android:textAppearance="?android:attr/textAppearanceSmall"
+ android:textColor="?android:attr/textColorSecondary"
+ android:maxLines="4" />
+
+ </RelativeLayout>
+
+ <!-- Preference should place its actual preference widget here. -->
+ <LinearLayout android:id="@+android:id/widget_frame"
+ android:layout_width="wrap_content"
+ android:layout_height="match_parent"
+ android:minWidth="@dimen/preference_widget_width"
+ android:gravity="center"
+ android:orientation="vertical" />
+
+</LinearLayout>
diff --git a/core/res/res/layout/preference_holo.xml b/core/res/res/layout/preference_holo.xml
new file mode 100644
index 000000000000..c448f6432784
--- /dev/null
+++ b/core/res/res/layout/preference_holo.xml
@@ -0,0 +1,76 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2011 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<!-- Layout for a Preference in a PreferenceActivity. The
+ Preference is able to place a specific widget for its particular
+ type in the "widget_frame" layout. -->
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+ android:layout_width="match_parent"
+ android:layout_height="wrap_content"
+ android:minHeight="?android:attr/listPreferredItemHeight"
+ android:gravity="center_vertical"
+ android:paddingRight="?android:attr/scrollbarSize">
+
+ <LinearLayout
+ android:layout_width="wrap_content"
+ android:layout_height="match_parent"
+ android:minWidth="@dimen/preference_widget_width"
+ android:gravity="center"
+ android:orientation="horizontal">
+ <ImageView
+ android:id="@+android:id/icon"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_gravity="center"
+ />
+ </LinearLayout>
+
+ <RelativeLayout
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_marginRight="6dip"
+ android:layout_marginTop="6dip"
+ android:layout_marginBottom="6dip"
+ android:layout_weight="1">
+
+ <TextView android:id="@+android:id/title"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:singleLine="true"
+ android:textAppearance="?android:attr/textAppearanceMedium"
+ android:ellipsize="marquee"
+ android:fadingEdge="horizontal" />
+
+ <TextView android:id="@+android:id/summary"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_below="@android:id/title"
+ android:layout_alignLeft="@android:id/title"
+ android:textAppearance="?android:attr/textAppearanceSmall"
+ android:textColor="?android:attr/textColorSecondary"
+ android:maxLines="4" />
+
+ </RelativeLayout>
+
+ <!-- Preference should place its actual preference widget here. -->
+ <LinearLayout android:id="@+android:id/widget_frame"
+ android:layout_width="wrap_content"
+ android:layout_height="match_parent"
+ android:minWidth="@dimen/preference_widget_width"
+ android:gravity="center"
+ android:orientation="vertical" />
+
+</LinearLayout>
diff --git a/core/res/res/layout/preference_information.xml b/core/res/res/layout/preference_information.xml
index 9c9b83ebf3bb..32cbb9055301 100644
--- a/core/res/res/layout/preference_information.xml
+++ b/core/res/res/layout/preference_information.xml
@@ -24,13 +24,10 @@
android:gravity="center_vertical"
android:paddingRight="?android:attr/scrollbarSize">
- <View
- android:layout_width="@dimen/preference_widget_width"
- android:layout_height="match_parent" />
-
<RelativeLayout
android:layout_width="wrap_content"
android:layout_height="wrap_content"
+ android:layout_marginLeft="16sp"
android:layout_marginRight="6sp"
android:layout_marginTop="6sp"
android:layout_marginBottom="6sp"
@@ -40,9 +37,9 @@
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:singleLine="true"
- android:textAppearance="?android:attr/textAppearanceMedium"
+ android:textAppearance="?android:attr/textAppearanceLarge"
android:textColor="?android:attr/textColorSecondary" />
-
+
<TextView android:id="@+android:id/summary"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
@@ -53,7 +50,7 @@
android:maxLines="2" />
</RelativeLayout>
-
+
<!-- Preference should place its actual preference widget here. -->
<LinearLayout android:id="@+android:id/widget_frame"
android:layout_width="wrap_content"
diff --git a/core/res/res/layout/preference_information_holo.xml b/core/res/res/layout/preference_information_holo.xml
new file mode 100644
index 000000000000..d6cc063efa3e
--- /dev/null
+++ b/core/res/res/layout/preference_information_holo.xml
@@ -0,0 +1,64 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2011 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<!-- Layout for a Preference in a PreferenceActivity. The
+ Preference is able to place a specific widget for its particular
+ type in the "widget_frame" layout. -->
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+ android:layout_width="match_parent"
+ android:layout_height="wrap_content"
+ android:minHeight="?android:attr/listPreferredItemHeight"
+ android:gravity="center_vertical"
+ android:paddingRight="?android:attr/scrollbarSize">
+
+ <View
+ android:layout_width="@dimen/preference_widget_width"
+ android:layout_height="match_parent" />
+
+ <RelativeLayout
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_marginRight="6sp"
+ android:layout_marginTop="6sp"
+ android:layout_marginBottom="6sp"
+ android:layout_weight="1">
+
+ <TextView android:id="@+android:id/title"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:singleLine="true"
+ android:textAppearance="?android:attr/textAppearanceMedium"
+ android:textColor="?android:attr/textColorSecondary" />
+
+ <TextView android:id="@+android:id/summary"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_below="@android:id/title"
+ android:layout_alignLeft="@android:id/title"
+ android:textAppearance="?android:attr/textAppearanceSmall"
+ android:textColor="?android:attr/textColorSecondary"
+ android:maxLines="2" />
+
+ </RelativeLayout>
+
+ <!-- Preference should place its actual preference widget here. -->
+ <LinearLayout android:id="@+android:id/widget_frame"
+ android:layout_width="wrap_content"
+ android:layout_height="match_parent"
+ android:gravity="center_vertical"
+ android:orientation="vertical" />
+
+</LinearLayout>
diff --git a/core/res/res/values/styles.xml b/core/res/res/values/styles.xml
index c4e815e86ebc..a3660470ca19 100644
--- a/core/res/res/values/styles.xml
+++ b/core/res/res/values/styles.xml
@@ -889,22 +889,66 @@
<item name="android:positiveButtonText">@android:string/ok</item>
<item name="android:negativeButtonText">@android:string/cancel</item>
</style>
-
+
<style name="Preference.DialogPreference.YesNoPreference">
<item name="android:positiveButtonText">@android:string/yes</item>
<item name="android:negativeButtonText">@android:string/no</item>
</style>
-
+
<style name="Preference.DialogPreference.EditTextPreference">
<item name="android:dialogLayout">@android:layout/preference_dialog_edittext</item>
</style>
-
+
<style name="Preference.RingtonePreference">
<item name="android:ringtoneType">ringtone</item>
<item name="android:showSilent">true</item>
<item name="android:showDefault">true</item>
</style>
+ <style name="Preference.Holo">
+ <item name="android:layout">@android:layout/preference_holo</item>
+ </style>
+
+ <style name="Preference.Holo.Information">
+ <item name="android:layout">@android:layout/preference_information_holo</item>
+ <item name="android:enabled">false</item>
+ <item name="android:shouldDisableView">false</item>
+ </style>
+
+ <style name="Preference.Holo.Category">
+ <item name="android:layout">@android:layout/preference_category_holo</item>
+ <!-- The title should not dim if the category is disabled, instead only the preference children should dim. -->
+ <item name="android:shouldDisableView">false</item>
+ <item name="android:selectable">false</item>
+ </style>
+
+ <style name="Preference.Holo.CheckBoxPreference">
+ <item name="android:widgetLayout">@android:layout/preference_widget_checkbox</item>
+ </style>
+
+ <style name="Preference.Holo.PreferenceScreen">
+ </style>
+
+ <style name="Preference.Holo.DialogPreference">
+ <item name="android:positiveButtonText">@android:string/ok</item>
+ <item name="android:negativeButtonText">@android:string/cancel</item>
+ </style>
+
+ <style name="Preference.Holo.DialogPreference.YesNoPreference">
+ <item name="android:positiveButtonText">@android:string/yes</item>
+ <item name="android:negativeButtonText">@android:string/no</item>
+ </style>
+
+ <style name="Preference.Holo.DialogPreference.EditTextPreference">
+ <item name="android:dialogLayout">@android:layout/preference_dialog_edittext</item>
+ </style>
+
+ <style name="Preference.Holo.RingtonePreference">
+ <item name="android:ringtoneType">ringtone</item>
+ <item name="android:showSilent">true</item>
+ <item name="android:showDefault">true</item>
+ </style>
+
<!-- No margins or background by default. Could be different for x-large screens -->
<style name="PreferencePanel">
</style>
diff --git a/core/res/res/values/themes.xml b/core/res/res/values/themes.xml
index 744d0d8ffef5..506dd0756998 100644
--- a/core/res/res/values/themes.xml
+++ b/core/res/res/values/themes.xml
@@ -924,18 +924,18 @@
<item name="quickContactBadgeStyleSmallWindowLarge">@android:style/Widget.Holo.QuickContactBadgeSmall.WindowLarge</item>
<item name="listPopupWindowStyle">@android:style/Widget.Holo.ListPopupWindow</item>
<item name="popupMenuStyle">@android:style/Widget.Holo.PopupMenu</item>
-
+
<!-- Preference styles -->
- <item name="preferenceScreenStyle">@android:style/Preference.PreferenceScreen</item>
- <item name="preferenceCategoryStyle">@android:style/Preference.Category</item>
- <item name="preferenceStyle">@android:style/Preference</item>
- <item name="preferenceInformationStyle">@android:style/Preference.Information</item>
- <item name="checkBoxPreferenceStyle">@android:style/Preference.CheckBoxPreference</item>
- <item name="yesNoPreferenceStyle">@android:style/Preference.DialogPreference.YesNoPreference</item>
- <item name="dialogPreferenceStyle">@android:style/Preference.DialogPreference</item>
- <item name="editTextPreferenceStyle">@android:style/Preference.DialogPreference.EditTextPreference</item>
- <item name="ringtonePreferenceStyle">@android:style/Preference.RingtonePreference</item>
- <item name="preferenceLayoutChild">@android:layout/preference_child</item>
+ <item name="preferenceScreenStyle">@android:style/Preference.Holo.PreferenceScreen</item>
+ <item name="preferenceCategoryStyle">@android:style/Preference.Holo.Category</item>
+ <item name="preferenceStyle">@android:style/Preference.Holo</item>
+ <item name="preferenceInformationStyle">@android:style/Preference.Holo.Information</item>
+ <item name="checkBoxPreferenceStyle">@android:style/Preference.Holo.CheckBoxPreference</item>
+ <item name="yesNoPreferenceStyle">@android:style/Preference.Holo.DialogPreference.YesNoPreference</item>
+ <item name="dialogPreferenceStyle">@android:style/Preference.Holo.DialogPreference</item>
+ <item name="editTextPreferenceStyle">@android:style/Preference.Holo.DialogPreference.EditTextPreference</item>
+ <item name="ringtonePreferenceStyle">@android:style/Preference.Holo.RingtonePreference</item>
+ <item name="preferenceLayoutChild">@android:layout/preference_child_holo</item>
<item name="detailsElementBackground">@android:drawable/panel_bg_holo_dark</item>
<!-- Search widget styles -->
@@ -1187,16 +1187,16 @@
<item name="popupMenuStyle">@android:style/Widget.Holo.Light.PopupMenu</item>
<!-- Preference styles -->
- <item name="preferenceScreenStyle">@android:style/Preference.PreferenceScreen</item>
- <item name="preferenceCategoryStyle">@android:style/Preference.Category</item>
- <item name="preferenceStyle">@android:style/Preference</item>
- <item name="preferenceInformationStyle">@android:style/Preference.Information</item>
- <item name="checkBoxPreferenceStyle">@android:style/Preference.CheckBoxPreference</item>
- <item name="yesNoPreferenceStyle">@android:style/Preference.DialogPreference.YesNoPreference</item>
- <item name="dialogPreferenceStyle">@android:style/Preference.DialogPreference</item>
- <item name="editTextPreferenceStyle">@android:style/Preference.DialogPreference.EditTextPreference</item>
- <item name="ringtonePreferenceStyle">@android:style/Preference.RingtonePreference</item>
- <item name="preferenceLayoutChild">@android:layout/preference_child</item>
+ <item name="preferenceScreenStyle">@android:style/Preference.Holo.PreferenceScreen</item>
+ <item name="preferenceCategoryStyle">@android:style/Preference.Holo.Category</item>
+ <item name="preferenceStyle">@android:style/Preference.Holo</item>
+ <item name="preferenceInformationStyle">@android:style/Preference.Holo.Information</item>
+ <item name="checkBoxPreferenceStyle">@android:style/Preference.Holo.CheckBoxPreference</item>
+ <item name="yesNoPreferenceStyle">@android:style/Preference.Holo.DialogPreference.YesNoPreference</item>
+ <item name="dialogPreferenceStyle">@android:style/Preference.Holo.DialogPreference</item>
+ <item name="editTextPreferenceStyle">@android:style/Preference.Holo.DialogPreference.EditTextPreference</item>
+ <item name="ringtonePreferenceStyle">@android:style/Preference.Holo.RingtonePreference</item>
+ <item name="preferenceLayoutChild">@android:layout/preference_child_holo</item>
<item name="detailsElementBackground">@android:drawable/panel_bg_holo_light</item>
<!-- Search widget styles -->
diff --git a/include/media/stagefright/AudioPlayer.h b/include/media/stagefright/AudioPlayer.h
index 37af0326ab10..d12ee9c5e237 100644
--- a/include/media/stagefright/AudioPlayer.h
+++ b/include/media/stagefright/AudioPlayer.h
@@ -65,6 +65,7 @@ public:
bool reachedEOS(status_t *finalStatus);
private:
+ friend class VideoEditorAudioPlayer;
sp<MediaSource> mSource;
AudioTrack *mAudioTrack;
diff --git a/include/surfaceflinger/Surface.h b/include/surfaceflinger/Surface.h
index 553205286408..d783caff2a7f 100644
--- a/include/surfaceflinger/Surface.h
+++ b/include/surfaceflinger/Surface.h
@@ -100,6 +100,9 @@ private:
friend class MediaPlayer;
// for testing
friend class Test;
+ // videoEditor preview classes
+ friend class VideoEditorPreviewController;
+
const sp<ISurface>& getISurface() const { return mSurface; }
@@ -181,6 +184,9 @@ private:
friend class SoftwareRenderer;
// this is just to be able to write some unit tests
friend class Test;
+ // videoEditor preview classes
+ friend class VideoEditorPreviewController;
+ friend class PreviewRenderer;
private:
friend class SurfaceComposerClient;
diff --git a/libs/hwui/Patch.cpp b/libs/hwui/Patch.cpp
index 77cbb80fa4da..535c0ac3093c 100644
--- a/libs/hwui/Patch.cpp
+++ b/libs/hwui/Patch.cpp
@@ -154,7 +154,8 @@ void Patch::updateVertices(const float bitmapWidth, const float bitmapHeight,
float y1 = 0.0f;
float v1 = 0.0f;
- for (uint32_t i = 0; i < mYCount; i++) {
+ uint32_t i = 0;
+ for ( ; i < mYCount; i++) {
float stepY = mYDivs[i];
float y2 = 0.0f;
@@ -166,7 +167,8 @@ void Patch::updateVertices(const float bitmapWidth, const float bitmapHeight,
}
float v2 = fmax(0.0f, stepY - 0.5f) / bitmapHeight;
- generateRow(vertex, y1, y2, v1, v2, stretchX, right - left, bitmapWidth, quadCount);
+ generateRow(vertex, y1, y2, v1, v2, stretchX, right - left,
+ bitmapWidth, quadCount, i & 1);
y1 = y2;
v1 = (stepY + 0.5f) / bitmapHeight;
@@ -175,7 +177,7 @@ void Patch::updateVertices(const float bitmapWidth, const float bitmapHeight,
}
generateRow(vertex, y1, bottom - top, v1, 1.0f, stretchX, right - left,
- bitmapWidth, quadCount);
+ bitmapWidth, quadCount, i & 1);
if (verticesCount > 0) {
Caches::getInstance().bindMeshBuffer(meshBuffer);
@@ -193,14 +195,15 @@ void Patch::updateVertices(const float bitmapWidth, const float bitmapHeight,
}
void Patch::generateRow(TextureVertex*& vertex, float y1, float y2, float v1, float v2,
- float stretchX, float width, float bitmapWidth, uint32_t& quadCount) {
+ float stretchX, float width, float bitmapWidth, uint32_t& quadCount, bool isStretch) {
float previousStepX = 0.0f;
float x1 = 0.0f;
float u1 = 0.0f;
// Generate the row quad by quad
- for (uint32_t i = 0; i < mXCount; i++) {
+ uint32_t i = 0;
+ for ( ; i < mXCount; i++) {
float stepX = mXDivs[i];
float x2 = 0.0f;
@@ -212,7 +215,7 @@ void Patch::generateRow(TextureVertex*& vertex, float y1, float y2, float v1, fl
}
float u2 = fmax(0.0f, stepX - 0.5f) / bitmapWidth;
- bool valid = generateQuad(vertex, x1, y1, x2, y2, u1, v1, u2, v2, quadCount);
+ generateQuad(vertex, x1, y1, x2, y2, u1, v1, u2, v2, quadCount, isStretch || (i & 1));
x1 = x2;
u1 = (stepX + 0.5f) / bitmapWidth;
@@ -220,13 +223,13 @@ void Patch::generateRow(TextureVertex*& vertex, float y1, float y2, float v1, fl
previousStepX = stepX;
}
- generateQuad(vertex, x1, y1, width, y2, u1, v1, 1.0f, v2, quadCount);
+ generateQuad(vertex, x1, y1, width, y2, u1, v1, 1.0f, v2, quadCount, isStretch || (i & 1));
}
-bool Patch::generateQuad(TextureVertex*& vertex, float x1, float y1, float x2, float y2,
- float u1, float v1, float u2, float v2, uint32_t& quadCount) {
+void Patch::generateQuad(TextureVertex*& vertex, float x1, float y1, float x2, float y2,
+ float u1, float v1, float u2, float v2, uint32_t& quadCount, bool isStretch) {
const uint32_t oldQuadCount = quadCount;
- const bool valid = x2 >= x1 && y2 >= y1;
+ const bool valid = isStretch || (x2 - x1 > 0.9999f && y2 - y1 > 0.9999f);
if (valid) {
quadCount++;
}
@@ -238,7 +241,7 @@ bool Patch::generateQuad(TextureVertex*& vertex, float x1, float y1, float x2, f
PATCH_LOGD(" left, top = %.2f, %.2f\t\tu1, v1 = %.2f, %.2f", x1, y1, u1, v1);
PATCH_LOGD(" right, bottom = %.2f, %.2f\t\tu2, v2 = %.2f, %.2f", x2, y2, u2, v2);
#endif
- return false;
+ return;
}
#if RENDER_LAYERS_AS_REGIONS
@@ -267,8 +270,6 @@ bool Patch::generateQuad(TextureVertex*& vertex, float x1, float y1, float x2, f
PATCH_LOGD(" left, top = %.2f, %.2f\t\tu1, v1 = %.2f, %.2f", x1, y1, u1, v1);
PATCH_LOGD(" right, bottom = %.2f, %.2f\t\tu2, v2 = %.2f, %.2f", x2, y2, u2, v2);
#endif
-
- return true;
}
}; // namespace uirenderer
diff --git a/libs/hwui/Patch.h b/libs/hwui/Patch.h
index 45ce99842c20..4de0c760d6c1 100644
--- a/libs/hwui/Patch.h
+++ b/libs/hwui/Patch.h
@@ -70,11 +70,11 @@ private:
void generateRow(TextureVertex*& vertex, float y1, float y2,
float v1, float v2, float stretchX, float width, float bitmapWidth,
- uint32_t& quadCount);
- bool generateQuad(TextureVertex*& vertex,
+ uint32_t& quadCount, bool isStretch);
+ void generateQuad(TextureVertex*& vertex,
float x1, float y1, float x2, float y2,
float u1, float v1, float u2, float v2,
- uint32_t& quadCount);
+ uint32_t& quadCount, bool isStretch);
}; // struct Patch
}; // namespace uirenderer
diff --git a/media/java/android/media/AudioService.java b/media/java/android/media/AudioService.java
index 6a79384c5926..e404b05e77b6 100644
--- a/media/java/android/media/AudioService.java
+++ b/media/java/android/media/AudioService.java
@@ -109,6 +109,7 @@ public class AudioService extends IAudioService.Stub {
private static final int MSG_MEDIA_SERVER_STARTED = 6;
private static final int MSG_PLAY_SOUND_EFFECT = 7;
private static final int MSG_BTA2DP_DOCK_TIMEOUT = 8;
+ private static final int MSG_LOAD_SOUND_EFFECTS = 9;
private static final int BTA2DP_DOCK_TIMEOUT_MILLIS = 8000;
@@ -930,6 +931,7 @@ public class AudioService extends IAudioService.Stub {
* this indicates we have a valid sample loaded for this effect.
*/
+ int lastSample = 0;
for (int effect = 0; effect < AudioManager.NUM_SOUND_EFFECTS; effect++) {
// Do not load sample if this effect uses the MediaPlayer
if (SOUND_EFFECT_FILES_MAP[effect][1] == 0) {
@@ -940,23 +942,32 @@ public class AudioService extends IAudioService.Stub {
+ SOUND_EFFECTS_PATH
+ SOUND_EFFECT_FILES[SOUND_EFFECT_FILES_MAP[effect][0]];
int sampleId = mSoundPool.load(filePath, 0);
- SOUND_EFFECT_FILES_MAP[effect][1] = sampleId;
- poolId[SOUND_EFFECT_FILES_MAP[effect][0]] = sampleId;
if (sampleId <= 0) {
Log.w(TAG, "Soundpool could not load file: "+filePath);
+ } else {
+ SOUND_EFFECT_FILES_MAP[effect][1] = sampleId;
+ poolId[SOUND_EFFECT_FILES_MAP[effect][0]] = sampleId;
+ lastSample = sampleId;
}
- mSoundPoolCallBack.setLastSample(sampleId);
} else {
SOUND_EFFECT_FILES_MAP[effect][1] = poolId[SOUND_EFFECT_FILES_MAP[effect][0]];
}
}
// wait for all samples to be loaded
- try {
- mSoundEffectsLock.wait();
- status = mSoundPoolCallBack.status();
- } catch (java.lang.InterruptedException e) {
+ if (lastSample != 0) {
+ mSoundPoolCallBack.setLastSample(lastSample);
+
+ try {
+ mSoundEffectsLock.wait();
+ status = mSoundPoolCallBack.status();
+ } catch (java.lang.InterruptedException e) {
+ Log.w(TAG, "Interrupted while waiting sound pool callback.");
+ status = -1;
+ }
+ } else {
status = -1;
}
+
if (mSoundPoolLooper != null) {
mSoundPoolLooper.quit();
mSoundPoolLooper = null;
@@ -965,8 +976,14 @@ public class AudioService extends IAudioService.Stub {
if (status != 0) {
Log.w(TAG,
"loadSoundEffects(), Error "
- + mSoundPoolCallBack.status()
+ + ((lastSample != 0) ? mSoundPoolCallBack.status() : -1)
+ " while loading samples");
+ for (int effect = 0; effect < AudioManager.NUM_SOUND_EFFECTS; effect++) {
+ if (SOUND_EFFECT_FILES_MAP[effect][1] > 0) {
+ SOUND_EFFECT_FILES_MAP[effect][1] = -1;
+ }
+ }
+
mSoundPool.release();
mSoundPool = null;
}
@@ -985,6 +1002,7 @@ public class AudioService extends IAudioService.Stub {
return;
}
+ mAudioHandler.removeMessages(MSG_LOAD_SOUND_EFFECTS);
mAudioHandler.removeMessages(MSG_PLAY_SOUND_EFFECT);
int[] poolId = new int[SOUND_EFFECT_FILES.length];
@@ -1049,7 +1067,6 @@ public class AudioService extends IAudioService.Stub {
mStatus = status;
}
if (sampleId == mLastSample) {
- Log.e(TAG, "onLoadComplete last sample loaded");
mSoundEffectsLock.notify();
}
}
@@ -1918,6 +1935,10 @@ public class AudioService extends IAudioService.Stub {
AudioSystem.setParameters("restarting=false");
break;
+ case MSG_LOAD_SOUND_EFFECTS:
+ loadSoundEffects();
+ break;
+
case MSG_PLAY_SOUND_EFFECT:
playSoundEffect(msg.arg1, msg.arg2);
break;
@@ -2245,7 +2266,8 @@ public class AudioService extends IAudioService.Stub {
}
} else if (action.equals(Intent.ACTION_BOOT_COMPLETED)) {
mBootCompleted = true;
- loadSoundEffects();
+ sendMsg(mAudioHandler, MSG_LOAD_SOUND_EFFECTS, SHARED_MSG, SENDMSG_NOOP,
+ 0, 0, null, 0);
Intent newIntent = new Intent(AudioManager.ACTION_SCO_AUDIO_STATE_CHANGED);
newIntent.putExtra(AudioManager.EXTRA_SCO_AUDIO_STATE,
AudioManager.SCO_AUDIO_STATE_DISCONNECTED);
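The sound-effect loading path now blocks on the lock only when at least one sample was actually queued in the SoundPool, and the load is deferred to the handler instead of running directly in the broadcast receiver. A standalone sketch of that wait-for-load pattern using only the public SoundPool API (the class name and fields are illustrative, not the framework code):

import android.media.AudioManager;
import android.media.SoundPool;

// Standalone sketch of the wait-for-load pattern the patch hardens: the caller
// only waits when at least one sample was actually queued, and the listener
// signals once the last queued sample reports completion.
public class EffectLoader {
    private final Object mLock = new Object();
    private int mLastSample;   // 0 means nothing was queued
    private int mStatus = 1;   // non-zero until the last sample reports success

    public boolean load(String[] paths) throws InterruptedException {
        SoundPool pool = new SoundPool(4, AudioManager.STREAM_SYSTEM, 0);
        pool.setOnLoadCompleteListener(new SoundPool.OnLoadCompleteListener() {
            public void onLoadComplete(SoundPool soundPool, int sampleId, int status) {
                synchronized (mLock) {
                    if (sampleId == mLastSample) {
                        mStatus = status;   // 0 indicates success
                        mLock.notify();
                    }
                }
            }
        });

        synchronized (mLock) {
            for (String path : paths) {
                int sampleId = pool.load(path, 0);
                if (sampleId > 0) {
                    mLastSample = sampleId;   // remember the last sample that queued
                }
            }
            if (mLastSample == 0) {
                pool.release();
                return false;   // nothing queued, do not wait on the lock
            }
            mLock.wait();       // released by onLoadComplete() for the last sample
            return mStatus == 0;
        }
    }
}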
diff --git a/media/java/android/media/videoeditor/AudioTrack.java b/media/java/android/media/videoeditor/AudioTrack.java
index e95ef3522a56..9e68a5dfc6d3 100755
--- a/media/java/android/media/videoeditor/AudioTrack.java
+++ b/media/java/android/media/videoeditor/AudioTrack.java
@@ -1,481 +1,647 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.videoeditor;
-
-import java.io.IOException;
-import java.lang.ref.SoftReference;
-
-/**
- * This class allows to handle an audio track. This audio file is mixed with the
- * audio samples of the media items.
- * {@hide}
- */
-public class AudioTrack {
- // Instance variables
- private final String mUniqueId;
- private final String mFilename;
- private long mStartTimeMs;
- private long mTimelineDurationMs;
- private int mVolumePercent;
- private long mBeginBoundaryTimeMs;
- private long mEndBoundaryTimeMs;
- private boolean mLoop;
- private boolean mMuted;
-
- private final long mDurationMs;
- private final int mAudioChannels;
- private final int mAudioType;
- private final int mAudioBitrate;
- private final int mAudioSamplingFrequency;
-
- // Ducking variables
- private int mDuckingThreshold;
- private int mDuckedTrackVolume;
- private boolean mIsDuckingEnabled;
-
- // The audio waveform filename
- private String mAudioWaveformFilename;
- // The audio waveform data
- private SoftReference<WaveformData> mWaveformData;
-
- /**
- * An object of this type cannot be instantiated by using the default
- * constructor
- */
- @SuppressWarnings("unused")
- private AudioTrack() throws IOException {
- this(null, null, null);
- }
-
- /**
- * Constructor
- *
- * @param editor The video editor reference
- * @param audioTrackId The audio track id
- * @param filename The absolute file name
- *
- * @throws IOException if file is not found
- * @throws IllegalArgumentException if file format is not supported or if
- * the codec is not supported
- */
- public AudioTrack(VideoEditor editor, String audioTrackId, String filename)
- throws IOException {
- mUniqueId = audioTrackId;
- mFilename = filename;
- mStartTimeMs = 0;
- // TODO: This value represents to the duration of the audio file
- mDurationMs = 300000;
- // TODO: This value needs to be read from the audio track of the source
- // file
- mAudioChannels = 2;
- mAudioType = MediaProperties.ACODEC_AAC_LC;
- mAudioBitrate = 128000;
- mAudioSamplingFrequency = 44100;
-
- mTimelineDurationMs = mDurationMs;
- mVolumePercent = 100;
-
- // Play the entire audio track
- mBeginBoundaryTimeMs = 0;
- mEndBoundaryTimeMs = mDurationMs;
-
- // By default loop is disabled
- mLoop = false;
-
- // By default the audio track is not muted
- mMuted = false;
-
- // Ducking is enabled by default
- mDuckingThreshold = 0;
- mDuckedTrackVolume = 0;
- mIsDuckingEnabled = false;
-
- // The audio waveform file is generated later
- mAudioWaveformFilename = null;
- mWaveformData = null;
- }
-
- /**
- * Constructor
- *
- * @param editor The video editor reference
- * @param audioTrackId The audio track id
- * @param filename The audio filename
- * @param startTimeMs the start time in milliseconds (relative to the
- * timeline)
- * @param beginMs start time in the audio track in milliseconds (relative to
- * the beginning of the audio track)
- * @param endMs end time in the audio track in milliseconds (relative to the
- * beginning of the audio track)
- * @param loop true to loop the audio track
- * @param volume The volume in percentage
- * @param muted true if the audio track is muted
- * @param threshold Ducking will be activated when the relative energy in
- * the media items audio signal goes above this value. The valid
- * range of values is 0 to 100.
- * @param duckedTrackVolume The relative volume of the audio track when ducking
- * is active. The valid range of values is 0 to 100.
- * @param audioWaveformFilename The name of the waveform file
- *
- * @throws IOException if file is not found
- */
- AudioTrack(VideoEditor editor, String audioTrackId, String filename, long startTimeMs,
- long beginMs, long endMs, boolean loop, int volume, boolean muted,
- boolean duckingEnabled, int duckThreshold, int duckedTrackVolume,
- String audioWaveformFilename) throws IOException {
- mUniqueId = audioTrackId;
- mFilename = filename;
- mStartTimeMs = startTimeMs;
-
- // TODO: This value represents to the duration of the audio file
- mDurationMs = 300000;
-
- // TODO: This value needs to be read from the audio track of the source
- // file
- mAudioChannels = 2;
- mAudioType = MediaProperties.ACODEC_AAC_LC;
- mAudioBitrate = 128000;
- mAudioSamplingFrequency = 44100;
-
- mTimelineDurationMs = endMs - beginMs;
- mVolumePercent = volume;
-
- mBeginBoundaryTimeMs = beginMs;
- mEndBoundaryTimeMs = endMs;
-
- mLoop = loop;
- mMuted = muted;
-
- mIsDuckingEnabled = duckingEnabled;
- mDuckingThreshold = duckThreshold;
- mDuckedTrackVolume = duckedTrackVolume;
-
- mAudioWaveformFilename = audioWaveformFilename;
- if (audioWaveformFilename != null) {
- mWaveformData =
- new SoftReference<WaveformData>(new WaveformData(audioWaveformFilename));
- } else {
- mWaveformData = null;
- }
- }
-
- /**
- * @return The id of the audio track
- */
- public String getId() {
- return mUniqueId;
- }
-
- /**
- * Get the filename source for this audio track.
- *
- * @return The filename as an absolute file name
- */
- public String getFilename() {
- return mFilename;
- }
-
- /**
- * @return The number of audio channels in the source of this audio track
- */
- public int getAudioChannels() {
- return mAudioChannels;
- }
-
- /**
- * @return The audio codec of the source of this audio track
- */
- public int getAudioType() {
- return mAudioType;
- }
-
- /**
- * @return The audio sample frequency of the audio track
- */
- public int getAudioSamplingFrequency() {
- return mAudioSamplingFrequency;
- }
-
- /**
- * @return The audio bitrate of the audio track
- */
- public int getAudioBitrate() {
- return mAudioBitrate;
- }
-
- /**
- * Set the volume of this audio track as percentage of the volume in the
- * original audio source file.
- *
- * @param volumePercent Percentage of the volume to apply. If it is set to
- * 0, then volume becomes mute. It it is set to 100, then volume
- * is same as original volume. It it is set to 200, then volume
- * is doubled (provided that volume amplification is supported)
- *
- * @throws UnsupportedOperationException if volume amplification is
- * requested and is not supported.
- */
- public void setVolume(int volumePercent) {
- mVolumePercent = volumePercent;
- }
-
- /**
- * Get the volume of the audio track as percentage of the volume in the
- * original audio source file.
- *
- * @return The volume in percentage
- */
- public int getVolume() {
- return mVolumePercent;
- }
-
- /**
- * @param muted true to mute the audio track
- */
- public void setMute(boolean muted) {
- mMuted = muted;
- }
-
- /**
- * @return true if the audio track is muted
- */
- public boolean isMuted() {
- return mMuted;
- }
-
- /**
- * Set the start time of this audio track relative to the storyboard
- * timeline. Default value is 0.
- *
- * @param startTimeMs the start time in milliseconds
- */
- public void setStartTime(long startTimeMs) {
- mStartTimeMs = startTimeMs;
- }
-
- /**
- * Get the start time of this audio track relative to the storyboard
- * timeline.
- *
- * @return The start time in milliseconds
- */
- public long getStartTime() {
- return mStartTimeMs;
- }
-
- /**
- * @return The duration in milliseconds. This value represents the audio
- * track duration (not looped)
- */
- public long getDuration() {
- return mDurationMs;
- }
-
- /**
- * @return The timeline duration as defined by the begin and end boundaries
- */
- public long getTimelineDuration() {
- return mTimelineDurationMs;
- }
-
- /**
- * Sets the start and end marks for trimming an audio track
- *
- * @param beginMs start time in the audio track in milliseconds (relative to
- * the beginning of the audio track)
- * @param endMs end time in the audio track in milliseconds (relative to the
- * beginning of the audio track)
- */
- public void setExtractBoundaries(long beginMs, long endMs) {
- if (beginMs > mDurationMs) {
- throw new IllegalArgumentException("Invalid start time");
- }
- if (endMs > mDurationMs) {
- throw new IllegalArgumentException("Invalid end time");
- }
-
- mBeginBoundaryTimeMs = beginMs;
- mEndBoundaryTimeMs = endMs;
-
- mTimelineDurationMs = mEndBoundaryTimeMs - mBeginBoundaryTimeMs;
- }
-
- /**
- * @return The boundary begin time
- */
- public long getBoundaryBeginTime() {
- return mBeginBoundaryTimeMs;
- }
-
- /**
- * @return The boundary end time
- */
- public long getBoundaryEndTime() {
- return mEndBoundaryTimeMs;
- }
-
- /**
- * Enable the loop mode for this audio track. Note that only one of the
- * audio tracks in the timeline can have the loop mode enabled. When looping
- * is enabled the samples between mBeginBoundaryTimeMs and
- * mEndBoundaryTimeMs are looped.
- */
- public void enableLoop() {
- mLoop = true;
- }
-
- /**
- * Disable the loop mode
- */
- public void disableLoop() {
- mLoop = false;
- }
-
- /**
- * @return true if looping is enabled
- */
- public boolean isLooping() {
- return mLoop;
- }
-
- /**
- * Disable the audio duck effect
- */
- public void disableDucking() {
- mIsDuckingEnabled = false;
- }
-
- /**
- * Enable ducking by specifying the required parameters
- *
- * @param threshold Ducking will be activated when the energy in
- * the media items audio signal goes above this value. The valid
- * range of values is 0db to 90dB. 0dB is equivalent to disabling
- * ducking.
- * @param duckedTrackVolume The relative volume of the audio track when ducking
- * is active. The valid range of values is 0 to 100.
- */
- public void enableDucking(int threshold, int duckedTrackVolume) {
- if (threshold < 0 || threshold > 90) {
- throw new IllegalArgumentException("Invalid threshold value: " + threshold);
- }
-
- if (duckedTrackVolume < 0 || duckedTrackVolume > 100) {
- throw new IllegalArgumentException("Invalid duckedTrackVolume value: "
- + duckedTrackVolume);
- }
-
- mDuckingThreshold = threshold;
- mDuckedTrackVolume = duckedTrackVolume;
- mIsDuckingEnabled = true;
- }
-
- /**
- * @return true if ducking is enabled
- */
- public boolean isDuckingEnabled() {
- return mIsDuckingEnabled;
- }
-
- /**
- * @return The ducking threshold
- */
- public int getDuckingThreshhold() {
- return mDuckingThreshold;
- }
-
- /**
- * @return The ducked track volume
- */
- public int getDuckedTrackVolume() {
- return mDuckedTrackVolume;
- }
-
- /**
- * This API allows to generate a file containing the sample volume levels of
- * this audio track object. This function may take significant time and is
- * blocking. The filename can be retrieved using getAudioWaveformFilename().
- *
- * @param listener The progress listener
- *
- * @throws IOException if the output file cannot be created
- * @throws IllegalArgumentException if the audio file does not have a valid
- * audio track
- */
- public void extractAudioWaveform(ExtractAudioWaveformProgressListener listener)
- throws IOException {
- // TODO: Set mAudioWaveformFilename at the end once the extract is
- // complete
- mWaveformData = new SoftReference<WaveformData>(new WaveformData(mAudioWaveformFilename));
- }
-
- /**
- * Get the audio waveform file name if extractAudioWaveform was successful.
- * The file format is as following:
- * <ul>
- * <li>first 4 bytes provide the number of samples for each value, as
- * big-endian signed</li>
- * <li>4 following bytes is the total number of values in the file, as
- * big-endian signed</li>
- * <li>then, all values follow as bytes</li>
- * </ul>
- *
- * @return the name of the file, null if the file does not exist
- */
- String getAudioWaveformFilename() {
- return mAudioWaveformFilename;
- }
-
- /**
- * @return The waveform data
- */
- public WaveformData getWaveformData() throws IOException {
- if (mWaveformData == null) {
- return null;
- }
-
- WaveformData waveformData = mWaveformData.get();
- if (waveformData != null) {
- return waveformData;
- } else if (mAudioWaveformFilename != null) {
- waveformData = new WaveformData(mAudioWaveformFilename);
- mWaveformData = new SoftReference<WaveformData>(waveformData);
- return waveformData;
- } else {
- return null;
- }
- }
-
- /*
- * {@inheritDoc}
- */
- @Override
- public boolean equals(Object object) {
- if (!(object instanceof AudioTrack)) {
- return false;
- }
- return mUniqueId.equals(((AudioTrack)object).mUniqueId);
- }
-
- /*
- * {@inheritDoc}
- */
- @Override
- public int hashCode() {
- return mUniqueId.hashCode();
- }
-}
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.videoeditor;
+
+import java.io.File;
+import java.io.IOException;
+import java.lang.ref.SoftReference;
+
+import android.media.videoeditor.MediaArtistNativeHelper.Properties;
+
+/**
+ * This class allows to handle an audio track. This audio file is mixed with the
+ * audio samples of the media items.
+ * {@hide}
+ */
+public class AudioTrack {
+
+ /**
+ * Instance variables
+ * Private object for calling native methods via MediaArtistNativeHelper
+ */
+ private final MediaArtistNativeHelper mMANativeHelper;
+ private final String mUniqueId;
+ private final String mFilename;
+ private long mStartTimeMs;
+ private long mTimelineDurationMs;
+ private int mVolumePercent;
+ private long mBeginBoundaryTimeMs;
+ private long mEndBoundaryTimeMs;
+ private boolean mLoop;
+ private boolean mMuted;
+ private final long mDurationMs;
+ private final int mAudioChannels;
+ private final int mAudioType;
+ private final int mAudioBitrate;
+ private final int mAudioSamplingFrequency;
+
+ /**
+ * Ducking variables
+ */
+ private int mDuckingThreshold;
+ private int mDuckedTrackVolume;
+ private boolean mIsDuckingEnabled;
+
+ /**
+ * The audio waveform filename
+ */
+ private String mAudioWaveformFilename;
+
+ /**
+ * The audio waveform data
+ */
+ private SoftReference<WaveformData> mWaveformData;
+
+ /**
+ * An object of this type cannot be instantiated by using the default
+ * constructor
+ */
+ @SuppressWarnings("unused")
+ private AudioTrack() throws IOException {
+ this(null, null, null);
+ }
+
+ /**
+ * Constructor
+ *
+ * @param editor The video editor reference
+ * @param audioTrackId The audio track id
+ * @param filename The absolute file name
+ *
+ * @throws IOException if file is not found
+ * @throws IllegalArgumentException if file format is not supported or if
+ * the codec is not supported or if editor is not of type
+ * VideoEditorImpl.
+ */
+ public AudioTrack(VideoEditor editor, String audioTrackId, String filename) throws IOException {
+ this(editor, audioTrackId, filename, 0, 0, MediaItem.END_OF_FILE, false, 100, false, false,
+ 0, 0, null);
+ }
+
+ /**
+ * Constructor
+ *
+ * @param editor The video editor reference
+ * @param audioTrackId The audio track id
+ * @param filename The audio filename. In case file contains Audio and Video,
+ * only the Audio stream will be used as Audio Track.
+ * @param startTimeMs the start time in milliseconds (relative to the
+ * timeline)
+ * @param beginMs start time in the audio track in milliseconds (relative to
+ * the beginning of the audio track)
+ * @param endMs end time in the audio track in milliseconds (relative to the
+ * beginning of the audio track)
+ * @param loop true to loop the audio track
+ * @param volume The volume in percentage
+ * @param muted true if the audio track is muted
+ * @param threshold Ducking will be activated when the relative energy in
+ * the media items audio signal goes above this value. The valid
+ * range of values is 0 to 90.
+ * @param duckedTrackVolume The relative volume of the audio track when
+ * ducking is active. The valid range of values is 0 to 100.
+ * @param audioWaveformFilename The name of the waveform file
+ *
+ * @throws IOException if file is not found
+ * @throws IllegalArgumentException if file format is not supported or if
+ * the codec is not supported or if editor is not of type
+ * VideoEditorImpl.
+ */
+ AudioTrack(VideoEditor editor, String audioTrackId, String filename,
+ long startTimeMs,long beginMs, long endMs, boolean loop,
+ int volume, boolean muted,boolean duckingEnabled,
+ int duckThreshold, int duckedTrackVolume,
+ String audioWaveformFilename) throws IOException {
+ Properties properties = null;
+ File file = new File(filename);
+ if (!file.exists()) {
+ throw new IOException(filename + " not found ! ");
+ }
+
+ if (editor instanceof VideoEditorImpl) {
+ mMANativeHelper = ((VideoEditorImpl)editor).getNativeContext();
+ } else {
+ throw new IllegalArgumentException("editor is not of type VideoEditorImpl");
+ }
+ try {
+ properties = mMANativeHelper.getMediaProperties(filename);
+ } catch (Exception e) {
+ throw new IllegalArgumentException("Unsupported file or file not found");
+ }
+ switch (mMANativeHelper.getFileType(properties.fileType)) {
+ case MediaProperties.FILE_3GP:
+ case MediaProperties.FILE_MP4:
+ case MediaProperties.FILE_MP3:
+ break;
+
+ default: {
+ throw new IllegalArgumentException("Unsupported input file type");
+ }
+ }
+ switch (mMANativeHelper.getAudioCodecType(properties.audioFormat)) {
+ case MediaProperties.ACODEC_AMRNB:
+ case MediaProperties.ACODEC_AMRWB:
+ case MediaProperties.ACODEC_AAC_LC:
+ case MediaProperties.ACODEC_MP3:
+ break;
+ default:
+ throw new IllegalArgumentException("Unsupported Audio Codec Format in Input File");
+ }
+
+ if (endMs == MediaItem.END_OF_FILE) {
+ endMs = properties.audioDuration;
+ }
+
+ mUniqueId = audioTrackId;
+ mFilename = filename;
+ mStartTimeMs = startTimeMs;
+ mDurationMs = properties.audioDuration;
+ mAudioChannels = properties.audioChannels;
+ mAudioBitrate = properties.audioBitrate;
+ mAudioSamplingFrequency = properties.audioSamplingFrequency;
+ mAudioType = properties.audioFormat;
+ mTimelineDurationMs = endMs - beginMs;
+ mVolumePercent = volume;
+
+ mBeginBoundaryTimeMs = beginMs;
+ mEndBoundaryTimeMs = endMs;
+
+ mLoop = loop;
+ mMuted = muted;
+ mIsDuckingEnabled = duckingEnabled;
+ mDuckingThreshold = duckThreshold;
+ mDuckedTrackVolume = duckedTrackVolume;
+
+ mAudioWaveformFilename = audioWaveformFilename;
+ if (audioWaveformFilename != null) {
+ mWaveformData =
+ new SoftReference<WaveformData>(new WaveformData(audioWaveformFilename));
+ } else {
+ mWaveformData = null;
+ }
+ }
+
+ /**
+ * Get the id of the audio track
+ *
+ * @return The id of the audio track
+ */
+ public String getId() {
+ return mUniqueId;
+ }
+
+ /**
+ * Get the filename for this audio track source.
+ *
+ * @return The filename as an absolute file name
+ */
+ public String getFilename() {
+ return mFilename;
+ }
+
+ /**
+ * Get the number of audio channels in the source of this audio track
+ *
+ * @return The number of audio channels in the source of this audio track
+ */
+ public int getAudioChannels() {
+ return mAudioChannels;
+ }
+
+ /**
+ * Get the audio codec of the source of this audio track
+ *
+ * @return The audio codec of the source of this audio track
+ * {@link android.media.videoeditor.MediaProperties}
+ */
+ public int getAudioType() {
+ return mAudioType;
+ }
+
+ /**
+ * Get the audio sample frequency of the audio track
+ *
+ * @return The audio sample frequency of the audio track
+ */
+ public int getAudioSamplingFrequency() {
+ return mAudioSamplingFrequency;
+ }
+
+ /**
+ * Get the audio bitrate of the audio track
+ *
+ * @return The audio bitrate of the audio track
+ */
+ public int getAudioBitrate() {
+ return mAudioBitrate;
+ }
+
+ /**
+ * Set the volume of this audio track as percentage of the volume in the
+ * original audio source file.
+ *
+ * @param volumePercent Percentage of the volume to apply. If it is set to
+ * 0, the volume is muted. If it is set to 100, the volume is
+ * the same as the original volume. If it is set to 200, the volume
+ * is doubled (provided that volume amplification is supported)
+ *
+ * @throws IllegalArgumentException if the volume percentage is negative
+ * or exceeds the maximum allowed value
+ * @throws UnsupportedOperationException if volume amplification is
+ * requested and is not supported.
+ */
+ public void setVolume(int volumePercent) {
+ if (volumePercent > MediaProperties.AUDIO_MAX_VOLUME_PERCENT) {
+ throw new IllegalArgumentException("Volume set exceeds maximum allowed value");
+ }
+
+ if (volumePercent < 0) {
+ throw new IllegalArgumentException("Invalid Volume ");
+ }
+ mVolumePercent = volumePercent;
+ /**
+ * Force update of preview settings
+ */
+ mMANativeHelper.setGeneratePreview(true);
+ }
+
+ /**
+ * Get the volume of the audio track as percentage of the volume in the
+ * original audio source file.
+ *
+ * @return The volume in percentage
+ */
+ public int getVolume() {
+ return mVolumePercent;
+ }
+
+ /**
+ * Mute/Unmute the audio track
+ *
+ * @param muted true to mute the audio track. setMute(true) sets the
+ * volume of this audio track to 0.
+ */
+ public void setMute(boolean muted) {
+ mMuted = muted;
+ /**
+ * Force update of preview settings
+ */
+ mMANativeHelper.setGeneratePreview(true);
+ }
+
+ /**
+ * Check if the audio track is muted
+ *
+ * @return true if the audio track is muted
+ */
+ public boolean isMuted() {
+ return mMuted;
+ }
+
+ /**
+ * Get the start time of this audio track relative to the storyboard
+ * timeline.
+ *
+ * @return The start time in milliseconds
+ */
+ public long getStartTime() {
+ return mStartTimeMs;
+ }
+
+ /**
+ * Get the audio track duration
+ *
+ * @return The duration in milliseconds. This value represents the actual
+ * audio track duration. It is not affected by 'enableLoop' or
+ * 'setExtractBoundaries'.
+ */
+ public long getDuration() {
+ return mDurationMs;
+ }
+
+ /**
+ * Get the audio track timeline duration
+ *
+ * @return The timeline duration as defined by the begin and end boundaries
+ */
+ public long getTimelineDuration() {
+ return mTimelineDurationMs;
+ }
+
+ /**
+ * Sets the start and end marks for trimming an audio track
+ *
+ * @param beginMs start time in the audio track in milliseconds (relative to
+ * the beginning of the audio track)
+ * @param endMs end time in the audio track in milliseconds (relative to the
+ * beginning of the audio track)
+ */
+ public void setExtractBoundaries(long beginMs, long endMs) {
+ if (beginMs > mDurationMs) {
+ throw new IllegalArgumentException("Invalid start time");
+ }
+ if (endMs > mDurationMs) {
+ throw new IllegalArgumentException("Invalid end time");
+ }
+ if (beginMs < 0) {
+ throw new IllegalArgumentException("Invalid start time; is < 0");
+ }
+ if (endMs < 0) {
+ throw new IllegalArgumentException("Invalid end time; is < 0");
+ }
+
+ mBeginBoundaryTimeMs = beginMs;
+ mEndBoundaryTimeMs = endMs;
+
+ mTimelineDurationMs = mEndBoundaryTimeMs - mBeginBoundaryTimeMs;
+ /**
+ * Force update of preview settings
+ */
+ mMANativeHelper.setGeneratePreview(true);
+ }
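+
+ /*
+ * Illustrative usage sketch (not part of this change), assuming an existing
+ * AudioTrack instance "track" whose source is 30000 ms long:
+ *
+ * track.setExtractBoundaries(5000, 15000); // keep 5 s..15 s of the source
+ * long onTimeline = track.getTimelineDuration(); // 10000 ms
+ *
+ * getDuration() still returns the full 30000 ms source duration.
+ */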
+
+ /**
+ * Get the boundary begin time
+ *
+ * @return The boundary begin time
+ */
+ public long getBoundaryBeginTime() {
+ return mBeginBoundaryTimeMs;
+ }
+
+ /**
+ * Get the boundary end time
+ *
+ * @return The boundary end time
+ */
+ public long getBoundaryEndTime() {
+ return mEndBoundaryTimeMs;
+ }
+
+ /**
+ * Enable the loop mode for this audio track. Note that only one of the
+ * audio tracks in the timeline can have the loop mode enabled. When looping
+ * is enabled, the samples between the begin and end boundary times are
+ * looped.
+ */
+ public void enableLoop() {
+ if (!mLoop) {
+ mLoop = true;
+ /**
+ * Force update of preview settings
+ */
+ mMANativeHelper.setGeneratePreview(true);
+ }
+ }
+
+ /**
+ * Disable the loop mode
+ */
+ public void disableLoop() {
+ if (mLoop) {
+ mLoop = false;
+ /**
+ * Force update of preview settings
+ */
+ mMANativeHelper.setGeneratePreview(true);
+ }
+ }
+
+ /**
+ * Check if looping is enabled
+ *
+ * @return true if looping is enabled
+ */
+ public boolean isLooping() {
+ return mLoop;
+ }
+
+ /**
+ * Disable the audio duck effect
+ */
+ public void disableDucking() {
+ if (mIsDuckingEnabled) {
+ mIsDuckingEnabled = false;
+ /**
+ * Force update of preview settings
+ */
+ mMANativeHelper.setGeneratePreview(true);
+ }
+ }
+
+ /**
+ * Enable ducking by specifying the required parameters
+ *
+ * @param threshold Ducking will be activated when the energy in
+ * the media item's audio signal goes above this value. The valid
+ * range of values is 0 dB to 90 dB. 0 dB is equivalent to disabling
+ * ducking.
+ * @param duckedTrackVolume The relative volume of the audio track when ducking
+ * is active. The valid range of values is 0 to 100.
+ */
+ public void enableDucking(int threshold, int duckedTrackVolume) {
+ if (threshold < 0 || threshold > 90) {
+ throw new IllegalArgumentException("Invalid threshold value: " + threshold);
+ }
+
+ if (duckedTrackVolume < 0 || duckedTrackVolume > 100) {
+ throw new IllegalArgumentException("Invalid duckedTrackVolume value: "
+ + duckedTrackVolume);
+ }
+
+ mDuckingThreshold = threshold;
+ mDuckedTrackVolume = duckedTrackVolume;
+ mIsDuckingEnabled = true;
+ /**
+ * Force update of preview settings
+ */
+ mMANativeHelper.setGeneratePreview(true);
+ }
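+
+ /*
+ * Illustrative sketch (not part of this change), assuming an AudioTrack
+ * instance "track": duck this background track to 20% of its volume
+ * whenever the media item's audio energy rises above a 45 dB threshold.
+ *
+ * track.enableDucking(45, 20);
+ * // track.isDuckingEnabled() == true
+ * // track.getDuckingThreshhold() == 45, track.getDuckedTrackVolume() == 20
+ * track.disableDucking(); // turn the effect off again
+ */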
+
+ /**
+ * Check if ducking is enabled
+ *
+ * @return true if ducking is enabled
+ */
+ public boolean isDuckingEnabled() {
+ return mIsDuckingEnabled;
+ }
+
+ /**
+ * Get the ducking threshold.
+ *
+ * @return The ducking threshold
+ */
+ public int getDuckingThreshhold() {
+ return mDuckingThreshold;
+ }
+
+ /**
+ * Get the ducked track volume.
+ *
+ * @return The ducked track volume
+ */
+ public int getDuckedTrackVolume() {
+ return mDuckedTrackVolume;
+ }
+
+ /**
+ * Generate a file containing the sample volume levels of this audio
+ * track. This function may take a significant amount of time and is
+ * blocking. The filename can be retrieved using getAudioWaveformFilename().
+ *
+ * @param listener The progress listener
+ *
+ * @throws IOException if the output file cannot be created
+ * @throws IllegalArgumentException if the audio file does not have a valid
+ * audio track
+ * @throws IllegalStateException if the codec type is unsupported
+ */
+ public void extractAudioWaveform(ExtractAudioWaveformProgressListener listener)
+ throws IOException {
+ if (mAudioWaveformFilename == null) {
+ /**
+ * AudioWaveformFilename is generated
+ */
+ final String projectPath = mMANativeHelper.getProjectPath();
+ final String audioWaveFilename = projectPath + "/audioWaveformFile-"
+ + getId() + ".dat";
+
+ /**
+ * Frame duration (ms) = (number of samples per frame * 1000) /
+ * sampling frequency
+ */
+ final int frameDuration;
+ final int sampleCount;
+ final int codecType = mMANativeHelper.getAudioCodecType(mAudioType);
+ switch (codecType) {
+ case MediaProperties.ACODEC_AMRNB: {
+ frameDuration = (MediaProperties.SAMPLES_PER_FRAME_AMRNB * 1000)
+ / MediaProperties.DEFAULT_SAMPLING_FREQUENCY;
+ sampleCount = MediaProperties.SAMPLES_PER_FRAME_AMRNB;
+ break;
+ }
+
+ case MediaProperties.ACODEC_AMRWB: {
+ frameDuration = (MediaProperties.SAMPLES_PER_FRAME_AMRWB * 1000)
+ / MediaProperties.DEFAULT_SAMPLING_FREQUENCY;
+ sampleCount = MediaProperties.SAMPLES_PER_FRAME_AMRWB;
+ break;
+ }
+
+ case MediaProperties.ACODEC_AAC_LC: {
+ frameDuration = (MediaProperties.SAMPLES_PER_FRAME_AAC * 1000)
+ / MediaProperties.DEFAULT_SAMPLING_FREQUENCY;
+ sampleCount = MediaProperties.SAMPLES_PER_FRAME_AAC;
+ break;
+ }
+
+ case MediaProperties.ACODEC_MP3: {
+ frameDuration = (MediaProperties.SAMPLES_PER_FRAME_MP3 * 1000)
+ / MediaProperties.DEFAULT_SAMPLING_FREQUENCY;
+ sampleCount = MediaProperties.SAMPLES_PER_FRAME_MP3;
+ break;
+ }
+
+ default: {
+ throw new IllegalStateException("Unsupported codec type: "
+ + codecType);
+ }
+ }
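+
+ /*
+ * Worked example of the formula above (the numbers are illustrative, not
+ * the actual MediaProperties constants): with 1024 samples per frame and
+ * a sampling frequency of 32000 Hz, frameDuration = (1024 * 1000) / 32000
+ * = 32 ms per frame.
+ */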
+
+ mMANativeHelper.generateAudioGraph(mUniqueId,
+ mFilename,
+ audioWaveFilename,
+ frameDuration,
+ MediaProperties.DEFAULT_CHANNEL_COUNT,
+ sampleCount,
+ listener,
+ false);
+ /**
+ * Record the generated file name
+ */
+ mAudioWaveformFilename = audioWaveFilename;
+ }
+ mWaveformData = new SoftReference<WaveformData>(new WaveformData(mAudioWaveformFilename));
+ }
+
+ /**
+ * Get the audio waveform file name if extractAudioWaveform was successful.
+ *
+ * @return the name of the file, null if the file does not exist
+ */
+ String getAudioWaveformFilename() {
+ return mAudioWaveformFilename;
+ }
+
+ /**
+ * Delete the waveform file
+ */
+ void invalidate() {
+ if (mAudioWaveformFilename != null) {
+ new File(mAudioWaveformFilename).delete();
+ mAudioWaveformFilename = null;
+ mWaveformData = null;
+ }
+ }
+
+ /**
+ * Get the audio waveform data.
+ *
+ * @return The waveform data
+ *
+ * @throws IOException if the waveform file cannot be found
+ */
+ public WaveformData getWaveformData() throws IOException {
+ if (mWaveformData == null) {
+ return null;
+ }
+
+ WaveformData waveformData = mWaveformData.get();
+ if (waveformData != null) {
+ return waveformData;
+ } else if (mAudioWaveformFilename != null) {
+ waveformData = new WaveformData(mAudioWaveformFilename);
+ mWaveformData = new SoftReference<WaveformData>(waveformData);
+ return waveformData;
+ } else {
+ return null;
+ }
+ }
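+
+ /*
+ * Illustrative sketch (not part of this change), assuming an AudioTrack
+ * instance "track" and that the listener callback is onProgress(int):
+ *
+ * track.extractAudioWaveform(new ExtractAudioWaveformProgressListener() {
+ * public void onProgress(int progress) {
+ * // progress is reported while the waveform file is generated
+ * }
+ * });
+ * WaveformData data = track.getWaveformData(); // served from the SoftReference cache
+ */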
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public boolean equals(Object object) {
+ if (!(object instanceof AudioTrack)) {
+ return false;
+ }
+ return mUniqueId.equals(((AudioTrack)object).mUniqueId);
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public int hashCode() {
+ return mUniqueId.hashCode();
+ }
+}
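+
+/*
+ * Putting the setters above together (illustrative sketch only, assuming an
+ * AudioTrack instance "track" that has already been added to the timeline and
+ * whose source is at least 20000 ms long):
+ *
+ * track.setExtractBoundaries(0, 20000); // play the first 20 s of the source
+ * track.enableLoop(); // repeat that range on the timeline
+ * track.setVolume(80); // 80% of the original volume
+ * track.setMute(false);
+ *
+ * Each of these calls invalidates the current preview by calling
+ * setGeneratePreview(true) on the native helper.
+ */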
diff --git a/media/java/android/media/videoeditor/Effect.java b/media/java/android/media/videoeditor/Effect.java
index 8fd0d2711282..3362d47d48a6 100755
--- a/media/java/android/media/videoeditor/Effect.java
+++ b/media/java/android/media/videoeditor/Effect.java
@@ -1,173 +1,197 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.videoeditor;
-
-/**
- * This is the super class for all effects. An effect can only be applied to a
- * single media item. If one wants to apply the same effect to multiple media
- * items, multiple @{MediaItem.addEffect(Effect)} call must be invoked on each
- * of the MediaItem objects.
- * {@hide}
- */
-public abstract class Effect {
- // Instance variables
- private final String mUniqueId;
- // The effect owner
- private final MediaItem mMediaItem;
- protected long mDurationMs;
- // The start time of the effect relative to the media item timeline
- protected long mStartTimeMs;
-
- /**
- * Default constructor
- */
- @SuppressWarnings("unused")
- private Effect() {
- mMediaItem = null;
- mUniqueId = null;
- mStartTimeMs = 0;
- mDurationMs = 0;
- }
-
- /**
- * Constructor
- *
- * @param mediaItem The media item owner
- * @param effectId The effect id
- * @param startTimeMs The start time relative to the media item to which it
- * is applied
- * @param durationMs The effect duration in milliseconds
- */
- public Effect(MediaItem mediaItem, String effectId, long startTimeMs, long durationMs) {
- if (mediaItem == null) {
- throw new IllegalArgumentException("Media item cannot be null");
- }
-
- if (startTimeMs + durationMs > mediaItem.getDuration()) {
- throw new IllegalArgumentException("Invalid start time and duration");
- }
-
- mMediaItem = mediaItem;
- mUniqueId = effectId;
- mStartTimeMs = startTimeMs;
- mDurationMs = durationMs;
- }
-
- /**
- * @return The id of the effect
- */
- public String getId() {
- return mUniqueId;
- }
-
- /**
- * Set the duration of the effect. If a preview or export is in progress,
- * then this change is effective for next preview or export session.
- *
- * @param durationMs of the effect in milliseconds
- */
- public void setDuration(long durationMs) {
- if (mStartTimeMs + durationMs > mMediaItem.getDuration()) {
- throw new IllegalArgumentException("Duration is too large");
- }
-
- final long oldDurationMs = mDurationMs;
- mDurationMs = durationMs;
-
- mMediaItem.invalidateTransitions(mStartTimeMs, oldDurationMs, mStartTimeMs, mDurationMs);
- }
-
- /**
- * Get the duration of the effect
- *
- * @return The duration of the effect in milliseconds
- */
- public long getDuration() {
- return mDurationMs;
- }
-
- /**
- * Set start time of the effect. If a preview or export is in progress, then
- * this change is effective for next preview or export session.
- *
- * @param startTimeMs The start time of the effect relative to the beginning
- * of the media item in milliseconds
- */
- public void setStartTime(long startTimeMs) {
- if (startTimeMs + mDurationMs > mMediaItem.getDuration()) {
- throw new IllegalArgumentException("Start time is too large");
- }
-
- final long oldStartTimeMs = mStartTimeMs;
- mStartTimeMs = startTimeMs;
-
- mMediaItem.invalidateTransitions(oldStartTimeMs, mDurationMs, mStartTimeMs, mDurationMs);
- }
-
- /**
- * @return The start time in milliseconds
- */
- public long getStartTime() {
- return mStartTimeMs;
- }
-
- /**
- * Set the start time and duration
- *
- * @param startTimeMs start time in milliseconds
- * @param durationMs The duration in milliseconds
- */
- public void setStartTimeAndDuration(long startTimeMs, long durationMs) {
- if (startTimeMs + durationMs > mMediaItem.getDuration()) {
- throw new IllegalArgumentException("Invalid start time or duration");
- }
-
- final long oldStartTimeMs = mStartTimeMs;
- final long oldDurationMs = mDurationMs;
-
- mStartTimeMs = startTimeMs;
- mDurationMs = durationMs;
-
- mMediaItem.invalidateTransitions(oldStartTimeMs, oldDurationMs, mStartTimeMs, mDurationMs);
- }
-
- /**
- * @return The media item owner
- */
- public MediaItem getMediaItem() {
- return mMediaItem;
- }
-
- /*
- * {@inheritDoc}
- */
- @Override
- public boolean equals(Object object) {
- if (!(object instanceof Effect)) {
- return false;
- }
- return mUniqueId.equals(((Effect)object).mUniqueId);
- }
-
- /*
- * {@inheritDoc}
- */
- @Override
- public int hashCode() {
- return mUniqueId.hashCode();
- }
-}
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.videoeditor;
+
+/**
+ * This is the super class for all effects. An effect can only be applied to a
+ * single media item.
+ * {@hide}
+ */
+public abstract class Effect {
+ /**
+ * Instance variables
+ */
+ private final String mUniqueId;
+ /**
+ * The effect owner
+ */
+ private final MediaItem mMediaItem;
+
+ protected long mDurationMs;
+ /**
+ * The start time of the effect relative to the beginning
+ * of the media item
+ */
+ protected long mStartTimeMs;
+
+ /**
+ * Default constructor
+ */
+ @SuppressWarnings("unused")
+ private Effect() {
+ mMediaItem = null;
+ mUniqueId = null;
+ mStartTimeMs = 0;
+ mDurationMs = 0;
+ }
+
+ /**
+ * Constructor
+ *
+ * @param mediaItem The media item owner
+ * @param effectId The effect id
+ * @param startTimeMs The start time relative to the media item to which it
+ * is applied
+ * @param durationMs The effect duration in milliseconds
+ */
+ public Effect(MediaItem mediaItem, String effectId, long startTimeMs,
+ long durationMs) {
+ if (mediaItem == null) {
+ throw new IllegalArgumentException("Media item cannot be null");
+ }
+
+ if ((startTimeMs < 0) || (durationMs < 0)) {
+ throw new IllegalArgumentException("Invalid start time Or/And Duration");
+ }
+ if (startTimeMs + durationMs > mediaItem.getDuration()) {
+ throw new IllegalArgumentException("Invalid start time and duration");
+ }
+
+ mMediaItem = mediaItem;
+ mUniqueId = effectId;
+ mStartTimeMs = startTimeMs;
+ mDurationMs = durationMs;
+ }
+
+ /**
+ * Get the id of the effect.
+ *
+ * @return The id of the effect
+ */
+ public String getId() {
+ return mUniqueId;
+ }
+
+ /**
+ * Set the duration of the effect. If a preview or export is in progress,
+ * then this change is effective for next preview or export session.
+ *
+ * @param durationMs of the effect in milliseconds
+ */
+ public void setDuration(long durationMs) {
+ if (durationMs < 0) {
+ throw new IllegalArgumentException("Invalid duration");
+ }
+
+ if (mStartTimeMs + durationMs > mMediaItem.getDuration()) {
+ throw new IllegalArgumentException("Duration is too large");
+ }
+
+ final long oldDurationMs = mDurationMs;
+ mDurationMs = durationMs;
+
+ mMediaItem.invalidateTransitions(mStartTimeMs, oldDurationMs,
+ mStartTimeMs, mDurationMs);
+ }
+
+ /**
+ * Get the duration of the effect
+ *
+ * @return The duration of the effect in milliseconds
+ */
+ public long getDuration() {
+ return mDurationMs;
+ }
+
+ /**
+ * Set start time of the effect. If a preview or export is in progress, then
+ * this change is effective for next preview or export session.
+ *
+ * @param startTimeMs The start time of the effect relative to the beginning
+ * of the media item in milliseconds
+ */
+ public void setStartTime(long startTimeMs) {
+ if (startTimeMs + mDurationMs > mMediaItem.getDuration()) {
+ throw new IllegalArgumentException("Start time is too large");
+ }
+
+ final long oldStartTimeMs = mStartTimeMs;
+ mStartTimeMs = startTimeMs;
+
+ mMediaItem.invalidateTransitions(oldStartTimeMs, mDurationMs,
+ mStartTimeMs, mDurationMs);
+ }
+
+ /**
+ * Get the start time of the effect
+ *
+ * @return The start time in milliseconds
+ */
+ public long getStartTime() {
+ return mStartTimeMs;
+ }
+
+ /**
+ * Set the start time and duration
+ *
+ * @param startTimeMs start time in milliseconds
+ * @param durationMs The duration in milliseconds
+ */
+ public void setStartTimeAndDuration(long startTimeMs, long durationMs) {
+ if (startTimeMs + durationMs > mMediaItem.getDuration()) {
+ throw new IllegalArgumentException("Invalid start time or duration");
+ }
+
+ final long oldStartTimeMs = mStartTimeMs;
+ final long oldDurationMs = mDurationMs;
+
+ mStartTimeMs = startTimeMs;
+ mDurationMs = durationMs;
+
+ mMediaItem.invalidateTransitions(oldStartTimeMs, oldDurationMs,
+ mStartTimeMs, mDurationMs);
+ }
+
+ /**
+ * Get the media item owner.
+ *
+ * @return The media item owner
+ */
+ public MediaItem getMediaItem() {
+ return mMediaItem;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public boolean equals(Object object) {
+ if (!(object instanceof Effect)) {
+ return false;
+ }
+ return mUniqueId.equals(((Effect)object).mUniqueId);
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public int hashCode() {
+ return mUniqueId.hashCode();
+ }
+}
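+
+/*
+ * Illustrative sketch (not part of this change): Effect is abstract, so its
+ * setters are exercised through a concrete subclass such as EffectColor.
+ * "mediaItem" is assumed to be a MediaItem with a 10000 ms duration.
+ *
+ * Effect effect = new EffectColor(mediaItem, "sepia1", 0, 4000,
+ * EffectColor.TYPE_SEPIA, 0);
+ * effect.setStartTimeAndDuration(2000, 5000); // cover 2 s..7 s of the item
+ * mediaItem.addEffect(effect);
+ */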
diff --git a/media/java/android/media/videoeditor/EffectColor.java b/media/java/android/media/videoeditor/EffectColor.java
index ac48e3711d77..6c5ac2dc123c 100755
--- a/media/java/android/media/videoeditor/EffectColor.java
+++ b/media/java/android/media/videoeditor/EffectColor.java
@@ -1,119 +1,142 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.videoeditor;
-
-/**
- * This class allows to apply color on a media item.
- * {@hide}
- */
-public class EffectColor extends Effect {
-
- /**
- * Change the video frame color to the RGB color value provided
- */
- public static final int TYPE_COLOR = 1;
- /**
- * Change the video frame color to a gradation from RGB color (at the top of
- * the frame) to black (at the bottom of the frame).
- */
- public static final int TYPE_GRADIENT = 2;
- /**
- * Change the video frame color to sepia
- */
- public static final int TYPE_SEPIA = 3;
- /**
- * Invert the video frame color
- */
- public static final int TYPE_NEGATIVE = 4;
- /**
- * Make the video look like as if it was recorded in 50's
- */
- public static final int TYPE_FIFTIES = 5;
-
- // Predefined colors
- public static final int GREEN = 0x0000ff00;
- public static final int PINK = 0x00ff66cc;
- public static final int GRAY = 0x007f7f7f;
-
- // The effect type
- private final int mType;
-
- // The effect color
- private final int mColor;
-
- /**
- * An object of this type cannot be instantiated by using the default
- * constructor
- */
- @SuppressWarnings("unused")
- private EffectColor() {
- this(null, null, 0, 0, 0, 0);
- }
-
- /**
- * Constructor
- *
- * @param mediaItem The media item owner
- * @param effectId The effect id
- * @param startTimeMs The start time relative to the media item to which it
- * is applied
- * @param durationMs The duration of this effect in milliseconds
- * @param type type of the effect. type is one of: TYPE_COLOR,
- * TYPE_GRADIENT, TYPE_SEPIA, TYPE_NEGATIVE, TYPE_FIFTIES.
- * @param color If type is TYPE_COLOR, color is the RGB color as 888.
- * If type is TYPE_GRADIENT, color is the RGB color at the
- * top of the frame. Otherwise, color is ignored
- */
- public EffectColor(MediaItem mediaItem, String effectId, long startTimeMs, long durationMs,
- int type, int color) {
- super(mediaItem, effectId, startTimeMs, durationMs);
- switch (type) {
- case TYPE_COLOR:
- case TYPE_GRADIENT: {
- mColor = color;
- break;
- }
-
- case TYPE_SEPIA:
- case TYPE_NEGATIVE:
- case TYPE_FIFTIES: {
- mColor = -1;
- break;
- }
-
- default: {
- throw new IllegalArgumentException("Invalid type: " + type);
- }
- }
-
- mType = type;
- }
-
- /**
- * @return The effect type
- */
- public int getType() {
- return mType;
- }
-
- /**
- * @return the color as RGB 888 if type is TYPE_COLOR or TYPE_GRADIENT.
- */
- public int getColor() {
- return mColor;
- }
-}
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.videoeditor;
+
+/**
+ * This class allows applying a color effect to a media item.
+ * {@hide}
+ */
+public class EffectColor extends Effect {
+
+ /**
+ * Change the video frame color to the RGB color value provided
+ */
+ public static final int TYPE_COLOR = 1;
+ /**
+ * Change the video frame color to a gradation from RGB color (at the top of
+ * the frame) to black (at the bottom of the frame).
+ */
+ public static final int TYPE_GRADIENT = 2;
+ /**
+ * Change the video frame color to sepia
+ */
+ public static final int TYPE_SEPIA = 3;
+ /**
+ * Invert the video frame color
+ */
+ public static final int TYPE_NEGATIVE = 4;
+ /**
+ * Make the video look as if it was recorded in the 1950s
+ */
+ public static final int TYPE_FIFTIES = 5;
+ /**
+ * Change the video frame color to the RGB color value GREEN
+ */
+ public static final int GREEN = 0x0000ff00;
+ /**
+ * Change the video frame color to the RGB color value PINK
+ */
+ public static final int PINK = 0x00ff66cc;
+ /**
+ * Change the video frame color to the RGB color value GRAY
+ */
+ public static final int GRAY = 0x007f7f7f;
+
+ /**
+ * The effect type
+ */
+ private final int mType;
+
+ /**
+ * The effect color
+ */
+ private final int mColor;
+
+ /**
+ * An object of this type cannot be instantiated by using the default
+ * constructor
+ */
+ @SuppressWarnings("unused")
+ private EffectColor() {
+ this(null, null, 0, 0, 0, 0);
+ }
+
+ /**
+ * Constructor
+ *
+ * @param mediaItem The media item owner
+ * @param effectId The effect id
+ * @param startTimeMs The start time relative to the media item to which it
+ * is applied
+ * @param durationMs The duration of this effect in milliseconds
+ * @param type type of the effect. type is one of: TYPE_COLOR,
+ * TYPE_GRADIENT, TYPE_SEPIA, TYPE_NEGATIVE, TYPE_FIFTIES.
+ * @param color If type is TYPE_COLOR, color is the RGB color as 888.
+ * If type is TYPE_GRADIENT, color is the RGB color at the
+ * top of the frame. Otherwise, color is ignored
+ */
+ public EffectColor(MediaItem mediaItem, String effectId, long startTimeMs,
+ long durationMs, int type, int color) {
+ super(mediaItem, effectId, startTimeMs, durationMs);
+ switch (type) {
+ case TYPE_COLOR:
+ case TYPE_GRADIENT: {
+ switch (color) {
+ case GREEN:
+ case PINK:
+ case GRAY:
+ mColor = color;
+ break;
+
+ default:
+ throw new IllegalArgumentException("Invalid Color: " + color);
+ }
+ break;
+ }
+ case TYPE_SEPIA:
+ case TYPE_NEGATIVE:
+ case TYPE_FIFTIES: {
+ mColor = -1;
+ break;
+ }
+
+ default: {
+ throw new IllegalArgumentException("Invalid type: " + type);
+ }
+ }
+ mType = type;
+ }
+
+ /**
+ * Get the effect type.
+ *
+ * @return The effect type
+ */
+ public int getType() {
+ return mType;
+ }
+
+ /**
+ * Get the color if effect type is TYPE_COLOR or TYPE_GRADIENT.
+ *
+ * @return the color as RGB 888 if type is TYPE_COLOR or TYPE_GRADIENT.
+ */
+ public int getColor() {
+ return mColor;
+ }
+}
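+
+/*
+ * Illustrative sketch (not part of this change), assuming "mediaItem" is a
+ * MediaItem of at least 3000 ms: apply a gradient from GREEN (top of the
+ * frame) to black (bottom of the frame) over the first three seconds.
+ *
+ * EffectColor gradient = new EffectColor(mediaItem, "gradientGreen",
+ * 0, 3000, EffectColor.TYPE_GRADIENT, EffectColor.GREEN);
+ * mediaItem.addEffect(gradient);
+ *
+ * For TYPE_COLOR and TYPE_GRADIENT the color argument must be one of GREEN,
+ * PINK or GRAY; any other value is rejected by the constructor.
+ */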
diff --git a/media/java/android/media/videoeditor/EffectKenBurns.java b/media/java/android/media/videoeditor/EffectKenBurns.java
index ae2e70d7bc71..66c9e86079a3 100755
--- a/media/java/android/media/videoeditor/EffectKenBurns.java
+++ b/media/java/android/media/videoeditor/EffectKenBurns.java
@@ -1,88 +1,128 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.videoeditor;
-
-import android.graphics.Rect;
-
-/**
- * This class represents a Ken Burns effect.
- * {@hide}
- */
-public class EffectKenBurns extends Effect {
- // Instance variables
- private Rect mStartRect;
- private Rect mEndRect;
-
- /**
- * Objects of this type cannot be instantiated by using the default
- * constructor
- */
- @SuppressWarnings("unused")
- private EffectKenBurns() {
- this(null, null, null, null, 0, 0);
- }
-
- /**
- * Constructor
- *
- * @param mediaItem The media item owner
- * @param effectId The effect id
- * @param startRect The start rectangle
- * @param endRect The end rectangle
- * @param startTimeMs The start time
- * @param durationMs The duration of the Ken Burns effect in milliseconds
- */
- public EffectKenBurns(MediaItem mediaItem, String effectId, Rect startRect, Rect endRect,
- long startTimeMs, long durationMs) {
- super(mediaItem, effectId, startTimeMs, durationMs);
-
- mStartRect = startRect;
- mEndRect = endRect;
- }
-
- /**
- * @param startRect The start rectangle
- *
- * @throws IllegalArgumentException if start rectangle is incorrectly set.
- */
- public void setStartRect(Rect startRect) {
- mStartRect = startRect;
- }
-
- /**
- * @return The start rectangle
- */
- public Rect getStartRect() {
- return mStartRect;
- }
-
- /**
- * @param endRect The end rectangle
- *
- * @throws IllegalArgumentException if end rectangle is incorrectly set.
- */
- public void setEndRect(Rect endRect) {
- mEndRect = endRect;
- }
-
- /**
- * @return The end rectangle
- */
- public Rect getEndRect() {
- return mEndRect;
- }
-}
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.videoeditor;
+
+import android.graphics.Rect;
+
+/**
+ * This class represents a Ken Burns effect.
+ * {@hide}
+ */
+public class EffectKenBurns extends Effect {
+ /**
+ * Instance variables
+ */
+ private Rect mStartRect;
+ private Rect mEndRect;
+
+ /**
+ * Objects of this type cannot be instantiated by using the default
+ * constructor
+ */
+ @SuppressWarnings("unused")
+ private EffectKenBurns() {
+ this(null, null, null, null, 0, 0);
+ }
+
+ /**
+ * Constructor
+ *
+ * @param mediaItem The media item owner
+ * @param effectId The effect id
+ * @param startRect The start rectangle
+ * @param endRect The end rectangle
+ * @param startTimeMs The start time
+ * @param durationMs The duration of the Ken Burns effect in milliseconds
+ */
+ public EffectKenBurns(MediaItem mediaItem, String effectId, Rect startRect,
+ Rect endRect, long startTimeMs, long durationMs) {
+ super(mediaItem, effectId, startTimeMs, durationMs);
+
+ mStartRect = startRect;
+ mEndRect = endRect;
+ }
+
+ /**
+ * Set the start rectangle.
+ *
+ * @param startRect The start rectangle
+ *
+ * @throws IllegalArgumentException if start rectangle is incorrectly set.
+ */
+ public void setStartRect(Rect startRect) {
+ if ((startRect.left == 0) && (startRect.right == 0)
+ && (startRect.bottom == 0) && (startRect.top == 0)) {
+ throw new IllegalArgumentException("Invalid Rectangle");
+ }
+
+ mStartRect = startRect;
+ }
+
+ /**
+ * Get the start rectangle.
+ *
+ * @return The start rectangle
+ */
+ public Rect getStartRect() {
+ return mStartRect;
+ }
+
+ /**
+ * Set the end rectangle.
+ *
+ * @param endRect The end rectangle
+ *
+ * @throws IllegalArgumentException if end rectangle is incorrectly set.
+ */
+ public void setEndRect(Rect endRect) {
+ if ((endRect.left == 0) && (endRect.right == 0)
+ && (endRect.bottom == 0) && (endRect.top == 0)) {
+ throw new IllegalArgumentException("Invalid Rectangle");
+ }
+
+ mEndRect = endRect;
+ }
+
+ /**
+ * Get the end rectangle.
+ *
+ * @return The end rectangle
+ */
+ public Rect getEndRect() {
+ return mEndRect;
+ }
+
+ /**
+ * Get the Ken Burns effect start and end rectangle coordinates.
+ *
+ * @param start The Rect object to be populated with the start
+ * rectangle coordinates
+ * @param end The Rect object to be populated with the end
+ * rectangle coordinates
+ */
+ void getKenBurnsSettings(Rect start, Rect end) {
+ start.left = getStartRect().left;
+ start.top = getStartRect().top;
+ start.right = getStartRect().right;
+ start.bottom = getStartRect().bottom;
+ end.left = getEndRect().left;
+ end.top = getEndRect().top;
+ end.right = getEndRect().right;
+ end.bottom = getEndRect().bottom;
+ }
+}
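+
+/*
+ * Illustrative sketch (not part of this change), assuming "imageItem" is a
+ * 5000 ms media item with a 640x480 source: zoom from the full frame into a
+ * centered region of half the width and height over the whole item.
+ *
+ * Rect startRect = new Rect(0, 0, 640, 480); // left, top, right, bottom
+ * Rect endRect = new Rect(160, 120, 480, 360);
+ * EffectKenBurns kenBurns =
+ * new EffectKenBurns(imageItem, "kb1", startRect, endRect, 0, 5000);
+ * imageItem.addEffect(kenBurns);
+ */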
diff --git a/media/java/android/media/videoeditor/ExtractAudioWaveformProgressListener.java b/media/java/android/media/videoeditor/ExtractAudioWaveformProgressListener.java
index 1cce148db535..7ba7de3dc8d7 100644..100755
--- a/media/java/android/media/videoeditor/ExtractAudioWaveformProgressListener.java
+++ b/media/java/android/media/videoeditor/ExtractAudioWaveformProgressListener.java
@@ -14,6 +14,7 @@
* limitations under the License.
*/
+
package android.media.videoeditor;
/**
diff --git a/media/java/android/media/videoeditor/MediaArtistNativeHelper.java b/media/java/android/media/videoeditor/MediaArtistNativeHelper.java
new file mode 100755
index 000000000000..c3862e2368fa
--- /dev/null
+++ b/media/java/android/media/videoeditor/MediaArtistNativeHelper.java
@@ -0,0 +1,4028 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.videoeditor;
+
+import java.io.File;
+import java.io.IOException;
+import java.math.BigDecimal;
+import java.nio.IntBuffer;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import android.graphics.Bitmap;
+import android.media.videoeditor.VideoEditor.ExportProgressListener;
+import android.media.videoeditor.VideoEditor.PreviewProgressListener;
+import android.media.videoeditor.VideoEditor.MediaProcessingProgressListener;
+import android.util.Log;
+import android.util.Pair;
+import android.view.Surface;
+
+/**
+ * This class provides native methods to be used by MediaArtist. {@hide}
+ */
+class MediaArtistNativeHelper {
+
+ static {
+ System.loadLibrary("videoeditor_jni");
+ }
+
+ private final int MAX_THUMBNAIL_PERMITTED = 8;
+
+ private final VideoEditor mVideoEditor;
+
+ public EditSettings mStoryBoardSettings;
+
+ private String mOutputFilename;
+
+ EditSettings mEditSettings = null;
+
+ PreviewClipProperties mClipProperties = null;
+
+ private EditSettings mPreviewEditSettings;
+
+ private AudioSettings mAudioSettings = null;
+
+ private AudioTrack mAudioTrack = null;
+
+ public boolean mInvalidatePreviewArray = true;
+
+ private boolean mRegenerateAudio = true;
+
+ private String mExportFilename = null;
+
+ private boolean mExportDone = false;
+
+ private int mProgressToApp;
+
+
+ public static final int TASK_LOADING_SETTINGS = 1;
+
+ public static final int TASK_ENCODING = 2;
+
+ private static final String AUDIO_TRACK_PCM_FILE = "AudioPcm.pcm";
+
+ // Processing indication
+ public static final int PROCESSING_NONE = 0;
+ public static final int PROCESSING_AUDIO_PCM = 1;
+ public static final int PROCESSING_TRANSITION = 2;
+ public static final int PROCESSING_KENBURNS = 3;
+ public static final int PROCESSING_INTERMEDIATE1 = 11;
+ public static final int PROCESSING_INTERMEDIATE2 = 12;
+ public static final int PROCESSING_INTERMEDIATE3 = 13;
+ public static final int PROCESSING_EXPORT = 20;
+
+ private int mProcessingState;
+ private Object mProcessingObject;
+
+ private PreviewProgressListener mPreviewProgressListener;
+ private ExportProgressListener mExportProgressListener;
+ private ExtractAudioWaveformProgressListener mExtractAudioWaveformProgressListener;
+ private MediaProcessingProgressListener mMediaProcessingProgressListener;
+ private final String mProjectPath;
+
+ private long mPreviewProgress;
+
+ private String mAudioTrackPCMFilePath;
+
+ int mTotalClips = 0;
+
+ int mPreviewEffectsSize = 0;
+
+ private boolean mErrorFlagSet = false;
+
+ @SuppressWarnings("unused")
+ private int mManualEditContext;
+
+
+ List<Effect> mMediaEffectList;
+
+ List<Overlay> mMediaOverLayList;
+
+ /* Listeners */
+
+ /**
+ * Interface definition for a listener to be invoked when there is an update
+ * in a running task.
+ */
+ public interface OnProgressUpdateListener {
+ /**
+ * Called when there is an update.
+ *
+ * @param taskId id of the task reporting an update.
+ * @param progress progress of the task [0..100].
+ * @see BasicEdit#TASK_ENCODING
+ */
+ public void OnProgressUpdate(int taskId, int progress);
+ }
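+
+ /*
+ * Illustrative sketch (not part of this change): a listener that only
+ * reacts to encoding progress.
+ *
+ * OnProgressUpdateListener listener = new OnProgressUpdateListener() {
+ * public void OnProgressUpdate(int taskId, int progress) {
+ * if (taskId == TASK_ENCODING) {
+ * // progress is in the range [0..100]
+ * }
+ * }
+ * };
+ */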
+
+ /** Defines the version. */
+ public final class Version {
+
+ /** Major version number */
+ public int major;
+
+ /** Minor version number */
+ public int minor;
+
+ /** Revision number */
+ public int revision;
+
+ /** VIDEOEDITOR major version number */
+ private static final int VIDEOEDITOR_MAJOR_VERSION = 0;
+
+ /** VIDEOEDITOR minor version number */
+ private static final int VIDEOEDITOR_MINOR_VERSION = 0;
+
+ /** VIDEOEDITOR revision number */
+ private static final int VIDEOEDITOR_REVISION_VERSION = 1;
+
+ /** Method which returns the current VIDEOEDITOR version */
+ public Version getVersion() {
+ Version version = new Version();
+
+ version.major = Version.VIDEOEDITOR_MAJOR_VERSION;
+ version.minor = Version.VIDEOEDITOR_MINOR_VERSION;
+ version.revision = Version.VIDEOEDITOR_REVISION_VERSION;
+
+ return version;
+ }
+ }
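+
+ /*
+ * Illustrative sketch (not part of this change): reading the library
+ * version through the inner Version class from within this helper.
+ *
+ * Version v = new Version().getVersion();
+ * // v.major == 0, v.minor == 0, v.revision == 1 with the constants above
+ */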
+
+ /**
+ * Defines output audio formats.
+ */
+ public final class AudioFormat {
+ /** No audio present in output clip. Used to generate video only clip */
+ public static final int NO_AUDIO = 0;
+
+ /** AMR Narrow Band. */
+ public static final int AMR_NB = 1;
+
+ /** Advanced Audio Coding (AAC). */
+ public static final int AAC = 2;
+
+ /** Advanced Audio Codec Plus (HE-AAC v1). */
+ public static final int AAC_PLUS = 3;
+
+ /** Advanced Audio Codec Plus (HE-AAC v2). */
+ public static final int ENHANCED_AAC_PLUS = 4;
+
+ /** MPEG layer 3 (MP3). */
+ public static final int MP3 = 5;
+
+ /** Enhanced Variable Rate Codec (EVRC). */
+ public static final int EVRC = 6;
+
+ /** Pulse Code Modulation (PCM). */
+ public static final int PCM = 7;
+
+ /** No transcoding. Output audio format is same as input audio format */
+ public static final int NULL_AUDIO = 254;
+
+ /** Unsupported audio format. */
+ public static final int UNSUPPORTED_AUDIO = 255;
+ }
+
+ /**
+ * Defines audio sampling frequencies.
+ */
+ public final class AudioSamplingFrequency {
+ /**
+ * Default sampling frequency. Uses the default frequency for a specific
+ * audio format. For AAC the only supported (and thus default) sampling
+ * frequency is 16 kHz; for this audio format the sampling frequency set
+ * in the OutputParams is ignored.
+ **/
+ public static final int FREQ_DEFAULT = 0;
+
+ /** Audio sampling frequency of 8000 Hz. */
+ public static final int FREQ_8000 = 8000;
+
+ /** Audio sampling frequency of 11025 Hz. */
+ public static final int FREQ_11025 = 11025;
+
+ /** Audio sampling frequency of 12000 Hz. */
+ public static final int FREQ_12000 = 12000;
+
+ /** Audio sampling frequency of 16000 Hz. */
+ public static final int FREQ_16000 = 16000;
+
+ /** Audio sampling frequency of 22050 Hz. */
+ public static final int FREQ_22050 = 22050;
+
+ /** Audio sampling frequency of 24000 Hz. */
+ public static final int FREQ_24000 = 24000;
+
+ /** Audio sampling frequency of 32000 Hz. */
+ public static final int FREQ_32000 = 32000;
+
+ /** Audio sampling frequency of 44100 Hz. */
+ public static final int FREQ_44100 = 44100;
+
+ /** Audio sampling frequency of 48000 Hz. Not available for output file. */
+ public static final int FREQ_48000 = 48000;
+ }
+
+ /**
+ * Defines the supported fixed audio and video bitrates. These values are
+ * for output audio video only.
+ */
+ public final class Bitrate {
+ /** Variable bitrate. Means no bitrate regulation */
+ public static final int VARIABLE = -1;
+
+ /** An undefined bitrate. */
+ public static final int UNDEFINED = 0;
+
+ /** A bitrate of 9.2 kbits/s. */
+ public static final int BR_9_2_KBPS = 9200;
+
+ /** A bitrate of 12.2 kbits/s. */
+ public static final int BR_12_2_KBPS = 12200;
+
+ /** A bitrate of 16 kbits/s. */
+ public static final int BR_16_KBPS = 16000;
+
+ /** A bitrate of 24 kbits/s. */
+ public static final int BR_24_KBPS = 24000;
+
+ /** A bitrate of 32 kbits/s. */
+ public static final int BR_32_KBPS = 32000;
+
+ /** A bitrate of 48 kbits/s. */
+ public static final int BR_48_KBPS = 48000;
+
+ /** A bitrate of 64 kbits/s. */
+ public static final int BR_64_KBPS = 64000;
+
+ /** A bitrate of 96 kbits/s. */
+ public static final int BR_96_KBPS = 96000;
+
+ /** A bitrate of 128 kbits/s. */
+ public static final int BR_128_KBPS = 128000;
+
+ /** A bitrate of 192 kbits/s. */
+ public static final int BR_192_KBPS = 192000;
+
+ /** A bitrate of 256 kbits/s. */
+ public static final int BR_256_KBPS = 256000;
+
+ /** A bitrate of 288 kbits/s. */
+ public static final int BR_288_KBPS = 288000;
+
+ /** A bitrate of 384 kbits/s. */
+ public static final int BR_384_KBPS = 384000;
+
+ /** A bitrate of 512 kbits/s. */
+ public static final int BR_512_KBPS = 512000;
+
+ /** A bitrate of 800 kbits/s. */
+ public static final int BR_800_KBPS = 800000;
+
+ /** A bitrate of 2 Mbits/s. */
+ public static final int BR_2_MBPS = 2000000;
+
+ /** A bitrate of 5 Mbits/s. */
+ public static final int BR_5_MBPS = 5000000;
+
+ /** A bitrate of 8 Mbits/s. */
+ public static final int BR_8_MBPS = 8000000;
+ }
+
+ /**
+ * Defines all supported file types.
+ */
+ public final class FileType {
+ /** 3GPP file type. */
+ public static final int THREE_GPP = 0;
+
+ /** MP4 file type. */
+ public static final int MP4 = 1;
+
+ /** AMR file type. */
+ public static final int AMR = 2;
+
+ /** MP3 audio file type. */
+ public static final int MP3 = 3;
+
+ /** PCM audio file type. */
+ public static final int PCM = 4;
+
+ /** JPEG image file type. */
+ public static final int JPG = 5;
+
+ /** GIF image file type. */
+ public static final int GIF = 6;
+
+ /** PNG image file type. */
+ public static final int PNG = 7;
+
+ /** Unsupported file type. */
+ public static final int UNSUPPORTED = 255;
+ }
+
+ /**
+ * Defines rendering types. Rendering can only be applied to files
+ * containing video streams.
+ **/
+ public final class MediaRendering {
+ /**
+ * Resize to fit the output video resolution, changing the aspect ratio
+ * if needed.
+ */
+ public static final int RESIZING = 0;
+
+ /**
+ * Crop the input video to fit the output video resolution.
+ **/
+ public static final int CROPPING = 1;
+
+ /**
+ * Resize to fit the output video resolution but maintain the aspect
+ * ratio. This framing type adds black borders if needed.
+ */
+ public static final int BLACK_BORDERS = 2;
+ }
+
+ /**
+ * Defines the results.
+ */
+ public final class Result {
+ /** No error. result OK */
+ public static final int NO_ERROR = 0;
+
+ /** File not found */
+ public static final int ERR_FILE_NOT_FOUND = 1;
+
+ /**
+ * In case of UTF8 conversion, the size of the converted path will be
+ * more than the corresponding allocated buffer.
+ */
+ public static final int ERR_BUFFER_OUT_TOO_SMALL = 2;
+
+ /** Invalid file type. */
+ public static final int ERR_INVALID_FILE_TYPE = 3;
+
+ /** Invalid effect kind. */
+ public static final int ERR_INVALID_EFFECT_KIND = 4;
+
+ /** Invalid video effect. */
+ public static final int ERR_INVALID_VIDEO_EFFECT_TYPE = 5;
+
+ /** Invalid audio effect. */
+ public static final int ERR_INVALID_AUDIO_EFFECT_TYPE = 6;
+
+ /** Invalid video transition. */
+ public static final int ERR_INVALID_VIDEO_TRANSITION_TYPE = 7;
+
+ /** Invalid audio transition. */
+ public static final int ERR_INVALID_AUDIO_TRANSITION_TYPE = 8;
+
+ /** Invalid encoding frame rate. */
+ public static final int ERR_INVALID_VIDEO_ENCODING_FRAME_RATE = 9;
+
+ /** External effect is called but this function is not set. */
+ public static final int ERR_EXTERNAL_EFFECT_NULL = 10;
+
+ /** External transition is called but this function is not set. */
+ public static final int ERR_EXTERNAL_TRANSITION_NULL = 11;
+
+ /** Begin time cut is larger than the video clip duration. */
+ public static final int ERR_BEGIN_CUT_LARGER_THAN_DURATION = 12;
+
+ /** Begin cut time is larger or equal than end cut. */
+ public static final int ERR_BEGIN_CUT_LARGER_THAN_END_CUT = 13;
+
+ /** Two consecutive transitions are overlapping on one clip. */
+ public static final int ERR_OVERLAPPING_TRANSITIONS = 14;
+
+ /** Internal error, type size mismatch. */
+ public static final int ERR_ANALYSIS_DATA_SIZE_TOO_SMALL = 15;
+
+ /** An input 3GPP file is invalid/corrupted. */
+ public static final int ERR_INVALID_3GPP_FILE = 16;
+
+ /** A file contains an unsupported video format. */
+ public static final int ERR_UNSUPPORTED_INPUT_VIDEO_FORMAT = 17;
+
+ /** A file contains an unsupported audio format. */
+ public static final int ERR_UNSUPPORTED_INPUT_AUDIO_FORMAT = 18;
+
+ /** A file format is not supported. */
+ public static final int ERR_AMR_EDITING_UNSUPPORTED = 19;
+
+ /** An input clip has an unexpectedly large Video AU. */
+ public static final int ERR_INPUT_VIDEO_AU_TOO_LARGE = 20;
+
+ /** An input clip has an unexpectedly large Audio AU. */
+ public static final int ERR_INPUT_AUDIO_AU_TOO_LARGE = 21;
+
+ /** An input clip has a corrupted Audio AU. */
+ public static final int ERR_INPUT_AUDIO_CORRUPTED_AU = 22;
+
+ /** The video encoder encountered an Access Unit error. */
+ public static final int ERR_ENCODER_ACCES_UNIT_ERROR = 23;
+
+ /** Unsupported video format for Video Editing. */
+ public static final int ERR_EDITING_UNSUPPORTED_VIDEO_FORMAT = 24;
+
+ /** Unsupported H263 profile for Video Editing. */
+ public static final int ERR_EDITING_UNSUPPORTED_H263_PROFILE = 25;
+
+ /** Unsupported MPEG-4 profile for Video Editing. */
+ public static final int ERR_EDITING_UNSUPPORTED_MPEG4_PROFILE = 26;
+
+ /** Unsupported MPEG-4 RVLC tool for Video Editing. */
+ public static final int ERR_EDITING_UNSUPPORTED_MPEG4_RVLC = 27;
+
+ /** Unsupported audio format for Video Editing. */
+ public static final int ERR_EDITING_UNSUPPORTED_AUDIO_FORMAT = 28;
+
+ /** File contains no supported stream. */
+ public static final int ERR_EDITING_NO_SUPPORTED_STREAM_IN_FILE = 29;
+
+ /** File contains no video stream or an unsupported video stream. */
+ public static final int ERR_EDITING_NO_SUPPORTED_VIDEO_STREAM_IN_FILE = 30;
+
+ /** Internal error, clip analysis version mismatch. */
+ public static final int ERR_INVALID_CLIP_ANALYSIS_VERSION = 31;
+
+ /**
+ * At least one of the clip analysis has been generated on another
+ * platform (WIN32, ARM, etc.).
+ */
+ public static final int ERR_INVALID_CLIP_ANALYSIS_PLATFORM = 32;
+
+ /** Clips don't have the same video format (H263 or MPEG4). */
+ public static final int ERR_INCOMPATIBLE_VIDEO_FORMAT = 33;
+
+ /** Clips don't have the same frame size. */
+ public static final int ERR_INCOMPATIBLE_VIDEO_FRAME_SIZE = 34;
+
+ /** Clips don't have the same MPEG-4 time scale. */
+ public static final int ERR_INCOMPATIBLE_VIDEO_TIME_SCALE = 35;
+
+ /** Clips don't have the same use of MPEG-4 data partitioning. */
+ public static final int ERR_INCOMPATIBLE_VIDEO_DATA_PARTITIONING = 36;
+
+ /** MP3 clips can't be assembled. */
+ public static final int ERR_UNSUPPORTED_MP3_ASSEMBLY = 37;
+
+ /**
+ * The input 3GPP file does not contain any supported audio or video
+ * track.
+ */
+ public static final int ERR_NO_SUPPORTED_STREAM_IN_FILE = 38;
+
+ /**
+ * The volume of the added audio track (AddVolume) must be strictly
+ * greater than zero.
+ */
+ public static final int ERR_ADDVOLUME_EQUALS_ZERO = 39;
+
+ /**
+ * The time at which an audio track is added can't be higher than the
+ * input video track duration.
+ */
+ public static final int ERR_ADDCTS_HIGHER_THAN_VIDEO_DURATION = 40;
+
+ /** The audio track file format setting is undefined. */
+ public static final int ERR_UNDEFINED_AUDIO_TRACK_FILE_FORMAT = 41;
+
+ /** The added audio track stream has an unsupported format. */
+ public static final int ERR_UNSUPPORTED_ADDED_AUDIO_STREAM = 42;
+
+ /** The audio mixing feature doesn't support the audio track type. */
+ public static final int ERR_AUDIO_MIXING_UNSUPPORTED = 43;
+
+ /** The audio mixing feature doesn't support MP3 audio tracks. */
+ public static final int ERR_AUDIO_MIXING_MP3_UNSUPPORTED = 44;
+
+ /**
+ * An added audio track limits the available features: uiAddCts must be
+ * 0 and bRemoveOriginal must be true.
+ */
+ public static final int ERR_FEATURE_UNSUPPORTED_WITH_AUDIO_TRACK = 45;
+
+ /**
+ * An added audio track limits the available features: uiAddCts must be
+ * 0 and bRemoveOriginal must be true.
+ */
+ public static final int ERR_FEATURE_UNSUPPORTED_WITH_AAC = 46;
+
+ /** Input audio track is not of a type that can be mixed with output. */
+ public static final int ERR_AUDIO_CANNOT_BE_MIXED = 47;
+
+ /** Input audio track is not AMR-NB, so it can't be mixed with output. */
+ public static final int ERR_ONLY_AMRNB_INPUT_CAN_BE_MIXED = 48;
+
+ /**
+ * An added EVRC audio track limits the available features: uiAddCts must
+ * be 0 and bRemoveOriginal must be true.
+ */
+ public static final int ERR_FEATURE_UNSUPPORTED_WITH_EVRC = 49;
+
+ /** H263 profiles other than 0 are not supported. */
+ public static final int ERR_H263_PROFILE_NOT_SUPPORTED = 51;
+
+ /** File contains no video stream or an unsupported video stream. */
+ public static final int ERR_NO_SUPPORTED_VIDEO_STREAM_IN_FILE = 52;
+
+ /** Transcoding of the input file(s) is necessary. */
+ public static final int WAR_TRANSCODING_NECESSARY = 53;
+
+ /**
+ * The size of the output file will exceed the maximum configured value.
+ */
+ public static final int WAR_MAX_OUTPUT_SIZE_EXCEEDED = 54;
+
+ /** The time scale is too big. */
+ public static final int WAR_TIMESCALE_TOO_BIG = 55;
+
+ /** The year is out of range */
+ public static final int ERR_CLOCK_BAD_REF_YEAR = 56;
+
+ /** The directory could not be opened */
+ public static final int ERR_DIR_OPEN_FAILED = 57;
+
+ /** The directory could not be read */
+ public static final int ERR_DIR_READ_FAILED = 58;
+
+ /** There are no more entries in the current directory */
+ public static final int ERR_DIR_NO_MORE_ENTRY = 59;
+
+ /** The input parameter/s has error */
+ public static final int ERR_PARAMETER = 60;
+
+ /** There is a state machine error */
+ public static final int ERR_STATE = 61;
+
+ /** Memory allocation failed */
+ public static final int ERR_ALLOC = 62;
+
+ /** Context is invalid */
+ public static final int ERR_BAD_CONTEXT = 63;
+
+ /** Context creation failed */
+ public static final int ERR_CONTEXT_FAILED = 64;
+
+ /** Invalid stream ID */
+ public static final int ERR_BAD_STREAM_ID = 65;
+
+ /** Invalid option ID */
+ public static final int ERR_BAD_OPTION_ID = 66;
+
+ /** The option is write only */
+ public static final int ERR_WRITE_ONLY = 67;
+
+ /** The option is read only */
+ public static final int ERR_READ_ONLY = 68;
+
+ /** The feature is not implemented in this version */
+ public static final int ERR_NOT_IMPLEMENTED = 69;
+
+ /** The media type is not supported */
+ public static final int ERR_UNSUPPORTED_MEDIA_TYPE = 70;
+
+ /** No data to be encoded */
+ public static final int WAR_NO_DATA_YET = 71;
+
+ /** No data to be decoded */
+ public static final int WAR_NO_MORE_STREAM = 72;
+
+ /** Time stamp is invalid */
+ public static final int WAR_INVALID_TIME = 73;
+
+ /** No more data to be decoded */
+ public static final int WAR_NO_MORE_AU = 74;
+
+ /** Semaphore timed out */
+ public static final int WAR_TIME_OUT = 75;
+
+ /** Memory buffer is full */
+ public static final int WAR_BUFFER_FULL = 76;
+
+ /** Server has asked for redirection */
+ public static final int WAR_REDIRECT = 77;
+
+ /** Too many streams in input */
+ public static final int WAR_TOO_MUCH_STREAMS = 78;
+
+ /** The file cannot be opened/ written into as it is locked */
+ public static final int ERR_FILE_LOCKED = 79;
+
+ /** The file access mode is invalid */
+ public static final int ERR_FILE_BAD_MODE_ACCESS = 80;
+
+ /** The file pointer points to an invalid location */
+ public static final int ERR_FILE_INVALID_POSITION = 81;
+
+ /** Invalid string */
+ public static final int ERR_STR_BAD_STRING = 94;
+
+ /** The input string cannot be converted */
+ public static final int ERR_STR_CONV_FAILED = 95;
+
+ /** The string size is too large */
+ public static final int ERR_STR_OVERFLOW = 96;
+
+ /** Bad string arguments */
+ public static final int ERR_STR_BAD_ARGS = 97;
+
+ /** The string value is larger than maximum size allowed */
+ public static final int WAR_STR_OVERFLOW = 98;
+
+ /** The string value is not present in this comparison operation */
+ public static final int WAR_STR_NOT_FOUND = 99;
+
+ /** The thread is not started */
+ public static final int ERR_THREAD_NOT_STARTED = 100;
+
+ /** Transcoding done warning */
+ public static final int WAR_TRANSCODING_DONE = 101;
+
+ /** Unsupported media type */
+ public static final int WAR_MEDIATYPE_NOT_SUPPORTED = 102;
+
+ /** Input file contains invalid/unsupported streams */
+ public static final int ERR_INPUT_FILE_CONTAINS_NO_SUPPORTED_STREAM = 103;
+
+ /** Invalid input file */
+ public static final int ERR_INVALID_INPUT_FILE = 104;
+
+ /** Invalid output video format */
+ public static final int ERR_UNDEFINED_OUTPUT_VIDEO_FORMAT = 105;
+
+ /** Invalid output video frame size */
+ public static final int ERR_UNDEFINED_OUTPUT_VIDEO_FRAME_SIZE = 106;
+
+ /** Invalid output video frame rate */
+ public static final int ERR_UNDEFINED_OUTPUT_VIDEO_FRAME_RATE = 107;
+
+ /** Invalid output audio format */
+ public static final int ERR_UNDEFINED_OUTPUT_AUDIO_FORMAT = 108;
+
+ /** Invalid video frame size for H.263 */
+ public static final int ERR_INVALID_VIDEO_FRAME_SIZE_FOR_H263 = 109;
+
+ /** Invalid video frame rate for H.263 */
+ public static final int ERR_INVALID_VIDEO_FRAME_RATE_FOR_H263 = 110;
+
+ /** Invalid playback duration */
+ public static final int ERR_DURATION_IS_NULL = 111;
+
+ /** Invalid H.263 profile in file */
+ public static final int ERR_H263_FORBIDDEN_IN_MP4_FILE = 112;
+
+ /** Invalid AAC sampling frequency */
+ public static final int ERR_INVALID_AAC_SAMPLING_FREQUENCY = 113;
+
+ /** Audio conversion failure */
+ public static final int ERR_AUDIO_CONVERSION_FAILED = 114;
+
+ /** Invalid trim start and end times */
+ public static final int ERR_BEGIN_CUT_EQUALS_END_CUT = 115;
+
+ /** End time smaller than start time for trim */
+ public static final int ERR_END_CUT_SMALLER_THAN_BEGIN_CUT = 116;
+
+ /** The configured maximum output file size is too small */
+ public static final int ERR_MAXFILESIZE_TOO_SMALL = 117;
+
+ /** Output video bitrate is too low */
+ public static final int ERR_VIDEOBITRATE_TOO_LOW = 118;
+
+ /** Output audio bitrate is too low */
+ public static final int ERR_AUDIOBITRATE_TOO_LOW = 119;
+
+ /** Output video bitrate is too high */
+ public static final int ERR_VIDEOBITRATE_TOO_HIGH = 120;
+
+ /** Output audio bitrate is too high */
+ public static final int ERR_AUDIOBITRATE_TOO_HIGH = 121;
+
+ /** Output file size is too small */
+ public static final int ERR_OUTPUT_FILE_SIZE_TOO_SMALL = 122;
+
+ /** Unknown stream type */
+ public static final int ERR_READER_UNKNOWN_STREAM_TYPE = 123;
+
+ /** Invalid metadata in input stream */
+ public static final int WAR_READER_NO_METADATA = 124;
+
+ /** Invalid file reader info warning */
+ public static final int WAR_READER_INFORMATION_NOT_PRESENT = 125;
+
+ /** Warning to indicate that the writer is being stopped */
+ public static final int WAR_WRITER_STOP_REQ = 131;
+
+ /** Video decoder failed to provide frame for transcoding */
+ public static final int WAR_VIDEORENDERER_NO_NEW_FRAME = 132;
+
+ /** Video deblocking filter is not implemented */
+ public static final int WAR_DEBLOCKING_FILTER_NOT_IMPLEMENTED = 133;
+
+ /** H.263 decoder profile not supported */
+ public static final int ERR_DECODER_H263_PROFILE_NOT_SUPPORTED = 134;
+
+ /** The input file contains an unsupported H.263 profile */
+ public static final int ERR_DECODER_H263_NOT_BASELINE = 135;
+
+ /** There is no more space to store the output file */
+ public static final int ERR_NOMORE_SPACE_FOR_FILE = 136;
+
+ /** Internal error. */
+ public static final int ERR_INTERNAL = 255;
+
+ }
+
+ /**
+ * Defines output video formats.
+ */
+ public final class VideoFormat {
+ /** No video present in output clip. Used to generate audio only clip */
+ public static final int NO_VIDEO = 0;
+
+ /** H263 baseline format. */
+ public static final int H263 = 1;
+
+ /** MPEG4 video Simple Profile format. */
+ public static final int MPEG4 = 2;
+
+ /** MPEG4 video Simple Profile format with support for EMP. */
+ public static final int MPEG4_EMP = 3;
+
+ /** H264 video */
+ public static final int H264 = 4;
+
+ /** No transcoding. The output video format is the same as the input video format */
+ public static final int NULL_VIDEO = 254;
+
+ /** Unsupported video format. */
+ public static final int UNSUPPORTED = 255;
+ }
+
+ /** Defines video profiles and levels. */
+ public final class VideoProfile {
+ /** MPEG4, Simple Profile, Level 0. */
+ public static final int MPEG4_SP_LEVEL_0 = 0;
+
+ /** MPEG4, Simple Profile, Level 0B. */
+ public static final int MPEG4_SP_LEVEL_0B = 1;
+
+ /** MPEG4, Simple Profile, Level 1. */
+ public static final int MPEG4_SP_LEVEL_1 = 2;
+
+ /** MPEG4, Simple Profile, Level 2. */
+ public static final int MPEG4_SP_LEVEL_2 = 3;
+
+ /** MPEG4, Simple Profile, Level 3. */
+ public static final int MPEG4_SP_LEVEL_3 = 4;
+
+ /** H263, Profile 0, Level 10. */
+ public static final int H263_PROFILE_0_LEVEL_10 = 5;
+
+ /** H263, Profile 0, Level 20. */
+ public static final int H263_PROFILE_0_LEVEL_20 = 6;
+
+ /** H263, Profile 0, Level 30. */
+ public static final int H263_PROFILE_0_LEVEL_30 = 7;
+
+ /** H263, Profile 0, Level 40. */
+ public static final int H263_PROFILE_0_LEVEL_40 = 8;
+
+ /** H263, Profile 0, Level 45. */
+ public static final int H263_PROFILE_0_LEVEL_45 = 9;
+
+ /** MPEG4, Simple Profile, Level 4A. */
+ public static final int MPEG4_SP_LEVEL_4A = 10;
+
+ /** MPEG4, Simple Profile, Level 5. */
+ public static final int MPEG4_SP_LEVEL_5 = 11;
+
+ /** H264, Profile 0, Level 1. */
+ public static final int H264_PROFILE_0_LEVEL_1 = 12;
+
+ /** H264, Profile 0, Level 1b. */
+ public static final int H264_PROFILE_0_LEVEL_1b = 13;
+
+ /** H264, Profile 0, Level 1.1 */
+ public static final int H264_PROFILE_0_LEVEL_1_1 = 14;
+
+ /** H264, Profile 0, Level 1.2 */
+ public static final int H264_PROFILE_0_LEVEL_1_2 = 15;
+
+ /** H264, Profile 0, Level 1.3 */
+ public static final int H264_PROFILE_0_LEVEL_1_3 = 16;
+
+ /** H264, Profile 0, Level 2. */
+ public static final int H264_PROFILE_0_LEVEL_2 = 17;
+
+ /** H264, Profile 0, Level 2.1 */
+ public static final int H264_PROFILE_0_LEVEL_2_1 = 18;
+
+ /** H264, Profile 0, Level 2.2 */
+ public static final int H264_PROFILE_0_LEVEL_2_2 = 19;
+
+ /** H264, Profile 0, Level 3. */
+ public static final int H264_PROFILE_0_LEVEL_3 = 20;
+
+ /** H264, Profile 0, Level 3.1 */
+ public static final int H264_PROFILE_0_LEVEL_3_1 = 21;
+
+ /** H264, Profile 0, Level 3.2 */
+ public static final int H264_PROFILE_0_LEVEL_3_2 = 22;
+
+ /** H264, Profile 0, Level 4. */
+ public static final int H264_PROFILE_0_LEVEL_4 = 23;
+
+ /** H264, Profile 0, Level 4.1 */
+ public static final int H264_PROFILE_0_LEVEL_4_1 = 24;
+
+ /** H264, Profile 0, Level 4.2 */
+ public static final int H264_PROFILE_0_LEVEL_4_2 = 25;
+
+ /** H264, Profile 0, Level 5. */
+ public static final int H264_PROFILE_0_LEVEL_5 = 26;
+
+ /** H264, Profile 0, Level 5.1 */
+ public static final int H264_PROFILE_0_LEVEL_5_1 = 27;
+
+ /** Profile out of range. */
+ public static final int OUT_OF_RANGE = 255;
+ }
+
+ /** Defines video frame sizes. */
+ public final class VideoFrameSize {
+
+ public static final int SIZE_UNDEFINED = -1;
+
+ /** SQCIF 128 x 96 pixels. */
+ public static final int SQCIF = 0;
+
+ /** QQVGA 160 x 120 pixels. */
+ public static final int QQVGA = 1;
+
+ /** QCIF 176 x 144 pixels. */
+ public static final int QCIF = 2;
+
+ /** QVGA 320 x 240 pixels. */
+ public static final int QVGA = 3;
+
+ /** CIF 352 x 288 pixels. */
+ public static final int CIF = 4;
+
+ /** VGA 640 x 480 pixels. */
+ public static final int VGA = 5;
+
+ /** WVGA 800 X 480 pixels */
+ public static final int WVGA = 6;
+
+ /** NTSC 720 X 480 pixels */
+ public static final int NTSC = 7;
+
+ /** 640 x 360 */
+ public static final int nHD = 8;
+
+ /** 854 x 480 */
+ public static final int WVGA16x9 = 9;
+
+ /** 720p 1280 X 720 */
+ public static final int V720p = 10;
+
+ /** 1080 x 720 */
+ public static final int W720p = 11;
+
+ /** 960 x 720 */
+ public static final int S720p = 12;
+ }
+
+ /**
+ * Defines output video frame rates.
+ */
+ public final class VideoFrameRate {
+ /** Frame rate of 5 frames per second. */
+ public static final int FR_5_FPS = 0;
+
+ /** Frame rate of 7.5 frames per second. */
+ public static final int FR_7_5_FPS = 1;
+
+ /** Frame rate of 10 frames per second. */
+ public static final int FR_10_FPS = 2;
+
+ /** Frame rate of 12.5 frames per second. */
+ public static final int FR_12_5_FPS = 3;
+
+ /** Frame rate of 15 frames per second. */
+ public static final int FR_15_FPS = 4;
+
+ /** Frame rate of 20 frames per second. */
+ public static final int FR_20_FPS = 5;
+
+ /** Frame rate of 25 frames per second. */
+ public static final int FR_25_FPS = 6;
+
+ /** Frame rate of 30 frames per second. */
+ public static final int FR_30_FPS = 7;
+ }
+
+ /**
+ * Defines Video Effect Types.
+ */
+ public static class VideoEffect {
+
+ public static final int NONE = 0;
+
+ public static final int FADE_FROM_BLACK = 8;
+
+ public static final int CURTAIN_OPENING = 9;
+
+ public static final int FADE_TO_BLACK = 16;
+
+ public static final int CURTAIN_CLOSING = 17;
+
+ public static final int EXTERNAL = 256;
+
+ public static final int BLACK_AND_WHITE = 257;
+
+ public static final int PINK = 258;
+
+ public static final int GREEN = 259;
+
+ public static final int SEPIA = 260;
+
+ public static final int NEGATIVE = 261;
+
+ public static final int FRAMING = 262;
+
+ public static final int TEXT = 263;
+
+ public static final int ZOOM_IN = 264;
+
+ public static final int ZOOM_OUT = 265;
+
+ public static final int FIFTIES = 266;
+
+ public static final int COLORRGB16 = 267;
+
+ public static final int GRADIENT = 268;
+ }
+
+ /**
+ * Defines the video transitions.
+ */
+ public static class VideoTransition {
+ /** No transition */
+ public static final int NONE = 0;
+
+ /** Cross fade transition */
+ public static final int CROSS_FADE = 1;
+
+ /** External transition. Currently not available. */
+ public static final int EXTERNAL = 256;
+
+ /** AlphaMagic transition. */
+ public static final int ALPHA_MAGIC = 257;
+
+ /** Slide transition. */
+ public static final int SLIDE_TRANSITION = 258;
+
+ /** Fade to black transition. */
+ public static final int FADE_BLACK = 259;
+ }
+
+ /**
+ * Defines settings for the AlphaMagic transition
+ */
+ public static class AlphaMagicSettings {
+ /** Name of the alpha file (JPEG file). */
+ public String file;
+
+ /** Blending percentage [0..100] 0 = no blending. */
+ public int blendingPercent;
+
+ /** Invert the default rotation direction of the AlphaMagic effect. */
+ public boolean invertRotation;
+
+ public int rgbWidth;
+ public int rgbHeight;
+ }
+
+ /** Defines the direction of the Slide transition. */
+ public static final class SlideDirection {
+
+ /** Right out left in. */
+ public static final int RIGHT_OUT_LEFT_IN = 0;
+
+ /** Left out right in. */
+ public static final int LEFT_OUT_RIGTH_IN = 1;
+
+ /** Top out bottom in. */
+ public static final int TOP_OUT_BOTTOM_IN = 2;
+
+ /** Bottom out top in */
+ public static final int BOTTOM_OUT_TOP_IN = 3;
+ }
+
+ /** Defines the Slide transition settings. */
+ public static class SlideTransitionSettings {
+ /**
+ * Direction of the slide transition. See {@link SlideDirection
+ * SlideDirection} for valid values.
+ */
+ public int direction;
+ }
+
+ /**
+ * Defines the settings of a single clip.
+ */
+ public static class ClipSettings {
+
+ /**
+ * The path to the clip file.
+ * <p>
+ * File format of the clip, it can be:
+ * <ul>
+ * <li>3GP file containing MPEG4/H263/H264 video and AAC/AMR audio
+ * <li>JPG file
+ * </ul>
+ */
+
+ public String clipPath;
+
+ /**
+ * The path of the decoded file. This is used only for image files.
+ */
+ public String clipDecodedPath;
+
+ /**
+ * The path of the Original file. This is used only for image files.
+ */
+ public String clipOriginalPath;
+
+ /**
+ * File type of the clip. See {@link FileType FileType} for valid
+ * values.
+ */
+ public int fileType;
+
+ /** Begin of the cut in the clip in milliseconds. */
+ public int beginCutTime;
+
+ /**
+ * End of the cut in the clip in milliseconds. Set both
+ * <code>beginCutTime</code> and <code>endCutTime</code> to
+ * <code>0</code> to get the full length of the clip without a cut. In
+ * case of JPG clip, this is the duration of the JPEG file.
+ */
+ public int endCutTime;
+
+ /**
+ * Begin of the cut in the clip in percentage of the file duration.
+ */
+ public int beginCutPercent;
+
+ /**
+ * End of the cut in the clip in percentage of the file duration. Set
+ * both <code>beginCutPercent</code> and <code>endCutPercent</code> to
+ * <code>0</code> to get the full length of the clip without a cut.
+ */
+ public int endCutPercent;
+
+ /** Enable panning and zooming. */
+ public boolean panZoomEnabled;
+
+ /** Zoom percentage at start of clip. 0 = no zoom, 100 = full zoom */
+ public int panZoomPercentStart;
+
+ /** Top left X coordinate at start of clip. */
+ public int panZoomTopLeftXStart;
+
+ /** Top left Y coordinate at start of clip. */
+ public int panZoomTopLeftYStart;
+
+ /** Zoom percentage at start of clip. 0 = no zoom, 100 = full zoom */
+ public int panZoomPercentEnd;
+
+ /** Top left X coordinate at end of clip. */
+ public int panZoomTopLeftXEnd;
+
+ /** Top left Y coordinate at end of clip. */
+ public int panZoomTopLeftYEnd;
+
+ /**
+ * Set The media rendering. See {@link MediaRendering MediaRendering}
+ * for valid values.
+ */
+ public int mediaRendering;
+
+ /**
+ * RGB width and Height
+ */
+ public int rgbWidth;
+ public int rgbHeight;
+ }
+
+ /**
+ * Defines settings for a transition.
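+ * <p>
+ * Illustrative sketch only; the mask file path below is a hypothetical
+ * example value, not an API default:
+ * <pre>
+ * TransitionSettings ts = new TransitionSettings();
+ * ts.duration = 1500;
+ * ts.videoTransitionType = VideoTransition.ALPHA_MAGIC;
+ * ts.audioTransitionType = AudioTransition.CROSS_FADE;
+ * ts.transitionBehaviour = TransitionBehaviour.LINEAR;
+ * ts.alphaSettings = new AlphaMagicSettings();
+ * ts.alphaSettings.file = "/sdcard/mask.jpg";
+ * ts.alphaSettings.blendingPercent = 50;
+ * </pre>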
+ */
+ public static class TransitionSettings {
+
+ /** Duration of the transition in msec. */
+ public int duration;
+
+ /**
+ * Transition type for video. See {@link VideoTransition
+ * VideoTransition} for valid values.
+ */
+ public int videoTransitionType;
+
+ /**
+ * Transition type for audio. See {@link AudioTransition
+ * AudioTransition} for valid values.
+ */
+ public int audioTransitionType;
+
+ /**
+ * Transition behaviour. See {@link TransitionBehaviour
+ * TransitionBehaviour} for valid values.
+ */
+ public int transitionBehaviour;
+
+ /**
+ * Settings for AlphaMagic transition. Only needs to be set if
+ * <code>videoTransitionType</code> is set to
+ * <code>VideoTransition.ALPHA_MAGIC</code>. See
+ * {@link AlphaMagicSettings AlphaMagicSettings}.
+ */
+ public AlphaMagicSettings alphaSettings;
+
+ /**
+ * Settings for the Slide transition. See
+ * {@link SlideTransitionSettings SlideTransitionSettings}.
+ */
+ public SlideTransitionSettings slideSettings;
+ }
+
+ public static final class AudioTransition {
+ /** No audio transition. */
+ public static final int NONE = 0;
+
+ /** Cross-fade audio transition. */
+ public static final int CROSS_FADE = 1;
+ }
+
+ /**
+ * Defines transition behaviours.
+ **/
+
+ public static final class TransitionBehaviour {
+
+ /** The transition uses an increasing speed. */
+ public static final int SPEED_UP = 0;
+
+ /** The transition uses a linear (constant) speed. */
+ public static final int LINEAR = 1;
+
+ /** The transition uses a decreasing speed. */
+ public static final int SPEED_DOWN = 2;
+
+ /**
+ * The transition uses a constant speed, but slows down in the middle
+ * section.
+ */
+ public static final int SLOW_MIDDLE = 3;
+
+ /**
+ * The transition uses a constant speed, but increases speed in the
+ * middle section.
+ */
+ public static final int FAST_MIDDLE = 4;
+ }
+
+ /** Defines settings for the background music. */
+ public static class BackgroundMusicSettings {
+
+ /** Background music file. */
+ public String file;
+
+ /** File type. See {@link FileType FileType} for valid values. */
+ public int fileType;
+
+ /**
+ * Insertion time, in milliseconds, in the output video at which the
+ * background music must be inserted.
+ */
+ public long insertionTime;
+
+ /**
+ * Volume of the background music track, as a percentage. If this field
+ * is set to 100, the background music replaces the audio from the video
+ * input file(s).
+ */
+ public int volumePercent;
+
+ /**
+ * Start time in milliseconds in the background music file from where
+ * the background music should loop. Set both <code>beginLoop</code> and
+ * <code>endLoop</code> to <code>0</code> to disable looping.
+ */
+ public long beginLoop;
+
+ /**
+ * End time in milliseconds in the background music file to where the
+ * background music should loop. Set both <code>beginLoop</code> and
+ * <code>endLoop</code> to <code>0</code> to disable looping.
+ */
+ public long endLoop;
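+
+ /*
+ * Example with hypothetical values: beginLoop = 5000 and endLoop = 35000
+ * loop the section between 5 s and 35 s of the music file, while
+ * beginLoop = endLoop = 0 plays the track without looping.
+ */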
+
+ public boolean enableDucking;
+
+ public int duckingThreshold;
+
+ public int lowVolume;
+
+ public boolean isLooping;
+
+ }
+
+ /** Defines settings for an effect. */
+ public static class AudioEffect {
+ /** No audio effect. */
+ public static final int NONE = 0;
+
+ /** Fade-in effect. */
+ public static final int FADE_IN = 8;
+
+ /** Fade-out effect. */
+ public static final int FADE_OUT = 16;
+ }
+
+ /** Defines the effect settings. */
+ public static class EffectSettings {
+
+ /** Start time of the effect in milliseconds. */
+ public int startTime;
+
+ /** Duration of the effect in milliseconds. */
+ public int duration;
+
+ /**
+ * Video effect type. See {@link VideoEffect VideoEffect} for valid
+ * values.
+ */
+ public int videoEffectType;
+
+ /**
+ * Audio effect type. See {@link AudioEffect AudioEffect} for valid
+ * values.
+ */
+ public int audioEffectType;
+
+ /**
+ * Start time of the effect as a percentage of the clip duration. A
+ * value of 0 percent means the effect starts at the beginning of the
+ * clip.
+ */
+ public int startPercent;
+
+ /**
+ * Duration of the effect as a percentage of the clip duration.
+ */
+ public int durationPercent;
+
+ /**
+ * Framing file.
+ * <p>
+ * This field is only used when the field <code>videoEffectType</code>
+ * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise
+ * this field is ignored.
+ */
+ public String framingFile;
+
+ /**
+ * Framing buffer.
+ * <p>
+ * This field is only used when the field <code>videoEffectType</code>
+ * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise
+ * this field is ignored.
+ */
+ public int[] framingBuffer;
+
+ /**
+ * Bitmap type. Can be RGB_565 (4), ARGB_4444 (5) or ARGB_8888 (6).
+ */
+
+ public int bitmapType;
+
+ public int width;
+
+ public int height;
+
+ /**
+ * Top left x coordinate. This coordinate is used to set the x
+ * coordinate of the picture in the framing file when the framing file
+ * is selected. The x coordinate is also used to set the location of the
+ * text in the text effect.
+ * <p>
+ * This field is only used when the field <code>videoEffectType</code>
+ * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING} or
+ * {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this field is
+ * ignored.
+ */
+ public int topLeftX;
+
+ /**
+ * Top left y coordinate. This coordinate is used to set the y
+ * coordinate of the picture in the framing file when the framing file
+ * is selected. The y coordinate is also used to set the location of the
+ * text in the text effect.
+ * <p>
+ * This field is only used when the field <code>videoEffectType</code>
+ * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING} or
+ * {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this field is
+ * ignored.
+ */
+ public int topLeftY;
+
+ /**
+ * Whether the frame should be resized. If this field is set to
+ * <code>true</code> then the frame size is matched with the output
+ * video size.
+ * <p>
+ * This field is only used when the field <code>videoEffectType</code>
+ * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise
+ * this field is ignored.
+ */
+ public boolean framingResize;
+
+ /**
+ * Size to which the framing buffer needs to be resized. This is only
+ * valid if <code>framingResize</code> is true.
+ */
+ public int framingScaledSize;
+ /**
+ * Text to insert in the video.
+ * <p>
+ * This field is only used when the field <code>videoEffectType</code>
+ * is set to {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this
+ * field is ignored.
+ */
+ public String text;
+
+ /**
+ * Text attributes for the text to insert in the video.
+ * <p>
+ * This field is only used when the field <code>videoEffectType</code>
+ * is set to {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this
+ * field is ignored. For more details about this field see the
+ * integration guide.
+ */
+ public String textRenderingData;
+
+ /** Width of the text buffer in pixels. */
+ public int textBufferWidth;
+
+ /** Height of the text buffer in pixels. */
+ public int textBufferHeight;
+
+ /**
+ * Processing rate for the fifties effect. A high value (e.g. 30)
+ * results in high effect strength.
+ * <p>
+ * This field is only used when the field <code>videoEffectType</code>
+ * is set to {@link VideoEffect#FIFTIES VideoEffect.FIFTIES}. Otherwise
+ * this field is ignored.
+ */
+ public int fiftiesFrameRate;
+
+ /**
+ * RGB 16 color of the RGB16 and gradient color effect.
+ * <p>
+ * This field is only used when the field <code>videoEffectType</code>
+ * is set to {@link VideoEffect#COLORRGB16 VideoEffect.COLORRGB16} or
+ * {@link VideoEffect#GRADIENT VideoEffect.GRADIENT}. Otherwise this
+ * field is ignored.
+ */
+ public int rgb16InputColor;
+
+ /**
+ * Start alpha blending percentage.
+ * <p>
+ * This field is only used when the field <code>videoEffectType</code>
+ * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
+ * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
+ * is ignored.
+ */
+ public int alphaBlendingStartPercent;
+
+ /**
+ * Middle alpha blending percentage.
+ * <p>
+ * This field is only used when the field <code>videoEffectType</code>
+ * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
+ * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
+ * is ignored.
+ */
+ public int alphaBlendingMiddlePercent;
+
+ /**
+ * End alpha blending percentage.
+ * <p>
+ * This field is only used when the field <code>videoEffectType</code>
+ * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
+ * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
+ * is ignored.
+ */
+ public int alphaBlendingEndPercent;
+
+ /**
+ * Duration of the fade-in phase, as a percentage of the effect duration.
+ * <p>
+ * This field is only used when the field <code>videoEffectType</code>
+ * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
+ * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
+ * is ignored.
+ */
+ public int alphaBlendingFadeInTimePercent;
+
+ /**
+ * Duration of the fade-out phase, as a percentage of the effect duration.
+ * <p>
+ * This field is only used when the field <code>videoEffectType</code>
+ * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
+ * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
+ * is ignored.
+ */
+ public int alphaBlendingFadeOutTimePercent;
+ }
+
+ /** Defines the clip properties for preview */
+ public static class PreviewClips {
+
+ /**
+ * The path to the clip file.
+ * <p>
+ * File format of the clip, it can be:
+ * <ul>
+ * <li>3GP file containing MPEG4/H263 video and AAC/AMR audio
+ * <li>JPG file
+ * </ul>
+ */
+
+ public String clipPath;
+
+ /**
+ * File type of the clip. See {@link FileType FileType} for valid
+ * values.
+ */
+ public int fileType;
+
+ /** Begin playback time of the clip in milliseconds. */
+ public long beginPlayTime;
+
+ /** End playback time of the clip in milliseconds. */
+ public long endPlayTime;
+
+ /**
+ * Set The media rendering. See {@link MediaRendering MediaRendering}
+ * for valid values.
+ */
+ public int mediaRendering;
+
+ }
+
+ /** Defines the audio settings. */
+ public static class AudioSettings {
+
+ String pFile;
+
+ String Id;
+
+ /** If true, the original audio track is not taken into account */
+ boolean bRemoveOriginal;
+
+ /** Number of channels (1=mono, 2=stereo) of the BGM clip */
+ int channels;
+
+ /** Sampling audio frequency (8000 for AMR, 16000 or more for AAC) of the BGM clip */
+ int Fs;
+
+ /** Extended frequency for AAC+, eAAC+ streams of the BGM clip */
+ int ExtendedFs;
+
+ /** Time, in milliseconds, at which the added audio track is inserted */
+ long startMs;
+
+ long beginCutTime;
+
+ long endCutTime;
+
+ int fileType;
+
+ /** Volume, in percentage, of the added audio track */
+ int volume;
+
+ /** Looping on/off */
+ boolean loop;
+
+ /** Audio mix and ducking settings */
+ int ducking_threshold;
+
+ int ducking_lowVolume;
+
+ boolean bInDucking_enable;
+
+ /** PCM file path */
+ String pcmFilePath;
+
+ }
+
+ /** Encapsulates preview clips and effect settings */
+ public static class PreviewSettings {
+
+ public PreviewClips[] previewClipsArray;
+
+ /** The effect settings. */
+ public EffectSettings[] effectSettingsArray;
+
+ }
+
+ /** Encapsulates clip properties */
+ public static class PreviewClipProperties {
+
+ public Properties[] clipProperties;
+
+ }
+
+ /** Defines the editing settings. */
+ public static class EditSettings {
+
+ /**
+ * Array of clip settings. There is one <code>clipSetting</code> for
+ * each clip.
+ */
+ public ClipSettings[] clipSettingsArray;
+
+ /**
+ * Array of transition settings. If there are n clips (and thus n
+ * <code>clipSettings</code>) then there are (n-1) transitions and (n-1)
+ * <code>transistionSettings</code> in
+ * <code>transistionSettingsArray</code>.
+ */
+ public TransitionSettings[] transitionSettingsArray;
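+
+ /*
+ * For example (hypothetical sizes): a storyboard with three clips uses a
+ * clipSettingsArray of length 3 and a transitionSettingsArray of length 2,
+ * where transitionSettingsArray[0] describes the transition between the
+ * first and second clips.
+ */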
+
+ /** The effect settings. */
+ public EffectSettings[] effectSettingsArray;
+
+ /**
+ * Video frame rate of the output clip. See {@link VideoFrameRate
+ * VideoFrameRate} for valid values.
+ */
+ public int videoFrameRate;
+
+ /** Output file name. Must be an absolute path. */
+ public String outputFile;
+
+ /**
+ * Size of the video frames in the output clip. See
+ * {@link VideoFrameSize VideoFrameSize} for valid values.
+ */
+ public int videoFrameSize;
+
+ /**
+ * Format of the video stream in the output clip. See
+ * {@link VideoFormat VideoFormat} for valid values.
+ */
+ public int videoFormat;
+
+ /**
+ * Format of the audio stream in the output clip. See
+ * {@link AudioFormat AudioFormat} for valid values.
+ */
+ public int audioFormat;
+
+ /**
+ * Sampling frequency of the audio stream in the output clip. See
+ * {@link AudioSamplingFrequency AudioSamplingFrequency} for valid
+ * values.
+ */
+ public int audioSamplingFreq;
+
+ /**
+ * Maximum file size. By setting this you can set the maximum size of
+ * the output clip. Set it to <code>0</code> to let the class ignore
+ * this field.
+ */
+ public int maxFileSize;
+
+ /**
+ * Number of audio channels in output clip. Use <code>0</code> for none,
+ * <code>1</code> for mono or <code>2</code> for stereo. None is only
+ * allowed when the <code>audioFormat</code> field is set to
+ * {@link AudioFormat#NO_AUDIO AudioFormat.NO_AUDIO} or
+ * {@link AudioFormat#NULL_AUDIO AudioFormat.NULL_AUDIO}. Mono is only
+ * allowed when the <code>audioFormat</code> field is set to
+ * {@link AudioFormat#AAC AudioFormat.AAC}.
+ */
+ public int audioChannels;
+
+ /** Video bitrate. See {@link Bitrate Bitrate} for valid values. */
+ public int videoBitrate;
+
+ /** Audio bitrate. See {@link Bitrate Bitrate} for valid values. */
+ public int audioBitrate;
+
+ /**
+ * Background music settings. See {@link BackgroundMusicSettings
+ * BackgroundMusicSettings} for valid values.
+ */
+ public BackgroundMusicSettings backgroundMusicSettings;
+
+ public int primaryTrackVolume;
+
+ }
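+
+ /*
+ * Minimal illustrative sketch of populating an EditSettings instance for an
+ * export; all values below are hypothetical examples rather than defaults,
+ * the output path is a placeholder, and each array element must still be
+ * populated before use:
+ *
+ * EditSettings settings = new EditSettings();
+ * settings.clipSettingsArray = new ClipSettings[2];
+ * settings.transitionSettingsArray = new TransitionSettings[1];
+ * settings.effectSettingsArray = new EffectSettings[0];
+ * settings.videoFormat = VideoFormat.H264;
+ * settings.videoFrameRate = VideoFrameRate.FR_30_FPS;
+ * settings.videoFrameSize = VideoFrameSize.V720p;
+ * settings.videoBitrate = Bitrate.BR_5_MBPS;
+ * settings.audioFormat = AudioFormat.AAC;
+ * settings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
+ * settings.audioChannels = 2;
+ * settings.audioBitrate = Bitrate.BR_64_KBPS;
+ * settings.maxFileSize = 0;
+ * settings.outputFile = "/sdcard/output.3gp";
+ */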
+
+ /**
+ * Defines the media properties.
+ **/
+
+ public static class Properties {
+
+ /**
+ * Duration of the media in milliseconds.
+ */
+
+ public int duration;
+
+ /**
+ * File type.
+ */
+
+ public int fileType;
+
+ /**
+ * Video format.
+ */
+
+ public int videoFormat;
+
+ /**
+ * Duration of the video stream of the media in milliseconds.
+ */
+
+ public int videoDuration;
+
+ /**
+ * Bitrate of the video stream of the media.
+ */
+
+ public int videoBitrate;
+
+ /**
+ * Width of the video frames or the width of the still picture in
+ * pixels.
+ */
+
+ public int width;
+
+ /**
+ * Height of the video frames or the height of the still picture in
+ * pixels.
+ */
+
+ public int height;
+
+ /**
+ * Average frame rate of video in the media in frames per second.
+ */
+
+ public float averageFrameRate;
+
+ /**
+ * Profile and level of the video in the media.
+ */
+
+ public int profileAndLevel;
+
+ /**
+ * Audio format.
+ */
+
+ public int audioFormat;
+
+ /**
+ * Duration of the audio stream of the media in milliseconds.
+ */
+
+ public int audioDuration;
+
+ /**
+ * Bitrate of the audio stream of the media.
+ */
+
+ public int audioBitrate;
+
+ /**
+ * Number of audio channels in the media.
+ */
+
+ public int audioChannels;
+
+ /**
+ * Sampling frequency of the audio stream in the media in samples per
+ * second.
+ */
+
+ public int audioSamplingFrequency;
+
+ /**
+ * Volume value of the audio track as percentage.
+ */
+ public int audioVolumeValue;
+
+ public String Id;
+
+ }
+
+ /**
+ * Constructor
+ *
+ * @param projectPath The path where the VideoEditor stores all files
+ * related to the project
+ * @param veObj The video editor reference
+ */
+ public MediaArtistNativeHelper(String projectPath, VideoEditor veObj) {
+ mProjectPath = projectPath;
+ if (veObj == null) {
+ throw new IllegalArgumentException("video editor object is null");
+ }
+ mVideoEditor = veObj;
+ if (mStoryBoardSettings == null)
+ mStoryBoardSettings = new EditSettings();
+
+ mMediaEffectList = new ArrayList<Effect>();
+ mMediaOverLayList = new ArrayList<Overlay>();
+ _init(mProjectPath, "null");
+ mAudioTrackPCMFilePath = null;
+ }
+
+ /**
+ * @return The project path
+ */
+ String getProjectPath() {
+ return mProjectPath;
+ }
+
+ /**
+ * @return The Audio Track PCM file path
+ */
+ String getProjectAudioTrackPCMFilePath() {
+ return mAudioTrackPCMFilePath;
+ }
+
+ /**
+ * Invalidates the PCM file
+ */
+ void invalidatePcmFile() {
+ if (mAudioTrackPCMFilePath != null) {
+ new File(mAudioTrackPCMFilePath).delete();
+ mAudioTrackPCMFilePath = null;
+ }
+ }
+
+ @SuppressWarnings("unused")
+ private void onProgressUpdate(int taskId, int progress) {
+ if (mProcessingState == PROCESSING_EXPORT) {
+ if (mExportProgressListener != null) {
+ if ((progress % 2) == 0) {
+ mProgressToApp++;
+ mExportProgressListener.onProgress(mVideoEditor, mOutputFilename, mProgressToApp);
+ }
+ }
+ }
+ else {
+ // Adapt progress depending on current state
+ int actualProgress = 0;
+ int action = 0;
+
+ if (mProcessingState == PROCESSING_AUDIO_PCM) {
+ action = MediaProcessingProgressListener.ACTION_DECODE;
+ } else {
+ action = MediaProcessingProgressListener.ACTION_ENCODE;
+ }
+
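+ /*
+ * Map the native progress onto a single 0-100 range: PCM decoding,
+ * transition generation and Ken Burns generation report their progress
+ * directly, while the three intermediate encoding passes are scaled to
+ * roughly 0-25%, 25-50% and 50-100% respectively.
+ */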
+ switch (mProcessingState) {
+ case PROCESSING_AUDIO_PCM:
+ actualProgress = progress;
+ break;
+ case PROCESSING_TRANSITION:
+ actualProgress = progress;
+ break;
+ case PROCESSING_KENBURNS:
+ actualProgress = progress;
+ break;
+ case PROCESSING_INTERMEDIATE1:
+ if ((progress == 0) && (mProgressToApp != 0)) {
+ mProgressToApp = 0;
+ }
+ if ((progress != 0) || (mProgressToApp != 0)) {
+ actualProgress = progress/4;
+ }
+ break;
+ case PROCESSING_INTERMEDIATE2:
+ if ((progress != 0) || (mProgressToApp != 0)) {
+ actualProgress = 25 + progress/4;
+ }
+ break;
+ case PROCESSING_INTERMEDIATE3:
+ if ((progress != 0) || (mProgressToApp != 0)) {
+ actualProgress = 50 + progress/2;
+ }
+ break;
+ case PROCESSING_NONE:
+
+ default:
+ Log.e("MediaArtistNativeHelper", "ERROR unexpected State=" + mProcessingState);
+ return;
+ }
+ if ((mProgressToApp != actualProgress) && (actualProgress != 0)) {
+
+ mProgressToApp = actualProgress;
+
+ if (mMediaProcessingProgressListener != null) {
+ // Send the progress indication
+ mMediaProcessingProgressListener.onProgress(mProcessingObject,
+ action,
+ actualProgress);
+ }
+ }
+ /* avoid 0 in next intermediate call */
+ if (mProgressToApp == 0) {
+ if (mMediaProcessingProgressListener != null) {
+ /*
+ * Send the progress indication
+ */
+ mMediaProcessingProgressListener.onProgress(mProcessingObject,
+ action,
+ actualProgress);
+ }
+ mProgressToApp = 1;
+ }
+ }
+ }
+
+ @SuppressWarnings("unused")
+ private void onPreviewProgressUpdate(int progress, boolean isFinished) {
+ if (mPreviewProgressListener != null) {
+ mPreviewProgressListener.onProgress(mVideoEditor, progress, isFinished);
+ mPreviewProgress = progress;
+ }
+ }
+
+ /**
+ * Release the native helper object
+ */
+ public void releaseNativeHelper() {
+ try {
+ release();
+ } catch (IllegalStateException ex) {
+ Log.e("MediaArtistNativeHelper",
+ "Illegal State exeption caught in releaseNativeHelper");
+ throw ex;
+ } catch (RuntimeException ex) {
+ Log.e("MediaArtistNativeHelper", "Runtime exeption caught in releaseNativeHelper");
+ throw ex;
+ }
+ }
+
+ /**
+ * Progress callback for the audio graph extraction
+ */
+ @SuppressWarnings("unused")
+ private void onAudioGraphExtractProgressUpdate(int progress, boolean isVideo) {
+
+ if ((mExtractAudioWaveformProgressListener != null) && (progress > 0)) {
+ mExtractAudioWaveformProgressListener.onProgress(progress);
+ }
+ }
+
+ /**
+ * Populates the Effect Settings in EffectSettings
+ *
+ * @param effects The reference of EffectColor
+ *
+ * @return The populated effect settings in EffectSettings
+ * reference
+ */
+ EffectSettings getEffectSettings(EffectColor effects) {
+ EffectSettings effectSettings = new EffectSettings();
+ effectSettings.startTime = (int)effects.getStartTime();
+ effectSettings.duration = (int)effects.getDuration();
+ effectSettings.videoEffectType = getEffectColorType(effects);
+ effectSettings.audioEffectType = 0;
+ effectSettings.startPercent = 0;
+ effectSettings.durationPercent = 0;
+ effectSettings.framingFile = null;
+ effectSettings.topLeftX = 0;
+ effectSettings.topLeftY = 0;
+ effectSettings.framingResize = false;
+ effectSettings.text = null;
+ effectSettings.textRenderingData = null;
+ effectSettings.textBufferWidth = 0;
+ effectSettings.textBufferHeight = 0;
+ if (effects.getType() == EffectColor.TYPE_FIFTIES) {
+ effectSettings.fiftiesFrameRate = 15;
+ } else {
+ effectSettings.fiftiesFrameRate = 0;
+ }
+
+ if ((effectSettings.videoEffectType == VideoEffect.COLORRGB16)
+ || (effectSettings.videoEffectType == VideoEffect.GRADIENT)) {
+ effectSettings.rgb16InputColor = effects.getColor();
+ }
+
+ effectSettings.alphaBlendingStartPercent = 0;
+ effectSettings.alphaBlendingMiddlePercent = 0;
+ effectSettings.alphaBlendingEndPercent = 0;
+ effectSettings.alphaBlendingFadeInTimePercent = 0;
+ effectSettings.alphaBlendingFadeOutTimePercent = 0;
+ return effectSettings;
+ }
+
+ /**
+ * Populates the Overlay Settings in EffectSettings
+ *
+ * @param overlay The reference of OverlayFrame
+ *
+ * @return The populated overlay settings in EffectSettings
+ * reference
+ */
+ EffectSettings getOverlaySettings(OverlayFrame overlay) {
+ EffectSettings effectSettings = new EffectSettings();
+ Bitmap bitmap = null;
+
+ effectSettings.startTime = (int)overlay.getStartTime();
+ effectSettings.duration = (int)overlay.getDuration();
+ effectSettings.videoEffectType = VideoEffect.FRAMING;
+ effectSettings.audioEffectType = 0;
+ effectSettings.startPercent = 0;
+ effectSettings.durationPercent = 0;
+ effectSettings.framingFile = null;
+
+ if ((bitmap = overlay.getBitmap()) != null) {
+ effectSettings.framingFile = overlay.getFilename();
+
+ if (effectSettings.framingFile == null) {
+ try {
+ (overlay).save(mProjectPath);
+ } catch (IOException e) {
+ Log.e("MediaArtistNativeHelper","getOverlaySettings : File not found");
+ }
+ effectSettings.framingFile = overlay.getFilename();
+ }
+ if (bitmap.getConfig() == Bitmap.Config.ARGB_8888)
+ effectSettings.bitmapType = 6;
+ else if (bitmap.getConfig() == Bitmap.Config.ARGB_4444)
+ effectSettings.bitmapType = 5;
+ else if (bitmap.getConfig() == Bitmap.Config.RGB_565)
+ effectSettings.bitmapType = 4;
+ else if (bitmap.getConfig() == Bitmap.Config.ALPHA_8)
+ throw new RuntimeException("Bitmap config not supported");
+
+ effectSettings.width = bitmap.getWidth();
+ effectSettings.height = bitmap.getHeight();
+ effectSettings.framingBuffer = new int[effectSettings.width];
+ int tmp = 0;
+ short maxAlpha = 0;
+ short minAlpha = (short)0xFF;
+ short alpha = 0;
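+ /*
+ * Scan the overlay bitmap row by row to find the minimum and maximum
+ * alpha values; their average, converted to a 0-100 percentage below,
+ * is used as the alpha blending level of the framing effect.
+ */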
+ while (tmp < effectSettings.height) {
+ bitmap.getPixels(effectSettings.framingBuffer, 0,
+ effectSettings.width, 0, tmp,
+ effectSettings.width, 1);
+ for (int i = 0; i < effectSettings.width; i++) {
+ alpha = (short)((effectSettings.framingBuffer[i] >> 24) & 0xFF);
+ if (alpha > maxAlpha) {
+ maxAlpha = alpha;
+ }
+ if (alpha < minAlpha) {
+ minAlpha = alpha;
+ }
+ }
+ tmp += 1;
+ }
+ alpha = (short)((maxAlpha + minAlpha) / 2);
+ alpha = (short)((alpha * 100) / 256);
+ effectSettings.alphaBlendingEndPercent = alpha;
+ effectSettings.alphaBlendingMiddlePercent = alpha;
+ effectSettings.alphaBlendingStartPercent = alpha;
+ effectSettings.alphaBlendingFadeInTimePercent = 100;
+ effectSettings.alphaBlendingFadeOutTimePercent = 100;
+ effectSettings.framingBuffer = null;
+ }
+
+ effectSettings.topLeftX = 0;
+ effectSettings.topLeftY = 0;
+
+ effectSettings.framingResize = true;
+ effectSettings.text = null;
+ effectSettings.textRenderingData = null;
+ effectSettings.textBufferWidth = 0;
+ effectSettings.textBufferHeight = 0;
+ effectSettings.fiftiesFrameRate = 0;
+ effectSettings.rgb16InputColor = 0;
+ int mediaItemHeight;
+ int aspectRatio;
+ if (overlay.getMediaItem() instanceof MediaImageItem) {
+ if (((MediaImageItem)overlay.getMediaItem()).getGeneratedImageClip() != null) {
+ //Kenburns was applied
+ mediaItemHeight = ((MediaImageItem)overlay.getMediaItem()).getGeneratedClipHeight();
+ aspectRatio = getAspectRatio(
+ ((MediaImageItem)overlay.getMediaItem()).getGeneratedClipWidth()
+ , mediaItemHeight);
+ }
+ else {
+ //For image get the scaled height. Aspect ratio would remain the same
+ mediaItemHeight = ((MediaImageItem)overlay.getMediaItem()).getScaledHeight();
+ aspectRatio = overlay.getMediaItem().getAspectRatio();
+ effectSettings.framingResize = false; //since the image can be of odd size.
+ }
+ } else {
+ aspectRatio = overlay.getMediaItem().getAspectRatio();
+ mediaItemHeight = overlay.getMediaItem().getHeight();
+ }
+ effectSettings.framingScaledSize = findVideoResolution(aspectRatio, mediaItemHeight);
+ return effectSettings;
+ }
+
+ /**
+ * Sets the audio regenerate flag
+ *
+ * @param flag The boolean to set the audio regenerate flag
+ *
+ */
+ void setAudioflag(boolean flag) {
+ // Check if the file exists.
+ if (!(new File(String.format(mProjectPath + "/" + AUDIO_TRACK_PCM_FILE)).exists())) {
+ flag = true;
+ }
+ mRegenerateAudio = flag;
+ }
+
+ /**
+ * Gets the audio regenerate flag
+ *
+ * @return The audio regenerate flag
+ */
+ boolean getAudioflag() {
+ return mRegenerateAudio;
+ }
+
+ /**
+ * Maps the average frame rate to one of the defined enum values
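+ * <p>
+ * For example, an average frame rate of 24 fps maps to
+ * {@link VideoFrameRate#FR_25_FPS}, and rates of 25 fps or higher map to
+ * {@link VideoFrameRate#FR_30_FPS}.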
+ *
+ * @param averageFrameRate The average frame rate of video item
+ *
+ * @return The frame rate from one of the defined enum values, or -1 if
+ * the average frame rate is below 5 fps
+ */
+ public int GetClosestVideoFrameRate(int averageFrameRate) {
+ if (averageFrameRate >= 25) {
+ return VideoFrameRate.FR_30_FPS;
+ } else if (averageFrameRate >= 20) {
+ return VideoFrameRate.FR_25_FPS;
+ } else if (averageFrameRate >= 15) {
+ return VideoFrameRate.FR_20_FPS;
+ } else if (averageFrameRate >= 12) {
+ return VideoFrameRate.FR_15_FPS;
+ } else if (averageFrameRate >= 10) {
+ return VideoFrameRate.FR_12_5_FPS;
+ } else if (averageFrameRate >= 7) {
+ return VideoFrameRate.FR_10_FPS;
+ } else if (averageFrameRate >= 5) {
+ return VideoFrameRate.FR_7_5_FPS;
+ } else {
+ return -1;
+ }
+ }
+
+ /**
+ * Helper function to adjust the effect or overlay start time
+ * depending on the begin and end boundary times of the media item
+ */
+ public void adjustEffectsStartTimeAndDuration(EffectSettings lEffect,
+ int beginCutTime,
+ int endCutTime) {
+
+ int effectStartTime = 0;
+ int effectDuration = 0;
+
+ /**
+ * cbct -> clip begin cut time
+ * cect -> clip end cut time
+ ****************************************
+ * | |
+ * | cbct cect |
+ * | <-1--> | | |
+ * | <--|-2-> | |
+ * | | <---3---> | |
+ * | | <--|-4---> |
+ * | | | <--5--> |
+ * | <---|------6----|----> |
+ * | |
+ * < : effectStart
+ * > : effectStart + effectDuration
+ ****************************************
+ **/
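+
+ /*
+ * Worked example for case 2 (hypothetical values): with beginCutTime =
+ * 2000 ms and endCutTime = 8000 ms, an effect starting at 1000 ms with a
+ * duration of 3000 ms is adjusted to start at 0 ms with a duration of
+ * 2000 ms, i.e. only the portion inside the trimmed clip is kept.
+ */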
+
+ /** 1 & 5 */
+ /**
+ * The effect falls outside the trim duration. In such a case the effect
+ * shall not be applied.
+ */
+ if ((lEffect.startTime > endCutTime)
+ || ((lEffect.startTime + lEffect.duration) <= beginCutTime)) {
+
+ effectStartTime = 0;
+ effectDuration = 0;
+
+ lEffect.startTime = effectStartTime;
+ lEffect.duration = effectDuration;
+ return;
+ }
+
+ /** 2 */
+ if ((lEffect.startTime < beginCutTime)
+ && ((lEffect.startTime + lEffect.duration) > beginCutTime)
+ && ((lEffect.startTime + lEffect.duration) <= endCutTime)) {
+ effectStartTime = 0;
+ effectDuration = lEffect.duration;
+
+ effectDuration -= (beginCutTime - lEffect.startTime);
+ lEffect.startTime = effectStartTime;
+ lEffect.duration = effectDuration;
+ return;
+ }
+
+ /** 3 */
+ if ((lEffect.startTime >= beginCutTime)
+ && ((lEffect.startTime + lEffect.duration) <= endCutTime)) {
+ effectStartTime = lEffect.startTime - beginCutTime;
+ lEffect.startTime = effectStartTime;
+ /* the duration remains unchanged in this case */
+ return;
+ }
+
+ /** 4 */
+ if ((lEffect.startTime >= beginCutTime)
+ && ((lEffect.startTime + lEffect.duration) > endCutTime)) {
+ effectStartTime = lEffect.startTime - beginCutTime;
+ effectDuration = endCutTime - lEffect.startTime;
+ lEffect.startTime = effectStartTime;
+ lEffect.duration = effectDuration;
+ return;
+ }
+
+ /** 6 */
+ if ((lEffect.startTime < beginCutTime)
+ && ((lEffect.startTime + lEffect.duration) > endCutTime)) {
+ effectStartTime = 0;
+ effectDuration = endCutTime - beginCutTime;
+ lEffect.startTime = effectStartTime;
+ lEffect.duration = effectDuration;
+ return;
+ }
+
+ }
+
+ /**
+ * Generates the clip for preview or export
+ *
+ * @param editSettings The EditSettings reference for generating
+ * a clip for preview or export
+ *
+ * @return <code>0</code> on success, a non-zero native error code on
+ * failure, or <code>-1</code> if an exception was caught
+ */
+ public int generateClip(EditSettings editSettings) {
+ int err = 0;
+
+ try {
+ err = nativeGenerateClip(editSettings);
+ } catch (IllegalArgumentException ex) {
+ Log.e("MediaArtistNativeHelper","Illegal Argument exception in load settings");
+ return -1;
+ } catch (IllegalStateException ex) {
+ Log.e("MediaArtistNativeHelper","Illegal state exception in load settings");
+ return -1;
+ } catch (RuntimeException ex) {
+ Log.e("MediaArtistNativeHelper", "Runtime exception in load settings");
+ return -1;
+ }
+ return err;
+ }
+
+ /**
+ * Initializes the ClipSettings reference to default values
+ *
+ * @param lclipSettings The ClipSettings reference
+ */
+ void initClipSettings(ClipSettings lclipSettings) {
+ lclipSettings.clipPath = null;
+ lclipSettings.clipDecodedPath = null;
+ lclipSettings.clipOriginalPath = null;
+ lclipSettings.fileType = 0;
+ lclipSettings.endCutTime = 0;
+ lclipSettings.beginCutTime = 0;
+ lclipSettings.beginCutPercent = 0;
+ lclipSettings.endCutPercent = 0;
+ lclipSettings.panZoomEnabled = false;
+ lclipSettings.panZoomPercentStart = 0;
+ lclipSettings.panZoomTopLeftXStart = 0;
+ lclipSettings.panZoomTopLeftYStart = 0;
+ lclipSettings.panZoomPercentEnd = 0;
+ lclipSettings.panZoomTopLeftXEnd = 0;
+ lclipSettings.panZoomTopLeftYEnd = 0;
+ lclipSettings.mediaRendering = 0;
+ }
+
+
+ /**
+ * Populates the settings for generating an effect clip
+ *
+ * @param lMediaItem The media item for which the effect clip
+ * needs to be generated
+ * @param lclipSettings The ClipSettings reference containing
+ * clips data
+ * @param e The EditSettings reference containing effect specific data
+ * @param uniqueId The unique id used in the name of the output clip
+ * @param clipNo Used for internal purpose
+ *
+ * @return The name and path of generated clip
+ */
+ String generateEffectClip(MediaItem lMediaItem, ClipSettings lclipSettings,
+ EditSettings e,String uniqueId,int clipNo) {
+ int err = 0;
+ EditSettings editSettings = null;
+ String EffectClipPath = null;
+
+ editSettings = new EditSettings();
+
+ editSettings.clipSettingsArray = new ClipSettings[1];
+ editSettings.clipSettingsArray[0] = lclipSettings;
+
+ editSettings.backgroundMusicSettings = null;
+ editSettings.transitionSettingsArray = null;
+ editSettings.effectSettingsArray = e.effectSettingsArray;
+
+ EffectClipPath = String.format(mProjectPath + "/" + "ClipEffectIntermediate" + "_"
+ + lMediaItem.getId() + uniqueId + ".3gp");
+
+ File tmpFile = new File(EffectClipPath);
+ if (tmpFile.exists()) {
+ tmpFile.delete();
+ }
+
+ if (lMediaItem instanceof MediaVideoItem) {
+ MediaVideoItem m = (MediaVideoItem)lMediaItem;
+
+ editSettings.audioFormat = AudioFormat.AAC;
+ editSettings.audioChannels = 2;
+ editSettings.audioBitrate = Bitrate.BR_64_KBPS;
+ editSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
+
+ editSettings.videoBitrate = Bitrate.BR_5_MBPS;
+ //editSettings.videoFormat = VideoFormat.MPEG4;
+ editSettings.videoFormat = VideoFormat.H264;
+ editSettings.videoFrameRate = VideoFrameRate.FR_30_FPS;
+ editSettings.videoFrameSize = findVideoResolution(mVideoEditor.getAspectRatio(), m
+ .getHeight());
+
+ } else {
+ MediaImageItem m = (MediaImageItem)lMediaItem;
+ editSettings.audioBitrate = Bitrate.BR_64_KBPS;
+ editSettings.audioChannels = 2;
+ editSettings.audioFormat = AudioFormat.AAC;
+ editSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
+
+ editSettings.videoBitrate = Bitrate.BR_5_MBPS;
+ editSettings.videoFormat = VideoFormat.H264;
+ editSettings.videoFrameRate = VideoFrameRate.FR_30_FPS;
+ editSettings.videoFrameSize = findVideoResolution(mVideoEditor.getAspectRatio(), m
+ .getScaledHeight());
+ }
+
+ editSettings.outputFile = EffectClipPath;
+
+ if (clipNo == 1) {
+ mProcessingState = PROCESSING_INTERMEDIATE1;
+ } else if (clipNo == 2) {
+ mProcessingState = PROCESSING_INTERMEDIATE2;
+ }
+ mProcessingObject = lMediaItem;
+ err = generateClip(editSettings);
+ mProcessingState = PROCESSING_NONE;
+
+ if (err == 0) {
+ lclipSettings.clipPath = EffectClipPath;
+ lclipSettings.fileType = FileType.THREE_GPP;
+ return EffectClipPath;
+ } else {
+ throw new RuntimeException("preview generation cannot be completed");
+ }
+ }
+
+
+ /**
+ * Populates the settings for generating a Ken Burn effect clip
+ *
+ * @param m The media image item for which the Ken Burn effect clip
+ * needs to be generated
+ * @param e The EditSettings reference containing clip specific data
+ *
+ * @return The name and path of generated clip
+ */
+ String generateKenBurnsClip(EditSettings e, MediaImageItem m) {
+ String output = null;
+ int err = 0;
+
+ e.backgroundMusicSettings = null;
+ e.transitionSettingsArray = null;
+ e.effectSettingsArray = null;
+ output = String.format(mProjectPath + "/" + "ImageClip-" + m.getId() + ".3gp");
+
+ File tmpFile = new File(output);
+ if (tmpFile.exists()) {
+ tmpFile.delete();
+ }
+
+ e.outputFile = output;
+ e.audioBitrate = Bitrate.BR_64_KBPS;
+ e.audioChannels = 2;
+ e.audioFormat = AudioFormat.AAC;
+ e.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
+
+ e.videoBitrate = Bitrate.BR_5_MBPS;
+ e.videoFormat = VideoFormat.H264;
+ e.videoFrameRate = VideoFrameRate.FR_30_FPS;
+ e.videoFrameSize = findVideoResolution(mVideoEditor.getAspectRatio(),
+ m.getScaledHeight());
+ mProcessingState = PROCESSING_KENBURNS;
+ mProcessingObject = m;
+ err = generateClip(e);
+ // Reset the processing state and check for errors
+ mProcessingState = PROCESSING_NONE;
+ if (err != 0) {
+ throw new RuntimeException("preview generation cannot be completed");
+ }
+ return output;
+ }
+
+
+ /**
+ * Calculates the output resolution for transition clip
+ *
+ * @param m1 First media item associated with transition
+ * @param m2 Second media item associated with transition
+ *
+ * @return The transition resolution
+ */
+ private int getTransitionResolution(MediaItem m1, MediaItem m2) {
+ int clip1Height = 0;
+ int clip2Height = 0;
+ int videoSize = 0;
+
+ if (m1 != null && m2 != null) {
+ if (m1 instanceof MediaVideoItem) {
+ clip1Height = m1.getHeight();
+ } else if (m1 instanceof MediaImageItem) {
+ clip1Height = ((MediaImageItem)m1).getScaledHeight();
+ }
+ if (m2 instanceof MediaVideoItem) {
+ clip2Height = m2.getHeight();
+ } else if (m2 instanceof MediaImageItem) {
+ clip2Height = ((MediaImageItem)m2).getScaledHeight();
+ }
+ if (clip1Height > clip2Height) {
+ videoSize = findVideoResolution(mVideoEditor.getAspectRatio(),
+ clip1Height);
+ } else {
+ videoSize = findVideoResolution(mVideoEditor.getAspectRatio(),
+ clip2Height);
+ }
+ } else if (m1 == null && m2 != null) {
+ if (m2 instanceof MediaVideoItem) {
+ clip2Height = m2.getHeight();
+ } else if (m2 instanceof MediaImageItem) {
+ clip2Height = ((MediaImageItem)m2).getScaledHeight();
+ }
+ videoSize = findVideoResolution(mVideoEditor.getAspectRatio(),
+ clip2Height);
+ } else if (m1 != null && m2 == null) {
+ if (m1 instanceof MediaVideoItem) {
+ clip1Height = m1.getHeight();
+ } else if (m1 instanceof MediaImageItem) {
+ clip1Height = ((MediaImageItem)m1).getScaledHeight();
+ }
+ videoSize = findVideoResolution(mVideoEditor.getAspectRatio(),
+ clip1Height);
+ }
+ return videoSize;
+ }
+
+ /**
+ * Populates the settings for generating a transition clip
+ *
+ * @param m1 First media item associated with transition
+ * @param m2 Second media item associated with transition
+ * @param e The EditSettings reference containing
+ * clip specific data
+ * @param uniqueId The unique id used in the name of the output clip
+ * @param t The Transition specific data
+ *
+ * @return The name and path of generated clip
+ */
+ String generateTransitionClip(EditSettings e, String uniqueId,
+ MediaItem m1, MediaItem m2,Transition t) {
+ String outputFilename = null;
+ int err = 0;
+
+ outputFilename = String.format(mProjectPath + "/" + uniqueId + ".3gp");
+ e.outputFile = outputFilename;
+ e.audioBitrate = Bitrate.BR_64_KBPS;
+ e.audioChannels = 2;
+ e.audioFormat = AudioFormat.AAC;
+ e.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
+
+ e.videoBitrate = Bitrate.BR_5_MBPS;
+ e.videoFormat = VideoFormat.H264;
+ e.videoFrameRate = VideoFrameRate.FR_30_FPS;
+ e.videoFrameSize = getTransitionResolution(m1, m2);
+
+ if (new File(outputFilename).exists()) {
+ new File(outputFilename).delete();
+ }
+ mProcessingState = PROCESSING_INTERMEDIATE3;
+ mProcessingObject = t;
+ err = generateClip(e);
+ // Reset the processing state and check for errors
+ mProcessingState = PROCESSING_NONE;
+ if (err != 0) {
+ throw new RuntimeException("preview generation cannot be completed");
+ }
+ return outputFilename;
+ }
+
+ /**
+ * Populates effects and overlays in the EffectSettings structure
+ * and also adjusts the start time and duration of effects and overlays
+ * with respect to the total storyboard time
+ *
+ * @param m Media item associated with the effect
+ * @param effectSettings The EffectSettings array containing
+ * effect specific data
+ * @param i The index in the effect settings array at which to start populating
+ * @param beginCutTime The begin cut time of the clip associated with effect
+ * @param endCutTime The end cut time of the clip associated with effect
+ * @param storyBoardTime The current story board time
+ *
+ * @return The updated index
+ */
+ private int populateEffects(MediaItem m, EffectSettings[] effectSettings, int i,
+ int beginCutTime, int endCutTime, int storyBoardTime) {
+ List<Effect> effects = m.getAllEffects();
+ List<Overlay> overlays = m.getAllOverlays();
+
+ if (m.getBeginTransition() != null && m.getBeginTransition().getDuration() > 0
+ && m.getEndTransition() != null && m.getEndTransition().getDuration() > 0) {
+ beginCutTime += m.getBeginTransition().getDuration();
+ endCutTime -= m.getEndTransition().getDuration();
+ } else if (m.getBeginTransition() == null && m.getEndTransition() != null
+ && m.getEndTransition().getDuration() > 0) {
+ endCutTime -= m.getEndTransition().getDuration();
+ } else if (m.getEndTransition() == null && m.getBeginTransition() != null
+ && m.getBeginTransition().getDuration() > 0) {
+ beginCutTime += m.getBeginTransition().getDuration();
+ }
+
+ for (Effect effect : effects) {
+ if (effect instanceof EffectColor) {
+ effectSettings[i] = getEffectSettings((EffectColor)effect);
+ adjustEffectsStartTimeAndDuration(effectSettings[i],
+ beginCutTime, endCutTime);
+ effectSettings[i].startTime += storyBoardTime;
+ i++;
+ }
+ }
+ for (Overlay overlay : overlays) {
+ effectSettings[i] = getOverlaySettings((OverlayFrame)overlay);
+ adjustEffectsStartTimeAndDuration(effectSettings[i],
+ beginCutTime, endCutTime);
+ effectSettings[i].startTime += storyBoardTime;
+ i++;
+ }
+ return i;
+ }
+
+ /**
+ * Adjusts the media item boundaries for use in export or preview
+ *
+ * @param clipSettings The ClipSettings reference
+ * @param clipProperties The Properties reference
+ * @param m The media item
+ */
+ private void adjustMediaItemBoundary(ClipSettings clipSettings,
+ Properties clipProperties, MediaItem m) {
+ if (m.getBeginTransition() != null && m.getBeginTransition().getDuration() > 0
+ && m.getEndTransition() != null && m.getEndTransition().getDuration() > 0) {
+
+ clipSettings.beginCutTime += m.getBeginTransition().getDuration();
+ clipSettings.endCutTime -= m.getEndTransition().getDuration();
+
+ } else if (m.getBeginTransition() == null && m.getEndTransition() != null
+ && m.getEndTransition().getDuration() > 0) {
+
+ clipSettings.endCutTime -= m.getEndTransition().getDuration();
+
+ } else if (m.getEndTransition() == null && m.getBeginTransition() != null
+ && m.getBeginTransition().getDuration() > 0) {
+
+ clipSettings.beginCutTime += m.getBeginTransition().getDuration();
+ }
+ clipProperties.duration = clipSettings.endCutTime -
+ clipSettings.beginCutTime;
+
+ if (clipProperties.videoDuration != 0) {
+ clipProperties.videoDuration = clipSettings.endCutTime -
+ clipSettings.beginCutTime;
+ }
+
+ if (clipProperties.audioDuration != 0) {
+ clipProperties.audioDuration = clipSettings.endCutTime -
+ clipSettings.beginCutTime;
+ }
+ }
+
+ /**
+ * Generates the transition if it is present
+ * and in an invalidated state
+ *
+ * @param transition The Transition reference
+ * @param editSettings The EditSettings reference
+ * @param clipPropertiesArray The clip Properties array
+ * @param index The index in the clip properties array for the current clip
+ */
+ private void generateTransition(Transition transition, EditSettings editSettings,
+ PreviewClipProperties clipPropertiesArray, int index) {
+ if (!(transition.isGenerated())) {
+ transition.generate();
+ }
+ editSettings.clipSettingsArray[index] = new ClipSettings();
+ editSettings.clipSettingsArray[index].clipPath = transition.getFilename();
+ editSettings.clipSettingsArray[index].fileType = FileType.THREE_GPP;
+ editSettings.clipSettingsArray[index].beginCutTime = 0;
+ editSettings.clipSettingsArray[index].endCutTime =
+ (int)transition.getDuration();
+ editSettings.clipSettingsArray[index].mediaRendering =
+ MediaRendering.BLACK_BORDERS;
+ try {
+ clipPropertiesArray.clipProperties[index] =
+ getMediaProperties(transition.getFilename());
+ } catch (Exception e) {
+ throw new IllegalArgumentException("Unsupported file or file not found");
+ }
+ clipPropertiesArray.clipProperties[index].Id = null;
+ clipPropertiesArray.clipProperties[index].audioVolumeValue = 100;
+ clipPropertiesArray.clipProperties[index].duration =
+ (int)transition.getDuration();
+ if (clipPropertiesArray.clipProperties[index].videoDuration != 0) {
+ clipPropertiesArray.clipProperties[index].videoDuration =
+ (int)transition.getDuration();
+ }
+ if (clipPropertiesArray.clipProperties[index].audioDuration != 0) {
+ clipPropertiesArray.clipProperties[index].audioDuration =
+ (int)transition.getDuration();
+ }
+ }
+
+ /**
+ * Sets the volume for current media item in clip properties array
+ *
+ * @param m The media item
+ * @param clipProperties The clip properties array reference
+ * @param index The index in the clip properties array for the current clip
+ */
+ private void adjustVolume(MediaItem m, PreviewClipProperties clipProperties,
+ int index) {
+ if (m instanceof MediaVideoItem) {
+ boolean videoMuted = ((MediaVideoItem)m).isMuted();
+ if (videoMuted == false) {
+ mClipProperties.clipProperties[index].audioVolumeValue = ((MediaVideoItem)m)
+ .getVolume();
+ } else {
+ mClipProperties.clipProperties[index].audioVolumeValue = 0;
+ }
+ } else if (m instanceof MediaImageItem) {
+ mClipProperties.clipProperties[index].audioVolumeValue = 0;
+ }
+ }
+
+ /**
+ * Checks for odd size image width and height
+ *
+ * @param m The media item
+ * @param clipProperties The clip properties array reference
+ * @param index The index in the clip properties array for the current clip
+ */
+ private void checkOddSizeImage(MediaItem m, PreviewClipProperties clipProperties, int index) {
+ if (m instanceof MediaImageItem) {
+ int width = mClipProperties.clipProperties[index].width;
+ int height = mClipProperties.clipProperties[index].height;
+
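+ /*
+ * Video encoders generally require even frame dimensions, so odd image
+ * widths and heights are rounded down to the nearest even value.
+ */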
+ if ((width % 2) != 0) {
+ width -= 1;
+ }
+ if ((height % 2) != 0) {
+ height -= 1;
+ }
+ mClipProperties.clipProperties[index].width = width;
+ mClipProperties.clipProperties[index].height = height;
+ }
+ }
+
+ /**
+ * Populates the media item properties and calculates the maximum
+ * height among all the clips
+ *
+ * @param m The media item
+ * @param index The index in the clip properties array for the current clip
+ * @param maxHeight The maximum height seen so far
+ *
+ * @return The updated maximum height, i.e. the current clip's height if it
+ * is greater than all previous clips' heights
+ */
+ private int populateMediaItemProperties(MediaItem m, int index, int maxHeight) {
+ mPreviewEditSettings.clipSettingsArray[index] = new ClipSettings();
+ if (m instanceof MediaVideoItem) {
+ mPreviewEditSettings.clipSettingsArray[index] = ((MediaVideoItem)m)
+ .getVideoClipProperties();
+ if (((MediaVideoItem)m).getHeight() > maxHeight) {
+ maxHeight = ((MediaVideoItem)m).getHeight();
+ }
+ } else if (m instanceof MediaImageItem) {
+ mPreviewEditSettings.clipSettingsArray[index] = ((MediaImageItem)m)
+ .getImageClipProperties();
+ if (((MediaImageItem)m).getScaledHeight() > maxHeight) {
+ maxHeight = ((MediaImageItem)m).getScaledHeight();
+ }
+ }
+ /* Handle the image files here */
+ if (mPreviewEditSettings.clipSettingsArray[index].fileType == FileType.JPG) {
+ mPreviewEditSettings.clipSettingsArray[index].clipDecodedPath = ((MediaImageItem)m)
+ .getDecodedImageFileName();
+
+ mPreviewEditSettings.clipSettingsArray[index].clipOriginalPath =
+ mPreviewEditSettings.clipSettingsArray[index].clipPath;
+ }
+ return maxHeight;
+ }
+
+ /**
+ * Populates the background music track properties
+ *
+ * @param mediaBGMList The background music list
+ *
+ */
+ private void populateBackgroundMusicProperties(List<AudioTrack> mediaBGMList) {
+
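+ /*
+ * Only a single background music track is supported; any other list size
+ * clears the background music settings.
+ */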
+ if (mediaBGMList.size() == 1) {
+ mAudioTrack = mediaBGMList.get(0);
+ } else {
+ mAudioTrack = null;
+ }
+
+ if (mAudioTrack != null) {
+ mAudioSettings = new AudioSettings();
+ Properties mAudioProperties = new Properties();
+ mAudioSettings.pFile = null;
+ mAudioSettings.Id = mAudioTrack.getId();
+ try {
+ mAudioProperties = getMediaProperties(mAudioTrack.getFilename());
+ } catch (Exception e) {
+ throw new IllegalArgumentException("Unsupported file or file not found");
+ }
+ mAudioSettings.bRemoveOriginal = false;
+ mAudioSettings.channels = mAudioProperties.audioChannels;
+ mAudioSettings.Fs = mAudioProperties.audioSamplingFrequency;
+ mAudioSettings.loop = mAudioTrack.isLooping();
+ mAudioSettings.ExtendedFs = 0;
+ mAudioSettings.pFile = mAudioTrack.getFilename();
+ mAudioSettings.startMs = mAudioTrack.getStartTime();
+ mAudioSettings.beginCutTime = mAudioTrack.getBoundaryBeginTime();
+ mAudioSettings.endCutTime = mAudioTrack.getBoundaryEndTime();
+ if (mAudioTrack.isMuted()) {
+ mAudioSettings.volume = 0;
+ } else {
+ mAudioSettings.volume = mAudioTrack.getVolume();
+ }
+ mAudioSettings.fileType = mAudioProperties.fileType;
+ mAudioSettings.ducking_lowVolume = mAudioTrack.getDuckedTrackVolume();
+ mAudioSettings.ducking_threshold = mAudioTrack.getDuckingThreshhold();
+ mAudioSettings.bInDucking_enable = mAudioTrack.isDuckingEnabled();
+ mAudioTrackPCMFilePath = String.format(mProjectPath + "/" + AUDIO_TRACK_PCM_FILE);
+ //String.format(mProjectPath + "/" + "AudioPcm" + ".pcm");
+ mAudioSettings.pcmFilePath = mAudioTrackPCMFilePath;
+
+ mPreviewEditSettings.backgroundMusicSettings =
+ new BackgroundMusicSettings();
+ mPreviewEditSettings.backgroundMusicSettings.file =
+ mAudioTrackPCMFilePath;
+ mPreviewEditSettings.backgroundMusicSettings.fileType =
+ mAudioProperties.fileType;
+ mPreviewEditSettings.backgroundMusicSettings.insertionTime =
+ mAudioTrack.getStartTime();
+ mPreviewEditSettings.backgroundMusicSettings.volumePercent =
+ mAudioTrack.getVolume();
+ mPreviewEditSettings.backgroundMusicSettings.beginLoop = mAudioTrack
+ .getBoundaryBeginTime();
+ mPreviewEditSettings.backgroundMusicSettings.endLoop =
+ mAudioTrack.getBoundaryEndTime();
+ mPreviewEditSettings.backgroundMusicSettings.enableDucking = mAudioTrack
+ .isDuckingEnabled();
+ mPreviewEditSettings.backgroundMusicSettings.duckingThreshold = mAudioTrack
+ .getDuckingThreshhold();
+ mPreviewEditSettings.backgroundMusicSettings.lowVolume = mAudioTrack
+ .getDuckedTrackVolume();
+ mPreviewEditSettings.backgroundMusicSettings.isLooping =
+ mAudioTrack.isLooping();
+ mPreviewEditSettings.primaryTrackVolume = 100;
+ mProcessingState = PROCESSING_AUDIO_PCM;
+ mProcessingObject = mAudioTrack;
+ } else {
+ if (mAudioSettings != null) {
+ mAudioSettings = null;
+ }
+ if (mPreviewEditSettings.backgroundMusicSettings != null) {
+ mPreviewEditSettings.backgroundMusicSettings = null;
+ }
+ mAudioTrackPCMFilePath = null;
+ }
+ }
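+
+ /*
+ * Only a single background track is supported: the settings above are
+ * populated when exactly one AudioTrack is present and cleared otherwise.
+ * A rough sketch of how a track reaches this point (the id and path are
+ * placeholders):
+ *
+ * editor.addAudioTrack(new AudioTrack(editor, "bgm1", "/sdcard/music.mp3"));
+ * // previewStoryBoard(..., editor.getAllAudioTracks(), ...) then maps the
+ * // track into mAudioSettings and backgroundMusicSettings as shown above.
+ */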
+
+ /**
+ * Calculates the total number of effects and overlays across all the
+ * media items in the list, excluding Ken Burns effects
+ *
+ * @param mediaItemsList The media item list
+ *
+ * @return The total number of effects and overlays counted
+ *
+ */
+ private int getTotalEffects(List<MediaItem> mediaItemsList) {
+ int totalEffects = 0;
+ final Iterator<MediaItem> it = mediaItemsList.iterator();
+ while (it.hasNext()) {
+ final MediaItem t = it.next();
+ totalEffects += t.getAllEffects().size();
+ totalEffects += t.getAllOverlays().size();
+ final Iterator<Effect> ef = t.getAllEffects().iterator();
+ while (ef.hasNext()) {
+ final Effect e = ef.next();
+ if (e instanceof EffectKenBurns)
+ totalEffects--;
+ }
+ }
+ return totalEffects;
+ }
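+
+ /*
+ * Despite its name this count includes overlays, while Ken Burns effects
+ * are excluded because they are baked into the generated image clip rather
+ * than applied as an effect at preview time. For example, one item with a
+ * color effect and an overlay plus a second item with only a Ken Burns
+ * effect gives a total of 2.
+ */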
+
+ /**
+ * Builds the clip settings and clip properties arrays, including
+ * transition clips and effect settings, for preview or export.
+ *
+ * @param mediaItemsList The media item list
+ * @param mediaTransitionList The transitions list
+ * @param mediaBGMList The background music list
+ * @param listener The MediaProcessingProgressListener
+ *
+ */
+ public void previewStoryBoard(List<MediaItem> mediaItemsList,
+ List<Transition> mediaTransitionList, List<AudioTrack> mediaBGMList,
+ MediaProcessingProgressListener listener) {
+ if (mInvalidatePreviewArray) {
+ int previewIndex = 0;
+ int totalEffects = 0;
+ int storyBoardTime = 0;
+ int maxHeight = 0;
+ int beginCutTime = 0;
+ int endCutTime = 0;
+ int effectIndex = 0;
+ Transition lTransition = null;
+ MediaItem lMediaItem = null;
+ mPreviewEditSettings = new EditSettings();
+ mClipProperties = new PreviewClipProperties();
+ mTotalClips = 0;
+
+ mTotalClips = mediaItemsList.size();
+ for (Transition transition : mediaTransitionList) {
+ if (transition.getDuration() > 0)
+ mTotalClips++;
+ }
+
+ totalEffects = getTotalEffects(mediaItemsList);
+
+ mPreviewEditSettings.clipSettingsArray = new ClipSettings[mTotalClips];
+ mPreviewEditSettings.effectSettingsArray = new EffectSettings[totalEffects];
+ mClipProperties.clipProperties = new Properties[mTotalClips];
+
+ /** Record the callback progress listener */
+ if (listener != null) {
+ mMediaProcessingProgressListener = listener;
+ mProgressToApp = 0;
+ }
+
+ if (mediaItemsList.size() > 0) {
+ for (int i = 0; i < mediaItemsList.size(); i++) {
+ /* Get the Media Item from the list */
+ lMediaItem = mediaItemsList.get(i);
+ if (lMediaItem instanceof MediaVideoItem) {
+ beginCutTime = (int)((MediaVideoItem)lMediaItem).getBoundaryBeginTime();
+ endCutTime = (int)((MediaVideoItem)lMediaItem).getBoundaryEndTime();
+ } else if (lMediaItem instanceof MediaImageItem) {
+ beginCutTime = 0;
+ endCutTime = (int)((MediaImageItem)lMediaItem).getTimelineDuration();
+ }
+ /* Get the transition associated with Media Item */
+ lTransition = lMediaItem.getBeginTransition();
+ if (lTransition != null && (lTransition.getDuration() > 0)) {
+ /* generate transition clip */
+ generateTransition(lTransition, mPreviewEditSettings,
+ mClipProperties, previewIndex);
+ storyBoardTime += mClipProperties.clipProperties[previewIndex].duration;
+ previewIndex++;
+ }
+ /* Populate media item properties */
+ maxHeight = populateMediaItemProperties(lMediaItem,
+ previewIndex,
+ maxHeight);
+ if (lMediaItem instanceof MediaImageItem) {
+ int tmpCnt = 0;
+ boolean bEffectKbPresent = false;
+ List<Effect> effectList = lMediaItem.getAllEffects();
+ /**
+ * Check if a Ken Burns effect is present
+ */
+ while ( tmpCnt < effectList.size()) {
+ if (effectList.get(tmpCnt) instanceof EffectKenBurns) {
+ bEffectKbPresent = true;
+ break;
+ }
+ tmpCnt++;
+ }
+
+ if (bEffectKbPresent) {
+ try {
+ mClipProperties.clipProperties[previewIndex]
+ = getMediaProperties(((MediaImageItem)lMediaItem).getGeneratedImageClip());
+ } catch (Exception e) {
+ throw new IllegalArgumentException("Unsupported file or file not found");
+ }
+ } else {
+ try {
+ mClipProperties.clipProperties[previewIndex]
+ = getMediaProperties(((MediaImageItem)lMediaItem).getScaledImageFileName());
+ } catch (Exception e) {
+ throw new IllegalArgumentException("Unsupported file or file not found");
+ }
+ mClipProperties.clipProperties[previewIndex].width = ((MediaImageItem)lMediaItem).getScaledWidth();
+ mClipProperties.clipProperties[previewIndex].height = ((MediaImageItem)lMediaItem).getScaledHeight();
+ }
+
+ } else {
+ try {
+ mClipProperties.clipProperties[previewIndex]
+ = getMediaProperties(lMediaItem.getFilename());
+ } catch (Exception e) {
+ throw new IllegalArgumentException("Unsupported file or file not found");
+ }
+ }
+ mClipProperties.clipProperties[previewIndex].Id = lMediaItem.getId();
+ checkOddSizeImage(lMediaItem, mClipProperties, previewIndex);
+ adjustVolume(lMediaItem, mClipProperties, previewIndex);
+
+ /*
+ * Adjust media item start time and end time w.r.t to begin
+ * and end transitions associated with media item
+ */
+
+ adjustMediaItemBoundary(mPreviewEditSettings.clipSettingsArray[previewIndex],
+ mClipProperties.clipProperties[previewIndex], lMediaItem);
+
+ /*
+ * Get all the effects and overlays for that media item and
+ * adjust start time and duration of effects
+ */
+
+ effectIndex = populateEffects(lMediaItem,
+ mPreviewEditSettings.effectSettingsArray, effectIndex, beginCutTime,
+ endCutTime, storyBoardTime);
+ storyBoardTime += mClipProperties.clipProperties[previewIndex].duration;
+ previewIndex++;
+
+ /* Check if there is any end transition at last media item */
+
+ if (i == (mediaItemsList.size() - 1)) {
+ lTransition = lMediaItem.getEndTransition();
+ if (lTransition != null && (lTransition.getDuration() > 0)) {
+ generateTransition(lTransition, mPreviewEditSettings, mClipProperties,
+ previewIndex);
+ break;
+ }
+ }
+ }
+ }
+ if (!mErrorFlagSet) {
+ mPreviewEditSettings.videoFrameSize = findVideoResolution(mVideoEditor
+ .getAspectRatio(), maxHeight);
+ populateBackgroundMusicProperties(mediaBGMList);
+ /** call to native populate settings */
+ try {
+ nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
+ } catch (IllegalArgumentException ex) {
+ Log.e("MediaArtistNativeHelper",
+ "Illegal argument exception in nativePopulateSettings");
+ throw ex;
+ } catch (IllegalStateException ex) {
+ Log.e("MediaArtistNativeHelper",
+ "Illegal state exception in nativePopulateSettings");
+ throw ex;
+ } catch (RuntimeException ex) {
+ Log.e("MediaArtistNativeHelper", "Runtime exception in nativePopulateSettings");
+ throw ex;
+ }
+ mInvalidatePreviewArray = false;
+ mProcessingState = PROCESSING_NONE;
+ }
+ if (mErrorFlagSet) {
+ mErrorFlagSet = false;
+ throw new RuntimeException("preview generation cannot be completed");
+ }
+ }
+ } /* END of previewStoryBoard */
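+
+ /*
+ * A minimal sketch of the expected call order, assuming a VideoEditorImpl
+ * "editor", a valid Surface "surface" and caller-supplied listeners (the
+ * callback frame count of 30 is an arbitrary placeholder):
+ *
+ * previewStoryBoard(editor.getAllMediaItems(), editor.getAllTransitions(),
+ *         editor.getAllAudioTracks(), progressListener);
+ * doPreview(surface, 0, editor.getDuration(), false, 30, previewListener);
+ * ...
+ * long lastRenderedMs = stopPreview();
+ *
+ * doPreview() returns without doing anything until previewStoryBoard() has
+ * populated the clip settings arrays.
+ */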
+
+ /**
+ * This function is responsible for starting the preview
+ *
+ * @param surface The surface on which the preview has to be displayed
+ * @param fromMs The time in ms from which the preview has to be started
+ * @param toMs The time in ms until which the preview has to be played
+ * @param loop Whether or not to loop the preview
+ * @param callbackAfterFrameCount Indicates after how many frames
+ * the callback is needed
+ * @param listener The PreviewProgressListener
+ *
+ */
+ public void doPreview(Surface surface, long fromMs, long toMs, boolean loop,
+ int callbackAfterFrameCount, PreviewProgressListener listener) {
+ mPreviewProgress = 0;
+ if (listener != null) {
+ mPreviewProgressListener = listener;
+ }
+ if (!mInvalidatePreviewArray) {
+ try {
+ /** Modify the image files names to rgb image files. */
+ for (int clipCnt = 0; clipCnt < mPreviewEditSettings.clipSettingsArray.length; clipCnt++) {
+ if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) {
+ mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath = mPreviewEditSettings.clipSettingsArray[clipCnt].clipDecodedPath;
+ }
+ }
+ nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
+ nativeStartPreview(surface, fromMs, toMs, callbackAfterFrameCount, loop);
+ } catch (IllegalArgumentException ex) {
+ Log.e("MediaArtistNativeHelper",
+ "Illegal argument exception in nativeStartPreview");
+ throw ex;
+ } catch (IllegalStateException ex) {
+ Log.e("MediaArtistNativeHelper", "Illegal state exception in nativeStartPreview");
+ throw ex;
+ } catch (RuntimeException ex) {
+ Log.e("MediaArtistNativeHelper", "Runtime exception in nativeStartPreview");
+ throw ex;
+ }
+
+ } else {
+ return;
+ }
+ }
+
+ /**
+ * This function is responsible for stopping the preview
+ *
+ * @return The last reported preview progress, in milliseconds
+ */
+ public long stopPreview() {
+ nativeStopPreview();
+ return mPreviewProgress;
+ }
+
+ /**
+ * This function is responsible for rendering a single frame
+ * from the complete story board on the surface
+ *
+ * @param surface The surface on which frame has to be rendered
+ * @param time The time in ms at which the frame has to be rendered
+ * @param surfaceWidth The surface width
+ * @param surfaceHeight The surface height
+ *
+ * @return The actual time from the story board at which the frame was extracted
+ * and rendered
+ */
+ public long renderPreviewFrame(Surface surface, long time, int surfaceWidth,
+ int surfaceHeight) {
+ long timeMs = 0;
+ if (!mInvalidatePreviewArray) {
+ try {
+ for (int clipCnt = 0; clipCnt < mPreviewEditSettings.clipSettingsArray.length; clipCnt++) {
+ if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) {
+ mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath = mPreviewEditSettings.clipSettingsArray[clipCnt].clipDecodedPath;
+ }
+ }
+ nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
+ timeMs = (long)nativeRenderPreviewFrame(surface, time, surfaceWidth, surfaceHeight);
+ } catch (IllegalArgumentException ex) {
+ Log.e("MediaArtistNativeHelper",
+ "Illegal Argument exception in nativeRenderPreviewFrame");
+ throw ex;
+ } catch (IllegalStateException ex) {
+ Log.e("MediaArtistNativeHelper",
+ "Illegal state exception in nativeRenderPreviewFrame");
+ throw ex;
+ } catch (RuntimeException ex) {
+ Log.e("MediaArtistNativeHelper", "Runtime exception in nativeRenderPreviewFrame");
+ throw ex;
+ }
+ return timeMs;
+ } else {
+
+ throw new RuntimeException("Call generate preview first");
+ }
+ }
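+
+ /*
+ * Typical use is scrubbing: rendering the storyboard frame closest to a
+ * requested time while the preview is stopped, e.g. (the time and surface
+ * dimensions are placeholders):
+ *
+ * long shownMs = renderPreviewFrame(surface, 5000, surfaceWidth, surfaceHeight);
+ *
+ * The returned time may differ from the requested 5000 ms because the
+ * native layer reports the actual time of the frame it rendered.
+ */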
+
+ /**
+ * This function is responsible for rendering a single frame
+ * from a single media item on the surface
+ *
+ * @param surface The surface on which the frame has to be rendered
+ * @param filepath The file path for which the frame needs to be displayed
+ * @param time The time in ms at which the frame has to be rendered
+ * @param framewidth The frame width
+ * @param frameheight The frame height
+ *
+ * @return The actual time from the media item at which the frame was
+ * extracted and rendered
+ */
+ public long renderMediaItemPreviewFrame(Surface surface, String filepath,
+ long time, int framewidth,
+ int frameheight) {
+ long timeMs = 0;
+ try {
+
+ timeMs = (long)nativeRenderMediaItemPreviewFrame(surface, filepath, framewidth,
+ frameheight, 0, 0, time);
+ } catch (IllegalArgumentException ex) {
+ Log.e("MediaArtistNativeHelper",
+ "Illegal Argument exception in renderMediaItemPreviewFrame");
+ throw ex;
+ } catch (IllegalStateException ex) {
+ Log.e("MediaArtistNativeHelper",
+ "Illegal state exception in renderMediaItemPreviewFrame");
+ throw ex;
+ } catch (RuntimeException ex) {
+ Log.e("MediaArtistNativeHelper", "Runtime exception in renderMediaItemPreviewFrame");
+ throw ex;
+ }
+
+ return timeMs;
+ }
+
+ /**
+ * Sets the flag that invalidates the preview arrays so that the
+ * preview is generated again
+ */
+ void setGeneratePreview(boolean isRequired) {
+ mInvalidatePreviewArray = isRequired;
+ }
+
+ /**
+ * @return The current status of the preview invalidation flag
+ */
+ boolean getGeneratePreview() {
+ return mInvalidatePreviewArray;
+ }
+
+ /**
+ * Calculates the aspect ratio from width and height
+ *
+ * @param w The width of media item
+ * @param h The height of media item
+ *
+ * @return The calculated aspect ratio
+ */
+ public int getAspectRatio(int w, int h) {
+ double apRatio = (double)(w) / (double)(h);
+ BigDecimal bd = new BigDecimal(apRatio);
+ bd = bd.setScale(3, BigDecimal.ROUND_HALF_UP);
+ apRatio = bd.doubleValue();
+ int var = MediaProperties.ASPECT_RATIO_16_9;
+ if (apRatio >= 1.7) {
+ var = MediaProperties.ASPECT_RATIO_16_9;
+ } else if (apRatio >= 1.6) {
+ var = MediaProperties.ASPECT_RATIO_5_3;
+ } else if (apRatio >= 1.5) {
+ var = MediaProperties.ASPECT_RATIO_3_2;
+ } else if (apRatio > 1.3) {
+ var = MediaProperties.ASPECT_RATIO_4_3;
+ } else if (apRatio >= 1.2) {
+ var = MediaProperties.ASPECT_RATIO_11_9;
+ }
+ return var;
+ }
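+
+ /*
+ * Worked examples of the bucketing above:
+ *
+ * 1280 x 720 -> 1.778 -> ASPECT_RATIO_16_9
+ * 720 x 480  -> 1.500 -> ASPECT_RATIO_3_2
+ * 640 x 480  -> 1.333 -> ASPECT_RATIO_4_3
+ * 176 x 144  -> 1.222 -> ASPECT_RATIO_11_9
+ */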
+
+ /**
+ * Maps the file type used in native layer
+ * to file type used in JAVA layer
+ *
+ * @param fileType The file type in native layer
+ *
+ * @return The File type in JAVA layer
+ */
+ public int getFileType(int fileType) {
+ int retValue = -1;
+ switch (fileType) {
+ case FileType.UNSUPPORTED:
+ retValue = MediaProperties.FILE_UNSUPPORTED;
+ break;
+ case FileType.THREE_GPP:
+ retValue = MediaProperties.FILE_3GP;
+ break;
+ case FileType.MP4:
+ retValue = MediaProperties.FILE_MP4;
+ break;
+ case FileType.JPG:
+ retValue = MediaProperties.FILE_JPEG;
+ break;
+ case FileType.PNG:
+ retValue = MediaProperties.FILE_PNG;
+ break;
+ case FileType.MP3:
+ retValue = MediaProperties.FILE_MP3;
+ break;
+
+ default:
+ retValue = -1;
+ }
+ return retValue;
+ }
+
+ /**
+ * Maps the video codec type used in native layer
+ * to video codec type used in JAVA layer
+ *
+ * @param codecType The video codec type in native layer
+ *
+ * @return The video codec type in JAVA layer
+ */
+ public int getVideoCodecType(int codecType) {
+ int retValue = -1;
+ switch (codecType) {
+ case VideoFormat.H263:
+ retValue = MediaProperties.VCODEC_H263;
+ break;
+ case VideoFormat.H264:
+ retValue = MediaProperties.VCODEC_H264BP;
+ break;
+ case VideoFormat.MPEG4:
+ retValue = MediaProperties.VCODEC_MPEG4;
+ break;
+ case VideoFormat.UNSUPPORTED:
+
+ default:
+ retValue = -1;
+ }
+ return retValue;
+ }
+
+ /**
+ * Maps the audio codec type used in native layer
+ * to audio codec type used in JAVA layer
+ *
+ * @param codecType The audio codec type in native layer
+ *
+ * @return The audio codec type in JAVA layer
+ */
+ public int getAudioCodecType(int codecType) {
+ int retValue = -1;
+ switch (codecType) {
+ case AudioFormat.AMR_NB:
+ retValue = MediaProperties.ACODEC_AMRNB;
+ break;
+ case AudioFormat.AAC:
+ retValue = MediaProperties.ACODEC_AAC_LC;
+ break;
+ case AudioFormat.MP3:
+ retValue = MediaProperties.ACODEC_MP3;
+ break;
+
+ default:
+ retValue = -1;
+ }
+ return retValue;
+ }
+
+ /**
+ * Returns the frame rate as integer
+ *
+ * @param fps The fps as enum
+ *
+ * @return The frame rate as integer
+ */
+ public int getFrameRate(int fps) {
+ int retValue = -1;
+ switch (fps) {
+ case VideoFrameRate.FR_5_FPS:
+ retValue = 5;
+ break;
+ case VideoFrameRate.FR_7_5_FPS:
+ retValue = 8;
+ break;
+ case VideoFrameRate.FR_10_FPS:
+ retValue = 10;
+ break;
+ case VideoFrameRate.FR_12_5_FPS:
+ retValue = 13;
+ break;
+ case VideoFrameRate.FR_15_FPS:
+ retValue = 15;
+ break;
+ case VideoFrameRate.FR_20_FPS:
+ retValue = 20;
+ break;
+ case VideoFrameRate.FR_25_FPS:
+ retValue = 25;
+ break;
+ case VideoFrameRate.FR_30_FPS:
+ retValue = 30;
+ break;
+
+ default:
+ retValue = -1;
+ }
+ return retValue;
+ }
+
+ /**
+ * Maps the file type used in JAVA layer
+ * to file type used in native layer
+ *
+ * @param fileType The file type in JAVA layer
+ *
+ * @return The File type in native layer
+ */
+ int getMediaItemFileType(int fileType) {
+ int retValue = -1;
+
+ switch (fileType) {
+ case MediaProperties.FILE_UNSUPPORTED:
+ retValue = FileType.UNSUPPORTED;
+ break;
+ case MediaProperties.FILE_3GP:
+ retValue = FileType.THREE_GPP;
+ break;
+ case MediaProperties.FILE_MP4:
+ retValue = FileType.MP4;
+ break;
+ case MediaProperties.FILE_JPEG:
+ retValue = FileType.JPG;
+ break;
+ case MediaProperties.FILE_PNG:
+ retValue = FileType.PNG;
+ break;
+
+ default:
+ retValue = -1;
+ }
+ return retValue;
+
+ }
+
+ /**
+ * Maps the rendering mode used in JAVA layer
+ * to rendering mode used in native layer
+ *
+ * @param renderingMode The rendering mode in JAVA layer
+ *
+ * @return The rendering mode in native layer
+ */
+ int getMediaItemRenderingMode(int renderingMode) {
+ int retValue = -1;
+ switch (renderingMode) {
+ case MediaItem.RENDERING_MODE_BLACK_BORDER:
+ retValue = MediaRendering.BLACK_BORDERS;
+ break;
+ case MediaItem.RENDERING_MODE_STRETCH:
+ retValue = MediaRendering.RESIZING;
+ break;
+ case MediaItem.RENDERING_MODE_CROPPING:
+ retValue = MediaRendering.CROPPING;
+ break;
+
+ default:
+ retValue = -1;
+ }
+ return retValue;
+ }
+
+ /**
+ * Maps the transition behavior used in JAVA layer
+ * to transition behavior used in native layer
+ *
+ * @param transitionType The transition behavior in JAVA layer
+ *
+ * @return The transition behavior in native layer
+ */
+ int getVideoTransitionBehaviour(int transitionType) {
+ int retValue = -1;
+ switch (transitionType) {
+ case Transition.BEHAVIOR_SPEED_UP:
+ retValue = TransitionBehaviour.SPEED_UP;
+ break;
+ case Transition.BEHAVIOR_SPEED_DOWN:
+ retValue = TransitionBehaviour.SPEED_DOWN;
+ break;
+ case Transition.BEHAVIOR_LINEAR:
+ retValue = TransitionBehaviour.LINEAR;
+ break;
+ case Transition.BEHAVIOR_MIDDLE_SLOW:
+ retValue = TransitionBehaviour.SLOW_MIDDLE;
+ break;
+ case Transition.BEHAVIOR_MIDDLE_FAST:
+ retValue = TransitionBehaviour.FAST_MIDDLE;
+ break;
+
+ default:
+ retValue = -1;
+ }
+ return retValue;
+ }
+
+ /**
+ * Maps the transition slide direction used in JAVA layer
+ * to transition slide direction used in native layer
+ *
+ * @param slideDirection The transition slide direction
+ * in JAVA layer
+ *
+ * @return The transition slide direction in native layer
+ */
+ int getSlideSettingsDirection(int slideDirection) {
+ int retValue = -1;
+ switch (slideDirection) {
+ case TransitionSliding.DIRECTION_RIGHT_OUT_LEFT_IN:
+ retValue = SlideDirection.RIGHT_OUT_LEFT_IN;
+ break;
+ case TransitionSliding.DIRECTION_LEFT_OUT_RIGHT_IN:
+ retValue = SlideDirection.LEFT_OUT_RIGTH_IN;
+ break;
+ case TransitionSliding.DIRECTION_TOP_OUT_BOTTOM_IN:
+ retValue = SlideDirection.TOP_OUT_BOTTOM_IN;
+ break;
+ case TransitionSliding.DIRECTION_BOTTOM_OUT_TOP_IN:
+ retValue = SlideDirection.BOTTOM_OUT_TOP_IN;
+ break;
+
+ default:
+ retValue = -1;
+ }
+ return retValue;
+ }
+
+ /**
+ * Maps the effect color type used in JAVA layer
+ * to effect color type used in native layer
+ *
+ * @param effect The EffectColor reference
+ *
+ * @return The color effect value from native layer
+ */
+ private int getEffectColorType(EffectColor effect) {
+ int retValue = -1;
+ switch (effect.getType()) {
+ case EffectColor.TYPE_COLOR:
+ if (effect.getColor() == EffectColor.GREEN) {
+ retValue = VideoEffect.GREEN;
+ } else if (effect.getColor() == EffectColor.PINK) {
+ retValue = VideoEffect.PINK;
+ } else if (effect.getColor() == EffectColor.GRAY) {
+ retValue = VideoEffect.BLACK_AND_WHITE;
+ } else {
+ retValue = VideoEffect.COLORRGB16;
+ }
+ break;
+ case EffectColor.TYPE_GRADIENT:
+ retValue = VideoEffect.GRADIENT;
+ break;
+ case EffectColor.TYPE_SEPIA:
+ retValue = VideoEffect.SEPIA;
+ break;
+ case EffectColor.TYPE_NEGATIVE:
+ retValue = VideoEffect.NEGATIVE;
+ break;
+ case EffectColor.TYPE_FIFTIES:
+ retValue = VideoEffect.FIFTIES;
+ break;
+
+ default:
+ retValue = -1;
+ }
+ return retValue;
+ }
+
+ /**
+ * Calculates the video resolution for the output clip
+ * based on the clip's height and the aspect ratio of the storyboard
+ *
+ * @param aspectRatio The aspect ratio of story board
+ * @param height The height of clip
+ *
+ * @return The video resolution
+ */
+ private int findVideoResolution(int aspectRatio, int height) {
+ final Pair<Integer, Integer>[] resolutions;
+ final Pair<Integer, Integer> maxResolution;
+ int retValue = VideoFrameSize.SIZE_UNDEFINED;
+ switch (aspectRatio) {
+ case MediaProperties.ASPECT_RATIO_3_2:
+ if (height == MediaProperties.HEIGHT_480)
+ retValue = VideoFrameSize.NTSC;
+ else if (height == MediaProperties.HEIGHT_720)
+ retValue = VideoFrameSize.W720p;
+ break;
+ case MediaProperties.ASPECT_RATIO_16_9:
+ if (height == MediaProperties.HEIGHT_480)
+ retValue = VideoFrameSize.WVGA16x9;
+ else if (height == MediaProperties.HEIGHT_720)
+ retValue = VideoFrameSize.V720p;
+ break;
+ case MediaProperties.ASPECT_RATIO_4_3:
+ if (height == MediaProperties.HEIGHT_480)
+ retValue = VideoFrameSize.VGA;
+ if (height == MediaProperties.HEIGHT_720)
+ retValue = VideoFrameSize.S720p;
+ break;
+ case MediaProperties.ASPECT_RATIO_5_3:
+ if (height == MediaProperties.HEIGHT_480)
+ retValue = VideoFrameSize.WVGA;
+ break;
+ case MediaProperties.ASPECT_RATIO_11_9:
+ if (height == MediaProperties.HEIGHT_144)
+ retValue = VideoFrameSize.QCIF;
+ break;
+ }
+ if (retValue == VideoFrameSize.SIZE_UNDEFINED) {
+ resolutions = MediaProperties.getSupportedResolutions(mVideoEditor.getAspectRatio());
+ // Get the highest resolution
+ maxResolution = resolutions[resolutions.length - 1];
+ retValue = findVideoResolution(mVideoEditor.getAspectRatio(),
+ maxResolution.second);
+ }
+
+ return retValue;
+ }
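+
+ /*
+ * For example, (ASPECT_RATIO_16_9, HEIGHT_720) resolves to V720p and
+ * (ASPECT_RATIO_4_3, HEIGHT_480) to VGA. Any combination that stays
+ * SIZE_UNDEFINED falls back to the highest resolution supported for the
+ * editor's aspect ratio and is resolved again with that height.
+ */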
+
+ /**
+ * This method is responsible for exporting a movie
+ *
+ * @param filePath The output file path
+ * @param projectDir The output project directory
+ * @param height The height of clip
+ * @param bitrate The bitrate at which the movie should be exported
+ * @param mediaItemsList The media items list
+ * @param mediaTransitionList The transitions list
+ * @param mediaBGMList The background track list
+ * @param listener The ExportProgressListener
+ *
+ */
+ public void export(String filePath, String projectDir, int height, int bitrate,
+ List<MediaItem> mediaItemsList, List<Transition> mediaTransitionList,
+ List<AudioTrack> mediaBGMList, ExportProgressListener listener) {
+
+ int outBitrate = 0;
+ mExportFilename = filePath;
+ previewStoryBoard(mediaItemsList, mediaTransitionList, mediaBGMList,null);
+ if (listener != null) {
+ mExportProgressListener = listener;
+ }
+ mProgressToApp = 0;
+
+ switch (bitrate) {
+ case MediaProperties.BITRATE_28K:
+ outBitrate = Bitrate.BR_32_KBPS;
+ break;
+ case MediaProperties.BITRATE_40K:
+ outBitrate = Bitrate.BR_48_KBPS;
+ break;
+ case MediaProperties.BITRATE_64K:
+ outBitrate = Bitrate.BR_64_KBPS;
+ break;
+ case MediaProperties.BITRATE_96K:
+ outBitrate = Bitrate.BR_96_KBPS;
+ break;
+ case MediaProperties.BITRATE_128K:
+ outBitrate = Bitrate.BR_128_KBPS;
+ break;
+ case MediaProperties.BITRATE_192K:
+ outBitrate = Bitrate.BR_192_KBPS;
+ break;
+ case MediaProperties.BITRATE_256K:
+ outBitrate = Bitrate.BR_256_KBPS;
+ break;
+ case MediaProperties.BITRATE_384K:
+ outBitrate = Bitrate.BR_384_KBPS;
+ break;
+ case MediaProperties.BITRATE_512K:
+ outBitrate = Bitrate.BR_512_KBPS;
+ break;
+ case MediaProperties.BITRATE_800K:
+ outBitrate = Bitrate.BR_800_KBPS;
+ break;
+ case MediaProperties.BITRATE_2M:
+ outBitrate = Bitrate.BR_2_MBPS;
+ break;
+
+ case MediaProperties.BITRATE_5M:
+ outBitrate = Bitrate.BR_5_MBPS;
+ break;
+ case MediaProperties.BITRATE_8M:
+ outBitrate = Bitrate.BR_8_MBPS;
+ break;
+
+ default:
+ throw new IllegalArgumentException("Argument Bitrate incorrect");
+ }
+ mPreviewEditSettings.videoFrameRate = VideoFrameRate.FR_30_FPS;
+ mPreviewEditSettings.outputFile = mOutputFilename = filePath;
+
+ int aspectRatio = mVideoEditor.getAspectRatio();
+ mPreviewEditSettings.videoFrameSize = findVideoResolution(aspectRatio, height);
+ mPreviewEditSettings.videoFormat = VideoFormat.H264;
+ mPreviewEditSettings.audioFormat = AudioFormat.AAC;
+ mPreviewEditSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
+ mPreviewEditSettings.maxFileSize = 0;
+ mPreviewEditSettings.audioChannels = 2;
+ mPreviewEditSettings.videoBitrate = outBitrate;
+ mPreviewEditSettings.audioBitrate = Bitrate.BR_96_KBPS;
+
+ mPreviewEditSettings.transitionSettingsArray = new TransitionSettings[mTotalClips - 1];
+ for (int index = 0; index < mTotalClips - 1; index++) {
+ mPreviewEditSettings.transitionSettingsArray[index] = new TransitionSettings();
+ mPreviewEditSettings.transitionSettingsArray[index].videoTransitionType = VideoTransition.NONE;
+ mPreviewEditSettings.transitionSettingsArray[index].audioTransitionType = AudioTransition.NONE;
+ }
+ for (int clipCnt = 0; clipCnt < mPreviewEditSettings.clipSettingsArray.length; clipCnt++) {
+ if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) {
+ mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath =
+ mPreviewEditSettings.clipSettingsArray[clipCnt].clipOriginalPath;
+ }
+ }
+ nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
+
+ int err = 0;
+ try {
+ mProcessingState = PROCESSING_EXPORT;
+ mProcessingObject = null;
+ err = generateClip(mPreviewEditSettings);
+ mProcessingState = PROCESSING_NONE;
+ } catch (IllegalArgumentException ex) {
+ Log.e("MediaArtistNativeHelper", "IllegalArgument for generateClip");
+ throw ex;
+ } catch (IllegalStateException ex) {
+ Log.e("MediaArtistNativeHelper", "IllegalStateExceptiont for generateClip");
+ throw ex;
+ } catch (RuntimeException ex) {
+ Log.e("MediaArtistNativeHelper", "RuntimeException for generateClip");
+ throw ex;
+ }
+
+ if (err != 0) {
+ Log.e("MediaArtistNativeHelper", "RuntimeException for generateClip");
+ throw new RuntimeException("generateClip failed with error="+err );
+ }
+
+ mExportDone = true;
+ setGeneratePreview(true);
+ mExportProgressListener = null;
+ }
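+
+ /*
+ * A minimal sketch of an export call, assuming the storyboard lists come
+ * from a VideoEditorImpl "editor" (the output path is a placeholder):
+ *
+ * export("/sdcard/project/out.mp4", editor.getPath(),
+ *         MediaProperties.HEIGHT_720, MediaProperties.BITRATE_2M,
+ *         editor.getAllMediaItems(), editor.getAllTransitions(),
+ *         editor.getAllAudioTracks(), exportListener);
+ *
+ * The overload below additionally lets the caller choose the audio and
+ * video codecs instead of the AAC/H.264 defaults used here.
+ */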
+
+ /**
+ * This method is responsible for exporting a movie
+ *
+ * @param filePath The output file path
+ * @param projectDir The output project directory
+ * @param height The height of clip
+ * @param bitrate The bitrate at which the movie should be exported
+ * @param audioCodec The audio codec to use
+ * @param videoCodec The video codec to use
+ * @param mediaItemsList The media items list
+ * @param mediaTransitionList The transitions list
+ * @param mediaBGMList The background track list
+ * @param listener The ExportProgressListener
+ *
+ */
+ public void export(String filePath, String projectDir,int height,int bitrate,
+ int audioCodec,int videoCodec,List<MediaItem> mediaItemsList,
+ List<Transition> mediaTransitionList,List<AudioTrack> mediaBGMList,
+ ExportProgressListener listener) {
+
+ int outBitrate = 0;
+ mExportFilename = filePath;
+ previewStoryBoard(mediaItemsList, mediaTransitionList, mediaBGMList,null);
+ if (listener != null) {
+ mExportProgressListener = listener;
+ }
+ mProgressToApp = 0;
+
+ switch (bitrate) {
+ case MediaProperties.BITRATE_28K:
+ outBitrate = Bitrate.BR_32_KBPS;
+ break;
+ case MediaProperties.BITRATE_40K:
+ outBitrate = Bitrate.BR_48_KBPS;
+ break;
+ case MediaProperties.BITRATE_64K:
+ outBitrate = Bitrate.BR_64_KBPS;
+ break;
+ case MediaProperties.BITRATE_96K:
+ outBitrate = Bitrate.BR_96_KBPS;
+ break;
+ case MediaProperties.BITRATE_128K:
+ outBitrate = Bitrate.BR_128_KBPS;
+ break;
+ case MediaProperties.BITRATE_192K:
+ outBitrate = Bitrate.BR_192_KBPS;
+ break;
+ case MediaProperties.BITRATE_256K:
+ outBitrate = Bitrate.BR_256_KBPS;
+ break;
+ case MediaProperties.BITRATE_384K:
+ outBitrate = Bitrate.BR_384_KBPS;
+ break;
+ case MediaProperties.BITRATE_512K:
+ outBitrate = Bitrate.BR_512_KBPS;
+ break;
+ case MediaProperties.BITRATE_800K:
+ outBitrate = Bitrate.BR_800_KBPS;
+ break;
+ case MediaProperties.BITRATE_2M:
+ outBitrate = Bitrate.BR_2_MBPS;
+ break;
+ case MediaProperties.BITRATE_5M:
+ outBitrate = Bitrate.BR_5_MBPS;
+ break;
+ case MediaProperties.BITRATE_8M:
+ outBitrate = Bitrate.BR_8_MBPS;
+ break;
+
+ default:
+ throw new IllegalArgumentException("Argument Bitrate incorrect");
+ }
+ mPreviewEditSettings.videoFrameRate = VideoFrameRate.FR_30_FPS;
+ mPreviewEditSettings.outputFile = mOutputFilename = filePath;
+
+ int aspectRatio = mVideoEditor.getAspectRatio();
+ mPreviewEditSettings.videoFrameSize = findVideoResolution(aspectRatio, height);
+ switch (audioCodec) {
+ case MediaProperties.ACODEC_AAC_LC:
+ mPreviewEditSettings.audioFormat = AudioFormat.AAC;
+ break;
+ case MediaProperties.ACODEC_AMRNB:
+ mPreviewEditSettings.audioFormat = AudioFormat.AMR_NB;
+ break;
+ }
+
+ switch (videoCodec) {
+ case MediaProperties.VCODEC_H263:
+ mPreviewEditSettings.videoFormat = VideoFormat.H263;
+ break;
+ case MediaProperties.VCODEC_H264BP:
+ mPreviewEditSettings.videoFormat = VideoFormat.H264;
+ break;
+ case MediaProperties.VCODEC_MPEG4:
+ mPreviewEditSettings.videoFormat = VideoFormat.MPEG4;
+ break;
+ }
+
+ mPreviewEditSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
+ mPreviewEditSettings.maxFileSize = 0;
+ mPreviewEditSettings.audioChannels = 2;
+ mPreviewEditSettings.videoBitrate = outBitrate;
+ mPreviewEditSettings.audioBitrate = Bitrate.BR_96_KBPS;
+
+ mPreviewEditSettings.transitionSettingsArray =
+ new TransitionSettings[mTotalClips - 1];
+ for (int index = 0; index < mTotalClips - 1; index++) {
+ mPreviewEditSettings.transitionSettingsArray[index] =
+ new TransitionSettings();
+ mPreviewEditSettings.transitionSettingsArray[index].videoTransitionType =
+ VideoTransition.NONE;
+ mPreviewEditSettings.transitionSettingsArray[index].audioTransitionType =
+ AudioTransition.NONE;
+ }
+ for (int clipCnt = 0; clipCnt < mPreviewEditSettings.clipSettingsArray.length; clipCnt++) {
+ if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) {
+ mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath =
+ mPreviewEditSettings.clipSettingsArray[clipCnt].clipOriginalPath;
+ }
+ }
+ nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
+
+ int err = 0;
+ try {
+ mProcessingState = PROCESSING_EXPORT;
+ mProcessingObject = null;
+ err = generateClip(mPreviewEditSettings);
+ mProcessingState = PROCESSING_NONE;
+ } catch (IllegalArgumentException ex) {
+ Log.e("MediaArtistNativeHelper", "IllegalArgument for generateClip");
+ throw ex;
+ } catch (IllegalStateException ex) {
+ Log.e("MediaArtistNativeHelper", "IllegalStateExceptiont for generateClip");
+ throw ex;
+ } catch (RuntimeException ex) {
+ Log.e("MediaArtistNativeHelper", "RuntimeException for generateClip");
+ throw ex;
+ }
+
+ if (err != 0) {
+ Log.e("MediaArtistNativeHelper", "RuntimeException for generateClip");
+ throw new RuntimeException("generateClip failed with error="+err );
+ }
+
+ mExportDone = true;
+ setGeneratePreview(true);
+ mExportProgressListener = null;
+ }
+
+
+ /**
+ * This method takes care of stopping the export process
+ *
+ * @param filename The input file name for which export has to be stopped
+ */
+ public void stop(String filename) {
+ if (!mExportDone) {
+ try {
+ stopEncoding();
+ } catch (IllegalStateException ex) {
+ Log.e("MediaArtistNativeHelper", "Illegal state exception in unload settings");
+ throw ex;
+ } catch (RuntimeException ex) {
+ Log.e("MediaArtistNativeHelper", "Runtime exception in unload settings");
+ throw ex;
+ }
+
+ new File(mExportFilename).delete();
+ }
+ }
+
+ /**
+ * This method extracts a frame from the input file
+ * and returns the frame as a bitmap
+ *
+ * @param inputFile The inputFile
+ * @param width The width of the output frame
+ * @param height The height of the output frame
+ * @param timeMS The time in ms at which the frame has to be extracted
+ *
+ * @return The extracted frame as a bitmap
+ */
+ public Bitmap getPixels(String inputFile, int width, int height, long timeMS) {
+ if (inputFile == null) {
+ throw new IllegalArgumentException();
+ }
+
+ IntBuffer rgb888 = IntBuffer.allocate(width * height * 4);
+ Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
+ nativeGetPixels(inputFile, rgb888.array(), width, height, timeMS);
+ bitmap.copyPixelsFromBuffer(rgb888);
+
+ return bitmap;
+ }
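+
+ /*
+ * Example: grab a 96 x 54 thumbnail one second into a clip (the path and
+ * dimensions are placeholders):
+ *
+ * Bitmap thumb = getPixels("/sdcard/clip.mp4", 96, 54, 1000);
+ */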
+
+ /**
+ * This method extracts a list of frames from the
+ * input file and returns the frames in a bitmap array
+ *
+ * @param filename The inputFile
+ * @param width The width of the output frame
+ * @param height The height of the output frame
+ * @param startMs The starting time in ms
+ * @param endMs The end time in ms
+ * @param thumbnailCount The number of frames to be extracted
+ * from startMs to endMs
+ *
+ * @return The frames as bitmaps in bitmap array
+ **/
+ public Bitmap[] getPixelsList(String filename, int width, int height, long startMs, long endMs,
+ int thumbnailCount) {
+ int[] rgb888 = null;
+ int thumbnailSize = width * height * 4;
+
+ int i = 0;
+ int deltaTime = (int)(endMs - startMs) / thumbnailCount;
+ Bitmap[] bitmap = null;
+ try {
+ // This may result in an OutOfMemoryError
+ rgb888 = new int[thumbnailSize * thumbnailCount];
+ bitmap = new Bitmap[thumbnailCount];
+ } catch (Throwable e) {
+ // Allocating to new size with Fixed count
+ try {
+ System.gc();
+ rgb888 = new int[thumbnailSize * MAX_THUMBNAIL_PERMITTED];
+ bitmap = new Bitmap[MAX_THUMBNAIL_PERMITTED];
+ thumbnailCount = MAX_THUMBNAIL_PERMITTED;
+ } catch (Throwable ex) {
+ throw new RuntimeException("Memory allocation fails,reduce nos of thumbanail count");
+ }
+ }
+ IntBuffer tmpBuffer = IntBuffer.allocate(thumbnailSize);
+ nativeGetPixelsList(filename, rgb888, width, height, deltaTime, thumbnailCount, startMs,
+ endMs);
+ for (; i < thumbnailCount; i++) {
+ bitmap[i] = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
+ tmpBuffer.put(rgb888, (i * thumbnailSize), thumbnailSize);
+ tmpBuffer.rewind();
+ bitmap[i].copyPixelsFromBuffer(tmpBuffer);
+ }
+
+ return bitmap;
+ }
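+
+ /*
+ * Frames are sampled evenly over [startMs, endMs]: with startMs = 0,
+ * endMs = 10000 and thumbnailCount = 10 the native layer is asked for one
+ * frame every 1000 ms. If the pixel buffer cannot be allocated, the request
+ * is clamped to MAX_THUMBNAIL_PERMITTED thumbnails.
+ */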
+
+ /**
+ * This method generates the audio graph
+ *
+ * @param uniqueId The unique id
+ * @param inFileName The input file name
+ * @param OutAudiGraphFileName The output audio graph file name
+ * @param frameDuration The duration of each frame
+ * @param audioChannels The number of audio channels
+ * @param samplesCount The total number of samples
+ * @param listener ExtractAudioWaveformProgressListener reference
+ * @param isVideo The flag to indicate if the file is video file or not
+ *
+ **/
+ public void generateAudioGraph(String uniqueId, String inFileName, String OutAudiGraphFileName,
+ int frameDuration, int audioChannels, int samplesCount,
+ ExtractAudioWaveformProgressListener listener, boolean isVideo) {
+ String tempPCMFileName;
+
+ if (listener != null) {
+ mExtractAudioWaveformProgressListener = listener;
+ }
+ /**
+ * in case of Video , first call will generate the PCM file to make the
+ * audio graph
+ */
+ if (isVideo) {
+ tempPCMFileName = String.format(mProjectPath + "/" + uniqueId + ".pcm");
+ } else {
+ tempPCMFileName = mAudioTrackPCMFilePath;
+ }
+ /**
+ * For Video item, generate the PCM
+ */
+ if (isVideo) {
+ nativeGenerateRawAudio(inFileName, tempPCMFileName);
+ }
+
+ nativeGenerateAudioGraph(tempPCMFileName, OutAudiGraphFileName, frameDuration,
+ audioChannels, samplesCount);
+
+ /* once the audio graph file is generated, delete the pcm file */
+ if (isVideo) {
+ new File(tempPCMFileName).delete();
+ }
+ }
+
+ /** Native Methods */
+
+ public native Properties getMediaProperties(String file) throws IllegalArgumentException,
+ IllegalStateException, RuntimeException, Exception;
+
+ /**
+ * Get the version of ManualEdit.
+ *
+ * @return version of ManualEdit
+ * @throws RuntimeException if an error occurred
+ * @see Version
+ */
+ public static native Version getVersion() throws RuntimeException;
+
+ /**
+ * Returns the video thumbnail in an array of integers. Output format is
+ * ARGB8888.
+ *
+ * @param pixelArray the array that receives the pixelvalues
+ * @param width width of the video thumbnail
+ * @param height height of the video thumbnail
+ * @param timeMS desired time of the thumbnail in ms
+ * @return actual time in ms of the thumbnail generated
+ * @throws IllegalStateException if the class has not been initialized
+ * @throws IllegalArgumentException if the pixelArray is not available or
+ * one of the dimensions is negative or zero or the time is
+ * negative
+ * @throws RuntimeException on runtime errors in native code
+ */
+ public native int nativeGetPixels(String fileName, int[] pixelArray, int width, int height,
+ long timeMS);
+
+ public native int nativeGetPixelsList(String fileName, int[] pixelArray, int width, int height,
+ int timeMS, int nosofTN, long startTimeMs, long endTimeMs);
+
+ /**
+ * Releases the JNI and cleans up the core native module. Should be called
+ * only after init()
+ *
+ * @throws IllegalStateException if the method could not be called
+ */
+ public native void release() throws IllegalStateException, RuntimeException;
+
+
+
+
+ /**
+ * Stops the encoding. This method should only be called after encoding has
+ * started using method <code> startEncoding</code>
+ *
+ * @throws IllegalStateException if the method could not be called
+ */
+ public native void stopEncoding() throws IllegalStateException, RuntimeException;
+
+
+
+ private native void _init(String tempPath, String libraryPath)
+ throws IllegalArgumentException, IllegalStateException, RuntimeException;
+
+ private native void nativeStartPreview(Surface mSurface, long fromMs, long toMs,
+ int callbackAfterFrameCount, boolean loop) throws IllegalArgumentException,
+ IllegalStateException, RuntimeException;
+
+ private native void nativePopulateSettings(EditSettings mEditSettings,
+ PreviewClipProperties mProperties, AudioSettings mAudioSettings)
+ throws IllegalArgumentException, IllegalStateException, RuntimeException;
+
+ private native int nativeRenderPreviewFrame(Surface mSurface, long timeMs,
+ int surfaceWidth, int surfaceHeight)
+ throws IllegalArgumentException,
+ IllegalStateException, RuntimeException;
+
+ private native int nativeRenderMediaItemPreviewFrame(Surface mSurface, String filepath,
+ int framewidth, int frameheight, int surfacewidth, int surfaceheight, long timeMs)
+ throws IllegalArgumentException, IllegalStateException, RuntimeException;
+
+ private native void nativeStopPreview();
+
+ public native int nativeGenerateAudioGraph(String pcmFilePath, String outGraphPath,
+ int frameDuration, int channels, int sampleCount);
+
+ public native int nativeGenerateRawAudio(String InFileName, String PCMFileName);
+
+ public native int nativeGenerateClip(EditSettings editSettings)
+ throws IllegalArgumentException, IllegalStateException, RuntimeException;
+
+}
diff --git a/media/java/android/media/videoeditor/MediaImageItem.java b/media/java/android/media/videoeditor/MediaImageItem.java
index e6e9bc24e801..b03588fdf462 100755
--- a/media/java/android/media/videoeditor/MediaImageItem.java
+++ b/media/java/android/media/videoeditor/MediaImageItem.java
@@ -1,492 +1,1033 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.videoeditor;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import android.graphics.Bitmap;
-import android.graphics.BitmapFactory;
-import android.graphics.Canvas;
-import android.graphics.Paint;
-import android.graphics.Rect;
-import android.util.Log;
-import android.util.Pair;
-
-/**
- * This class represents an image item on the storyboard. Note that images are
- * scaled down to the maximum supported resolution by preserving the native
- * aspect ratio. To learn the scaled image dimensions use
- * {@link #getScaledWidth()} and {@link #getScaledHeight()} respectively.
- *
- * {@hide}
- */
-public class MediaImageItem extends MediaItem {
- // Logging
- private static final String TAG = "MediaImageItem";
-
- // The resize paint
- private static final Paint sResizePaint = new Paint(Paint.FILTER_BITMAP_FLAG);
-
- // Instance variables
- private final int mWidth;
- private final int mHeight;
- private final int mAspectRatio;
- private long mDurationMs;
- private int mScaledWidth, mScaledHeight;
-
- /**
- * This class cannot be instantiated by using the default constructor
- */
- @SuppressWarnings("unused")
- private MediaImageItem() throws IOException {
- this(null, null, null, 0, RENDERING_MODE_BLACK_BORDER);
- }
-
- /**
- * Constructor
- *
- * @param editor The video editor reference
- * @param mediaItemId The media item id
- * @param filename The image file name
- * @param durationMs The duration of the image on the storyboard
- * @param renderingMode The rendering mode
- *
- * @throws IOException
- */
- public MediaImageItem(VideoEditor editor, String mediaItemId, String filename, long durationMs,
- int renderingMode)
- throws IOException {
- super(editor, mediaItemId, filename, renderingMode);
-
- // Determine the dimensions of the image
- final BitmapFactory.Options dbo = new BitmapFactory.Options();
- dbo.inJustDecodeBounds = true;
- BitmapFactory.decodeFile(filename, dbo);
-
- mWidth = dbo.outWidth;
- mHeight = dbo.outHeight;
- mDurationMs = durationMs;
-
- // TODO: Determine the aspect ratio from the width and height
- mAspectRatio = MediaProperties.ASPECT_RATIO_4_3;
-
- // Images are stored in memory scaled to the maximum resolution to
- // save memory.
- final Pair<Integer, Integer>[] resolutions =
- MediaProperties.getSupportedResolutions(mAspectRatio);
- // Get the highest resolution
- final Pair<Integer, Integer> maxResolution = resolutions[resolutions.length - 1];
- if (mHeight > maxResolution.second) {
- // We need to scale the image
- scaleImage(filename, maxResolution.first, maxResolution.second);
- mScaledWidth = maxResolution.first;
- mScaledHeight = maxResolution.second;
- } else {
- mScaledWidth = mWidth;
- mScaledHeight = mHeight;
- }
- }
-
- /*
- * {@inheritDoc}
- */
- @Override
- public int getFileType() {
- if (mFilename.endsWith(".jpg") || mFilename.endsWith(".jpeg")) {
- return MediaProperties.FILE_JPEG;
- } else if (mFilename.endsWith(".png")) {
- return MediaProperties.FILE_PNG;
- } else {
- return MediaProperties.FILE_UNSUPPORTED;
- }
- }
-
- /*
- * {@inheritDoc}
- */
- @Override
- public int getWidth() {
- return mWidth;
- }
-
- /*
- * {@inheritDoc}
- */
- @Override
- public int getHeight() {
- return mHeight;
- }
-
- /**
- * @return The scaled width of the image.
- */
- public int getScaledWidth() {
- return mScaledWidth;
- }
-
- /**
- * @return The scaled height of the image.
- */
- public int getScaledHeight() {
- return mScaledHeight;
- }
-
- /*
- * {@inheritDoc}
- */
- @Override
- public int getAspectRatio() {
- return mAspectRatio;
- }
-
- /**
- * This method will adjust the duration of bounding transitions, effects
- * and overlays if the current duration of the transactions become greater
- * than the maximum allowable duration.
- *
- * @param durationMs The duration of the image in the storyboard timeline
- */
- public void setDuration(long durationMs) {
- if (durationMs == mDurationMs) {
- return;
- }
-
- // Invalidate the end transitions if necessary.
- // This invalidation is necessary for the case in which an effect or
- // an overlay is overlapping with the end transition
- // (before the duration is changed) and it no longer overlaps with the
- // transition after the duration is increased.
-
- // The beginning transition does not need to be invalidated at this time
- // because an effect or an overlay overlaps with the beginning
- // transition, the begin transition is unaffected by a media item
- // duration change.
- invalidateEndTransition();
-
- mDurationMs = durationMs;
-
- adjustTransitions();
- final List<Overlay> adjustedOverlays = adjustOverlays();
- final List<Effect> adjustedEffects = adjustEffects();
-
- // Invalidate the beginning and end transitions after adjustments.
- // This invalidation is necessary for the case in which an effect or
- // an overlay was not overlapping with the beginning or end transitions
- // before the setDuration reduces the duration of the media item and
- // causes an overlap of the beginning and/or end transition with the
- // effect.
- invalidateBeginTransition(adjustedEffects, adjustedOverlays);
- invalidateEndTransition();
- }
-
- /*
- * {@inheritDoc}
- */
- @Override
- public long getDuration() {
- return mDurationMs;
- }
-
- /*
- * {@inheritDoc}
- */
- @Override
- public long getTimelineDuration() {
- return mDurationMs;
- }
-
- /*
- * {@inheritDoc}
- */
- @Override
- public Bitmap getThumbnail(int width, int height, long timeMs) throws IOException {
- return scaleImage(mFilename, width, height);
- }
-
- /*
- * {@inheritDoc}
- */
- @Override
- public Bitmap[] getThumbnailList(int width, int height, long startMs, long endMs,
- int thumbnailCount) throws IOException {
- final Bitmap thumbnail = scaleImage(mFilename, width, height);
- final Bitmap[] thumbnailArray = new Bitmap[thumbnailCount];
- for (int i = 0; i < thumbnailCount; i++) {
- thumbnailArray[i] = thumbnail;
- }
- return thumbnailArray;
- }
-
- /*
- * {@inheritDoc}
- */
- @Override
- void invalidateTransitions(long startTimeMs, long durationMs) {
- // Check if the item overlaps with the beginning and end transitions
- if (mBeginTransition != null) {
- if (isOverlapping(startTimeMs, durationMs, 0, mBeginTransition.getDuration())) {
- mBeginTransition.invalidate();
- }
- }
-
- if (mEndTransition != null) {
- final long transitionDurationMs = mEndTransition.getDuration();
- if (isOverlapping(startTimeMs, durationMs,
- getDuration() - transitionDurationMs, transitionDurationMs)) {
- mEndTransition.invalidate();
- }
- }
- }
-
- /*
- * {@inheritDoc}
- */
- @Override
- void invalidateTransitions(long oldStartTimeMs, long oldDurationMs, long newStartTimeMs,
- long newDurationMs) {
- // Check if the item overlaps with the beginning and end transitions
- if (mBeginTransition != null) {
- final long transitionDurationMs = mBeginTransition.getDuration();
- // If the start time has changed and if the old or the new item
- // overlaps with the begin transition, invalidate the transition.
- if (oldStartTimeMs != newStartTimeMs &&
- (isOverlapping(oldStartTimeMs, oldDurationMs, 0, transitionDurationMs) ||
- isOverlapping(newStartTimeMs, newDurationMs, 0, transitionDurationMs))) {
- mBeginTransition.invalidate();
- }
- }
-
- if (mEndTransition != null) {
- final long transitionDurationMs = mEndTransition.getDuration();
- // If the start time + duration has changed and if the old or the new
- // item overlaps the end transition, invalidate the transition/
- if (oldStartTimeMs + oldDurationMs != newStartTimeMs + newDurationMs &&
- (isOverlapping(oldStartTimeMs, oldDurationMs,
- mDurationMs - transitionDurationMs, transitionDurationMs) ||
- isOverlapping(newStartTimeMs, newDurationMs,
- mDurationMs - transitionDurationMs, transitionDurationMs))) {
- mEndTransition.invalidate();
- }
- }
- }
-
- /**
- * Invalidate the begin transition if any effects and overlays overlap
- * with the begin transition.
- *
- * @param effects List of effects to check for transition overlap
- * @param overlays List of overlays to check for transition overlap
- */
- private void invalidateBeginTransition(List<Effect> effects, List<Overlay> overlays) {
- if (mBeginTransition != null && mBeginTransition.isGenerated()) {
- final long transitionDurationMs = mBeginTransition.getDuration();
-
- // The begin transition must be invalidated if it overlaps with
- // an effect.
- for (Effect effect : effects) {
- // Check if the effect overlaps with the begin transition
- if (effect.getStartTime() < transitionDurationMs) {
- mBeginTransition.invalidate();
- break;
- }
- }
-
- if (mBeginTransition.isGenerated()) {
- // The end transition must be invalidated if it overlaps with
- // an overlay.
- for (Overlay overlay : overlays) {
- // Check if the overlay overlaps with the end transition
- if (overlay.getStartTime() < transitionDurationMs) {
- mBeginTransition.invalidate();
- break;
- }
- }
- }
- }
- }
-
- /**
- * Invalidate the end transition if any effects and overlays overlap
- * with the end transition.
- */
- private void invalidateEndTransition() {
- if (mEndTransition != null && mEndTransition.isGenerated()) {
- final long transitionDurationMs = mEndTransition.getDuration();
-
- // The end transition must be invalidated if it overlaps with
- // an effect.
- final List<Effect> effects = getAllEffects();
- for (Effect effect : effects) {
- // Check if the effect overlaps with the end transition
- if (effect.getStartTime() + effect.getDuration() >
- mDurationMs - transitionDurationMs) {
- mEndTransition.invalidate();
- break;
- }
- }
-
- if (mEndTransition.isGenerated()) {
- // The end transition must be invalidated if it overlaps with
- // an overlay.
- final List<Overlay> overlays = getAllOverlays();
- for (Overlay overlay : overlays) {
- // Check if the overlay overlaps with the end transition
- if (overlay.getStartTime() + overlay.getDuration() >
- mDurationMs - transitionDurationMs) {
- mEndTransition.invalidate();
- break;
- }
- }
- }
- }
- }
-
- /**
- * Adjust the start time and/or duration of effects.
- *
- * @return The list of effects which were adjusted
- */
- private List<Effect> adjustEffects() {
- final List<Effect> adjustedEffects = new ArrayList<Effect>();
- final List<Effect> effects = getAllEffects();
- for (Effect effect : effects) {
- // Adjust the start time if necessary
- final long effectStartTimeMs;
- if (effect.getStartTime() > getDuration()) {
- effectStartTimeMs = 0;
- } else {
- effectStartTimeMs = effect.getStartTime();
- }
-
- // Adjust the duration if necessary
- final long effectDurationMs;
- if (effectStartTimeMs + effect.getDuration() > getDuration()) {
- effectDurationMs = getDuration() - effectStartTimeMs;
- } else {
- effectDurationMs = effect.getDuration();
- }
-
- if (effectStartTimeMs != effect.getStartTime() ||
- effectDurationMs != effect.getDuration()) {
- effect.setStartTimeAndDuration(effectStartTimeMs, effectDurationMs);
- adjustedEffects.add(effect);
- }
- }
-
- return adjustedEffects;
- }
-
- /**
- * Adjust the start time and/or duration of overlays.
- *
- * @return The list of overlays which were adjusted
- */
- private List<Overlay> adjustOverlays() {
- final List<Overlay> adjustedOverlays = new ArrayList<Overlay>();
- final List<Overlay> overlays = getAllOverlays();
- for (Overlay overlay : overlays) {
- // Adjust the start time if necessary
- final long overlayStartTimeMs;
- if (overlay.getStartTime() > getDuration()) {
- overlayStartTimeMs = 0;
- } else {
- overlayStartTimeMs = overlay.getStartTime();
- }
-
- // Adjust the duration if necessary
- final long overlayDurationMs;
- if (overlayStartTimeMs + overlay.getDuration() > getDuration()) {
- overlayDurationMs = getDuration() - overlayStartTimeMs;
- } else {
- overlayDurationMs = overlay.getDuration();
- }
-
- if (overlayStartTimeMs != overlay.getStartTime() ||
- overlayDurationMs != overlay.getDuration()) {
- overlay.setStartTimeAndDuration(overlayStartTimeMs, overlayDurationMs);
- adjustedOverlays.add(overlay);
- }
- }
-
- return adjustedOverlays;
- }
-
- /**
- * Resize a bitmap to the specified width and height
- *
- * @param filename The filename
- * @param width The thumbnail width
- * @param height The thumbnail height
- *
- * @return The resized bitmap
- */
- private Bitmap scaleImage(String filename, int width, int height) throws IOException {
- final BitmapFactory.Options dbo = new BitmapFactory.Options();
- dbo.inJustDecodeBounds = true;
- BitmapFactory.decodeFile(filename, dbo);
-
- final int nativeWidth = dbo.outWidth;
- final int nativeHeight = dbo.outHeight;
- if (Log.isLoggable(TAG, Log.DEBUG)) {
- Log.d(TAG, "generateThumbnail: Input: " + nativeWidth + "x" + nativeHeight
- + ", resize to: " + width + "x" + height);
- }
-
- final Bitmap srcBitmap;
- float bitmapWidth, bitmapHeight;
- if (nativeWidth > width || nativeHeight > height) {
- float dx = ((float)nativeWidth) / ((float)width);
- float dy = ((float)nativeHeight) / ((float)height);
- if (dx > dy) {
- bitmapWidth = width;
- bitmapHeight = nativeHeight / dx;
- } else {
- bitmapWidth = nativeWidth / dy;
- bitmapHeight = height;
- }
- // Create the bitmap from file
- if (nativeWidth / bitmapWidth > 1) {
- final BitmapFactory.Options options = new BitmapFactory.Options();
- options.inSampleSize = nativeWidth / (int)bitmapWidth;
- srcBitmap = BitmapFactory.decodeFile(filename, options);
- } else {
- srcBitmap = BitmapFactory.decodeFile(filename);
- }
- } else {
- bitmapWidth = width;
- bitmapHeight = height;
- srcBitmap = BitmapFactory.decodeFile(filename);
- }
-
- if (srcBitmap == null) {
- Log.e(TAG, "generateThumbnail: Cannot decode image bytes");
- throw new IOException("Cannot decode file: " + mFilename);
- }
-
- // Create the canvas bitmap
- final Bitmap bitmap = Bitmap.createBitmap((int)bitmapWidth, (int)bitmapHeight,
- Bitmap.Config.ARGB_8888);
- final Canvas canvas = new Canvas(bitmap);
- canvas.drawBitmap(srcBitmap, new Rect(0, 0, srcBitmap.getWidth(), srcBitmap.getHeight()),
- new Rect(0, 0, (int)bitmapWidth, (int)bitmapHeight), sResizePaint);
- // Release the source bitmap
- srcBitmap.recycle();
- return bitmap;
- }
-}
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.videoeditor;
+
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+import android.graphics.Canvas;
+import android.graphics.Paint;
+import android.graphics.Rect;
+import android.media.videoeditor.MediaArtistNativeHelper.ClipSettings;
+import android.media.videoeditor.MediaArtistNativeHelper.EditSettings;
+import android.media.videoeditor.MediaArtistNativeHelper.FileType;
+import android.media.videoeditor.MediaArtistNativeHelper.Properties;
+import android.util.Log;
+import android.util.Pair;
+
+import java.io.DataOutputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.lang.Math;
+import java.nio.ByteBuffer;
+import java.nio.IntBuffer;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * This class represents an image item on the storyboard. Note that images are
+ * scaled down to the maximum supported resolution by preserving the native
+ * aspect ratio. To learn the scaled image dimensions use
+ * {@link #getScaledWidth()} and {@link #getScaledHeight()} respectively.
+ *
+ * {@hide}
+ */
+public class MediaImageItem extends MediaItem {
+ /**
+ * Logging
+ */
+ private static final String TAG = "MediaImageItem";
+
+ /**
+ * The resize paint
+ */
+ private static final Paint sResizePaint = new Paint(Paint.FILTER_BITMAP_FLAG);
+
+ /**
+ * Instance variables
+ */
+ private final int mWidth;
+ private final int mHeight;
+ private final int mAspectRatio;
+ private long mDurationMs;
+ private int mScaledWidth, mScaledHeight;
+ private String mScaledFilename;
+ private final VideoEditorImpl mVideoEditor;
+ private String mDecodedFilename;
+ private int mGeneratedClipHeight;
+ private int mGeneratedClipWidth;
+
+ private final MediaArtistNativeHelper mMANativeHelper;
+
+ /**
+ * This class cannot be instantiated by using the default constructor
+ */
+ @SuppressWarnings("unused")
+ private MediaImageItem() throws IOException {
+ this(null, null, null, 0, RENDERING_MODE_BLACK_BORDER);
+ }
+
+ /**
+ * Constructor
+ *
+ * @param editor The video editor reference
+ * @param mediaItemId The media item id
+ * @param filename The image file name
+ * @param durationMs The duration of the image on the storyboard
+ * @param renderingMode The rendering mode
+ *
+ * @throws IOException
+ */
+ public MediaImageItem(VideoEditor editor, String mediaItemId,
+ String filename, long durationMs,
+ int renderingMode) throws IOException {
+
+ super(editor, mediaItemId, filename, renderingMode);
+
+ mMANativeHelper = ((VideoEditorImpl)editor).getNativeContext();
+ mVideoEditor = ((VideoEditorImpl)editor);
+ try {
+ final Properties properties =
+ mMANativeHelper.getMediaProperties(filename);
+
+ switch (mMANativeHelper.getFileType(properties.fileType)) {
+ case MediaProperties.FILE_JPEG:
+ break;
+ case MediaProperties.FILE_PNG:
+ break;
+
+ default:
+ throw new IllegalArgumentException("Unsupported Input File Type");
+ }
+ } catch (Exception e) {
+ throw new IllegalArgumentException("Unsupported file or file not found");
+ }
+
+ /**
+ * Determine the dimensions of the image
+ */
+ final BitmapFactory.Options dbo = new BitmapFactory.Options();
+ dbo.inJustDecodeBounds = true;
+ BitmapFactory.decodeFile(filename, dbo);
+
+ mWidth = dbo.outWidth;
+ mHeight = dbo.outHeight;
+ mDurationMs = durationMs;
+        mDecodedFilename = mMANativeHelper.getProjectPath() + "/"
+                + "decoded" + getId() + ".rgb";
+ final FileOutputStream fl = new FileOutputStream(mDecodedFilename);
+ final DataOutputStream dos = new DataOutputStream(fl);
+ try {
+ mAspectRatio = mMANativeHelper.getAspectRatio(mWidth, mHeight);
+ } catch(IllegalArgumentException e) {
+ throw new IllegalArgumentException ("Null width and height");
+ }
+ mGeneratedClipHeight = 0;
+ mGeneratedClipWidth = 0;
+
+ /**
+         * Images larger than the maximum supported resolution are scaled
+         * down to that resolution in order to save memory.
+ */
+ final Pair<Integer, Integer>[] resolutions =
+ MediaProperties.getSupportedResolutions(mAspectRatio);
+ /**
+ * Get the highest resolution
+ */
+ final Pair<Integer, Integer> maxResolution = resolutions[resolutions.length - 1];
+ if (mHeight > maxResolution.second) {
+ /**
+ * We need to scale the image
+ */
+ final Bitmap scaledImage = scaleImage(filename, maxResolution.first,
+ maxResolution.second);
+            mScaledFilename = mMANativeHelper.getProjectPath() + "/"
+                    + "scaled" + getId() + ".JPG";
+ if (!((new File(mScaledFilename)).exists())) {
+ super.mRegenerateClip = true;
+ final FileOutputStream f1 = new FileOutputStream(mScaledFilename);
+ scaledImage.compress(Bitmap.CompressFormat.JPEG, 50,f1);
+ f1.close();
+ }
+ mScaledWidth = scaledImage.getWidth();
+ mScaledHeight = scaledImage.getHeight();
+
+ int mNewWidth = 0;
+ int mNewHeight = 0;
+ if ((mScaledWidth % 2 ) != 0) {
+ mNewWidth = mScaledWidth - 1;
+ }
+ else {
+ mNewWidth = mScaledWidth;
+ }
+ if ((mScaledHeight % 2 ) != 0) {
+ mNewHeight = mScaledHeight - 1;
+ }
+ else {
+ mNewHeight = mScaledHeight;
+ }
+ final int [] framingBuffer = new int[mNewWidth];
+ final ByteBuffer byteBuffer = ByteBuffer.allocate(framingBuffer.length * 4);
+ IntBuffer intBuffer;
+
+ final byte[] array = byteBuffer.array();
+ int tmp = 0;
+ while (tmp < mNewHeight) {
+ scaledImage.getPixels(framingBuffer,0,mScaledWidth,0,
+ tmp,mNewWidth,1);
+ intBuffer = byteBuffer.asIntBuffer();
+ intBuffer.put(framingBuffer,0,mNewWidth);
+ dos.write(array);
+ tmp += 1;
+ }
+ mScaledWidth = mNewWidth;
+ mScaledHeight = mNewHeight;
+ scaledImage.recycle();
+ } else {
+ final Bitmap scaledImage = BitmapFactory.decodeFile(filename);
+            mScaledFilename = mMANativeHelper.getProjectPath() + "/"
+                    + "scaled" + getId() + ".JPG";
+ if (!((new File(mScaledFilename)).exists())) {
+ super.mRegenerateClip = true;
+ FileOutputStream f1 = new FileOutputStream(mScaledFilename);
+ scaledImage.compress(Bitmap.CompressFormat.JPEG, 50,f1);
+ f1.close();
+ }
+ mScaledWidth = scaledImage.getWidth();
+ mScaledHeight = scaledImage.getHeight();
+
+ int mNewWidth = 0;
+            int mNewHeight = 0;
+ if ((mScaledWidth % 2 ) != 0) {
+ mNewWidth = mScaledWidth - 1;
+ }
+ else {
+ mNewWidth = mScaledWidth;
+ }
+ if ((mScaledHeight % 2 ) != 0) {
+                mNewHeight = mScaledHeight - 1;
+            }
+            else {
+                mNewHeight = mScaledHeight;
+ }
+ Bitmap imageBitmap = BitmapFactory.decodeFile(mScaledFilename);
+ final int [] framingBuffer = new int[mNewWidth];
+ ByteBuffer byteBuffer = ByteBuffer.allocate(framingBuffer.length * 4);
+ IntBuffer intBuffer;
+
+ byte[] array = byteBuffer.array();
+ int tmp = 0;
+            while (tmp < mNewHeight) {
+ imageBitmap.getPixels(framingBuffer,0,mScaledWidth,0,
+ tmp,mNewWidth,1);
+ intBuffer = byteBuffer.asIntBuffer();
+ intBuffer.put(framingBuffer,0,mNewWidth);
+ dos.write(array);
+ tmp += 1;
+ }
+ mScaledWidth = mNewWidth;
+            mScaledHeight = mNewHeight;
+ imageBitmap.recycle();
+ }
+ fl.close();
+ System.gc();
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public int getFileType() {
+ if (mFilename.endsWith(".jpg") || mFilename.endsWith(".jpeg")
+ || mFilename.endsWith(".JPG") || mFilename.endsWith(".JPEG")) {
+ return MediaProperties.FILE_JPEG;
+ } else if (mFilename.endsWith(".png") || mFilename.endsWith(".PNG")) {
+ return MediaProperties.FILE_PNG;
+ } else {
+ return MediaProperties.FILE_UNSUPPORTED;
+ }
+ }
+
+ /**
+ * @return The scaled image file name
+ */
+ String getScaledImageFileName() {
+ return mScaledFilename;
+ }
+
+ /**
+     * @return The height of the generated Ken Burns clip.
+ */
+ int getGeneratedClipHeight() {
+ return mGeneratedClipHeight;
+ }
+
+ /**
+     * @return The width of the generated Ken Burns clip.
+ */
+ int getGeneratedClipWidth() {
+ return mGeneratedClipWidth;
+ }
+
+ /**
+     * @return The file name of the image which is decoded and stored
+     * in RGB format
+ */
+ String getDecodedImageFileName() {
+ return mDecodedFilename;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public int getWidth() {
+ return mWidth;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public int getHeight() {
+ return mHeight;
+ }
+
+ /**
+ * @return The scaled width of the image.
+ */
+ public int getScaledWidth() {
+ return mScaledWidth;
+ }
+
+ /**
+ * @return The scaled height of the image.
+ */
+ public int getScaledHeight() {
+ return mScaledHeight;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public int getAspectRatio() {
+ return mAspectRatio;
+ }
+
+ /**
+     * Sets the duration of the image in the storyboard timeline. This method
+     * also adjusts the duration of bounding transitions, effects and overlays
+     * if their current duration becomes greater than the maximum allowable
+     * duration.
+ *
+ * @param durationMs The duration of the image in the storyboard timeline
+ */
+ public void setDuration(long durationMs) {
+ if (durationMs == mDurationMs) {
+ return;
+ }
+
+ /**
+ * Invalidate the end transitions if necessary.
+ * This invalidation is necessary for the case in which an effect or
+ * an overlay is overlapping with the end transition
+ * (before the duration is changed) and it no longer overlaps with the
+ * transition after the duration is increased.
+ *
+     * The beginning transition does not need to be invalidated at this time
+     * because even if an effect or an overlay overlaps with the beginning
+     * transition, the beginning transition is unaffected by a change of the
+     * media item duration.
+ */
+ invalidateEndTransition();
+
+ mDurationMs = durationMs;
+
+ adjustTransitions();
+ final List<Overlay> adjustedOverlays = adjustOverlays();
+ final List<Effect> adjustedEffects = adjustEffects();
+
+ /**
+ * Invalidate the beginning and end transitions after adjustments.
+ * This invalidation is necessary for the case in which an effect or
+ * an overlay was not overlapping with the beginning or end transitions
+ * before the setDuration reduces the duration of the media item and
+ * causes an overlap of the beginning and/or end transition with the
+ * effect.
+ */
+ invalidateBeginTransition(adjustedEffects, adjustedOverlays);
+ invalidateEndTransition();
+ if (getGeneratedImageClip() != null) {
+ /*
+ * Delete the file
+ */
+ new File(getGeneratedImageClip()).delete();
+ /*
+ * Invalidate the filename
+ */
+ setGeneratedImageClip(null);
+ super.setRegenerateClip(true);
+ }
+ mVideoEditor.updateTimelineDuration();
+ }
+
+ /**
+     * Invalidate the begin transition if any effects or overlays overlap
+     * with the begin transition.
+ *
+ * @param effects List of effects to check for transition overlap
+ * @param overlays List of overlays to check for transition overlap
+ */
+ private void invalidateBeginTransition(List<Effect> effects, List<Overlay> overlays) {
+ if (mBeginTransition != null && mBeginTransition.isGenerated()) {
+ final long transitionDurationMs = mBeginTransition.getDuration();
+
+ /**
+ * The begin transition must be invalidated if it overlaps with
+ * an effect.
+ */
+ for (Effect effect : effects) {
+ /**
+ * Check if the effect overlaps with the begin transition
+ */
+ if (effect.getStartTime() < transitionDurationMs) {
+ mBeginTransition.invalidate();
+ break;
+ }
+ }
+
+ if (mBeginTransition.isGenerated()) {
+ /**
+                 * The begin transition must be invalidated if it overlaps with
+                 * an overlay.
+ */
+ for (Overlay overlay : overlays) {
+ /**
+                     * Check if the overlay overlaps with the begin transition
+ */
+ if (overlay.getStartTime() < transitionDurationMs) {
+ mBeginTransition.invalidate();
+ break;
+ }
+ }
+ }
+ }
+ }
+
+ /**
+     * Invalidate the end transition if any effects or overlays overlap
+     * with the end transition.
+ */
+ private void invalidateEndTransition() {
+ if (mEndTransition != null && mEndTransition.isGenerated()) {
+ final long transitionDurationMs = mEndTransition.getDuration();
+
+ /**
+ * The end transition must be invalidated if it overlaps with
+ * an effect.
+ */
+ final List<Effect> effects = getAllEffects();
+ for (Effect effect : effects) {
+ /**
+ * Check if the effect overlaps with the end transition
+ */
+ if (effect.getStartTime() + effect.getDuration() >
+ mDurationMs - transitionDurationMs) {
+ mEndTransition.invalidate();
+ break;
+ }
+ }
+
+ if (mEndTransition.isGenerated()) {
+ /**
+ * The end transition must be invalidated if it overlaps with
+ * an overlay.
+ */
+ final List<Overlay> overlays = getAllOverlays();
+ for (Overlay overlay : overlays) {
+ /**
+ * Check if the overlay overlaps with the end transition
+ */
+ if (overlay.getStartTime() + overlay.getDuration() >
+ mDurationMs - transitionDurationMs) {
+ mEndTransition.invalidate();
+ break;
+ }
+ }
+ }
+ }
+ }
+
+ /**
+ * Adjust the start time and/or duration of effects.
+ *
+ * @return The list of effects which were adjusted
+ */
+ private List<Effect> adjustEffects() {
+ final List<Effect> adjustedEffects = new ArrayList<Effect>();
+ final List<Effect> effects = getAllEffects();
+ for (Effect effect : effects) {
+ /**
+ * Adjust the start time if necessary
+ */
+ final long effectStartTimeMs;
+ if (effect.getStartTime() > getDuration()) {
+ effectStartTimeMs = 0;
+ } else {
+ effectStartTimeMs = effect.getStartTime();
+ }
+
+ /**
+ * Adjust the duration if necessary
+ */
+ final long effectDurationMs;
+ if (effectStartTimeMs + effect.getDuration() > getDuration()) {
+ effectDurationMs = getDuration() - effectStartTimeMs;
+ } else {
+ effectDurationMs = effect.getDuration();
+ }
+
+ if (effectStartTimeMs != effect.getStartTime() ||
+ effectDurationMs != effect.getDuration()) {
+ effect.setStartTimeAndDuration(effectStartTimeMs, effectDurationMs);
+ adjustedEffects.add(effect);
+ }
+ }
+
+ return adjustedEffects;
+ }
+
+ /**
+ * Adjust the start time and/or duration of overlays.
+ *
+ * @return The list of overlays which were adjusted
+ */
+ private List<Overlay> adjustOverlays() {
+ final List<Overlay> adjustedOverlays = new ArrayList<Overlay>();
+ final List<Overlay> overlays = getAllOverlays();
+ for (Overlay overlay : overlays) {
+ /**
+ * Adjust the start time if necessary
+ */
+ final long overlayStartTimeMs;
+ if (overlay.getStartTime() > getDuration()) {
+ overlayStartTimeMs = 0;
+ } else {
+ overlayStartTimeMs = overlay.getStartTime();
+ }
+
+ /**
+ * Adjust the duration if necessary
+ */
+ final long overlayDurationMs;
+ if (overlayStartTimeMs + overlay.getDuration() > getDuration()) {
+ overlayDurationMs = getDuration() - overlayStartTimeMs;
+ } else {
+ overlayDurationMs = overlay.getDuration();
+ }
+
+ if (overlayStartTimeMs != overlay.getStartTime() ||
+ overlayDurationMs != overlay.getDuration()) {
+ overlay.setStartTimeAndDuration(overlayStartTimeMs, overlayDurationMs);
+ adjustedOverlays.add(overlay);
+ }
+ }
+
+ return adjustedOverlays;
+ }
+
+
+ /**
+     * This function sets the name of the clip generated with the
+     * Ken Burns effect.
+ *
+ * @param generatedFilePath The name of the generated clip
+ */
+ @Override
+ void setGeneratedImageClip(String generatedFilePath) {
+ super.setGeneratedImageClip(generatedFilePath);
+
+        // Set the Ken Burns clip width and height
+ mGeneratedClipHeight = getScaledHeight();
+ switch (mVideoEditor.getAspectRatio()) {
+ case MediaProperties.ASPECT_RATIO_3_2:
+ if (mGeneratedClipHeight == MediaProperties.HEIGHT_480)
+ mGeneratedClipWidth = 720;
+ else if (mGeneratedClipHeight == MediaProperties.HEIGHT_720)
+ mGeneratedClipWidth = 1080;
+ break;
+ case MediaProperties.ASPECT_RATIO_16_9:
+ if (mGeneratedClipHeight == MediaProperties.HEIGHT_360)
+ mGeneratedClipWidth = 640;
+ else if (mGeneratedClipHeight == MediaProperties.HEIGHT_480)
+ mGeneratedClipWidth = 854;
+ else if (mGeneratedClipHeight == MediaProperties.HEIGHT_720)
+ mGeneratedClipWidth = 1280;
+ break;
+ case MediaProperties.ASPECT_RATIO_4_3:
+ if (mGeneratedClipHeight == MediaProperties.HEIGHT_480)
+ mGeneratedClipWidth = 640;
+ if (mGeneratedClipHeight == MediaProperties.HEIGHT_720)
+ mGeneratedClipWidth = 960;
+ break;
+ case MediaProperties.ASPECT_RATIO_5_3:
+ if (mGeneratedClipHeight == MediaProperties.HEIGHT_480)
+ mGeneratedClipWidth = 800;
+ break;
+ case MediaProperties.ASPECT_RATIO_11_9:
+ if (mGeneratedClipHeight == MediaProperties.HEIGHT_144)
+ mGeneratedClipWidth = 176;
+ break;
+ }
+ }
+
+ /**
+     * @return The name of the image clip generated with the
+     *         Ken Burns effect.
+ */
+ @Override
+ String getGeneratedImageClip() {
+ return super.getGeneratedImageClip();
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public long getDuration() {
+ return mDurationMs;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public long getTimelineDuration() {
+ return mDurationMs;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public Bitmap getThumbnail(int width, int height, long timeMs) throws IOException {
+ if (getGeneratedImageClip() != null) {
+ return mMANativeHelper.getPixels(getGeneratedImageClip(),
+ width, height,timeMs);
+ } else {
+ return scaleImage(mFilename, width, height);
+ }
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public Bitmap[] getThumbnailList(int width, int height, long startMs, long endMs,
+ int thumbnailCount) throws IOException {
+        // Ken Burns effect was not applied to this media item
+ if (getGeneratedImageClip() == null) {
+ final Bitmap thumbnail = scaleImage(mFilename, width, height);
+ final Bitmap[] thumbnailArray = new Bitmap[thumbnailCount];
+ for (int i = 0; i < thumbnailCount; i++) {
+ thumbnailArray[i] = thumbnail;
+ }
+            return thumbnailArray;
+        } else {
+ if (startMs > endMs) {
+ throw new IllegalArgumentException("Start time is greater than end time");
+ }
+ if (endMs > mDurationMs) {
+ throw new IllegalArgumentException("End time is greater than file duration");
+ }
+ if (startMs == endMs) {
+ Bitmap[] bitmap = new Bitmap[1];
+ bitmap[0] = mMANativeHelper.getPixels(getGeneratedImageClip(),
+ width, height,startMs);
+ return bitmap;
+ }
+ return mMANativeHelper.getPixelsList(getGeneratedImageClip(), width,
+ height,startMs,endMs,thumbnailCount);
+ }
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ void invalidateTransitions(long startTimeMs, long durationMs) {
+ /**
+ * Check if the item overlaps with the beginning and end transitions
+ */
+ if (mBeginTransition != null) {
+ if (isOverlapping(startTimeMs, durationMs, 0, mBeginTransition.getDuration())) {
+ mBeginTransition.invalidate();
+ }
+ }
+
+ if (mEndTransition != null) {
+ final long transitionDurationMs = mEndTransition.getDuration();
+ if (isOverlapping(startTimeMs, durationMs,
+ getDuration() - transitionDurationMs, transitionDurationMs)) {
+ mEndTransition.invalidate();
+ }
+ }
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ void invalidateTransitions(long oldStartTimeMs, long oldDurationMs, long newStartTimeMs,
+ long newDurationMs) {
+ /**
+ * Check if the item overlaps with the beginning and end transitions
+ */
+ if (mBeginTransition != null) {
+ final long transitionDurationMs = mBeginTransition.getDuration();
+ /**
+ * If the start time has changed and if the old or the new item
+ * overlaps with the begin transition, invalidate the transition.
+ */
+ if (((oldStartTimeMs != newStartTimeMs)
+ || (oldDurationMs != newDurationMs) )&&
+ (isOverlapping(oldStartTimeMs, oldDurationMs, 0, transitionDurationMs) ||
+ isOverlapping(newStartTimeMs, newDurationMs, 0,
+ transitionDurationMs))) {
+ mBeginTransition.invalidate();
+ }
+ }
+
+ if (mEndTransition != null) {
+ final long transitionDurationMs = mEndTransition.getDuration();
+ /**
+ * If the start time + duration has changed and if the old or the new
+ * item overlaps the end transition, invalidate the transition
+ */
+ if (oldStartTimeMs + oldDurationMs != newStartTimeMs + newDurationMs &&
+ (isOverlapping(oldStartTimeMs, oldDurationMs,
+ mDurationMs - transitionDurationMs, transitionDurationMs) ||
+ isOverlapping(newStartTimeMs, newDurationMs,
+ mDurationMs - transitionDurationMs, transitionDurationMs))) {
+ mEndTransition.invalidate();
+ }
+ }
+ }
+
+ /**
+     * This function invalidates the RGB image clip, the Ken Burns effect
+     * clip and the scaled image clip
+ */
+ void invalidate() {
+ if (getGeneratedImageClip() != null) {
+ new File(getGeneratedImageClip()).delete();
+ setGeneratedImageClip(null);
+ setRegenerateClip(true);
+ }
+ if (mScaledFilename != null) {
+ new File(mScaledFilename).delete();
+ mScaledFilename = null;
+ }
+ if (mDecodedFilename != null) {
+ new File(mDecodedFilename).delete();
+ mDecodedFilename = null;
+ }
+ }
+
+ /**
+     * @param effectKB The EffectKenBurns object
+     * @return A {@link ClipSettings} object with the Ken Burns settings
+     * needed to generate the clip
+ */
+ private ClipSettings getKenBurns(EffectKenBurns effectKB) {
+ int PanZoomXa;
+ int PanZoomXb;
+ int width = 0, height = 0;
+ Rect start = new Rect();
+ Rect end = new Rect();
+        ClipSettings clipSettings = new ClipSettings();
+ /**
+ * image:
+ ---------------------------------------
+ | Xa |
+ | Ya --------------- |
+ | | | |
+ | | | |
+ | --------------- Xb ratioB |
+ | ratioA ------- |
+ | Yb | | |
+ | | | |
+ | ------- |
+ ---------------------------------------
+ */
+
+ effectKB.getKenBurnsSettings(start, end);
+ width = getWidth();
+ height = getHeight();
+ if ((start.left < 0) || (start.left > width) || (start.right < 0) || (start.right > width)
+ || (start.top < 0) || (start.top > height) || (start.bottom < 0)
+ || (start.bottom > height) || (end.left < 0) || (end.left > width)
+ || (end.right < 0) || (end.right > width) || (end.top < 0) || (end.top > height)
+ || (end.bottom < 0) || (end.bottom > height)) {
+ throw new IllegalArgumentException("Illegal arguments for KebBurns");
+ }
+
+ if (((width - (start.right - start.left) == 0) || (height - (start.bottom - start.top) == 0))
+ && ((width - (end.right - end.left) == 0) || (height - (end.bottom - end.top) == 0))) {
+ setRegenerateClip(false);
+ clipSettings.clipPath = getDecodedImageFileName();
+ clipSettings.fileType = FileType.JPG;
+ clipSettings.beginCutTime = 0;
+ clipSettings.endCutTime = (int)getTimelineDuration();
+ clipSettings.beginCutPercent = 0;
+ clipSettings.endCutPercent = 0;
+ clipSettings.panZoomEnabled = false;
+ clipSettings.panZoomPercentStart = 0;
+ clipSettings.panZoomTopLeftXStart = 0;
+ clipSettings.panZoomTopLeftYStart = 0;
+ clipSettings.panZoomPercentEnd = 0;
+ clipSettings.panZoomTopLeftXEnd = 0;
+ clipSettings.panZoomTopLeftYEnd = 0;
+ clipSettings.mediaRendering = mMANativeHelper
+ .getMediaItemRenderingMode(getRenderingMode());
+
+ clipSettings.rgbWidth = getScaledWidth();
+ clipSettings.rgbHeight = getScaledHeight();
+
+ return clipSettings;
+ }
+
+ PanZoomXa = (100 * start.width()) / width;
+ PanZoomXb = (100 * end.width()) / width;
+
+ clipSettings.clipPath = getDecodedImageFileName();
+ clipSettings.fileType = mMANativeHelper.getMediaItemFileType(getFileType());
+ clipSettings.beginCutTime = 0;
+ clipSettings.endCutTime = (int)getTimelineDuration();
+ clipSettings.beginCutPercent = 0;
+ clipSettings.endCutPercent = 0;
+ clipSettings.panZoomEnabled = true;
+ clipSettings.panZoomPercentStart = PanZoomXa;
+ clipSettings.panZoomTopLeftXStart = (start.left * 100) / width;
+ clipSettings.panZoomTopLeftYStart = (start.top * 100) / height;
+ clipSettings.panZoomPercentEnd = PanZoomXb;
+ clipSettings.panZoomTopLeftXEnd = (end.left * 100) / width;
+ clipSettings.panZoomTopLeftYEnd = (end.top * 100) / height;
+ clipSettings.mediaRendering
+ = mMANativeHelper.getMediaItemRenderingMode(getRenderingMode());
+
+ clipSettings.rgbWidth = getScaledWidth();
+ clipSettings.rgbHeight = getScaledHeight();
+
+ return clipSettings;
+ }
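
    As a worked example of the pan/zoom percentage mapping computed above, consider a
    1600x1200 source image, a start rectangle covering the full frame and an end
    rectangle of (400, 300, 1200, 900). The numbers are illustrative assumptions, not
    values taken from this change; the sketch simply mirrors the arithmetic in
    getKenBurns():

        // Hypothetical values -- a sketch of the percentage math in getKenBurns()
        int width = 1600, height = 1200;              // source image size
        Rect start = new Rect(0, 0, 1600, 1200);      // full frame
        Rect end = new Rect(400, 300, 1200, 900);     // zoomed-in center region
        int panZoomPercentStart = (100 * start.width()) / width; // (100 * 1600) / 1600 = 100
        int panZoomPercentEnd   = (100 * end.width()) / width;   // (100 * 800) / 1600 = 50
        int panZoomTopLeftXEnd  = (end.left * 100) / width;      // (400 * 100) / 1600 = 25
        int panZoomTopLeftYEnd  = (end.top * 100) / height;      // (300 * 100) / 1200 = 25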
+
+
+ /**
+     * @param effectKB The EffectKenBurns object
+     * @return A {@link ClipSettings} object with the name of the clip
+     * generated with the Ken Burns effect
+ */
+ ClipSettings generateKenburnsClip(EffectKenBurns effectKB) {
+ EditSettings editSettings = new EditSettings();
+ editSettings.clipSettingsArray = new ClipSettings[1];
+ String output = null;
+ ClipSettings clipSettings = new ClipSettings();
+ initClipSettings(clipSettings);
+ editSettings.clipSettingsArray[0] = getKenBurns(effectKB);
+ if ((getGeneratedImageClip() == null) && (getRegenerateClip())) {
+ output = mMANativeHelper.generateKenBurnsClip(editSettings, this);
+ setGeneratedImageClip(output);
+ setRegenerateClip(false);
+ clipSettings.clipPath = output;
+ clipSettings.fileType = FileType.THREE_GPP;
+
+ mGeneratedClipHeight = getScaledHeight();
+ switch (mVideoEditor.getAspectRatio()) {
+ case MediaProperties.ASPECT_RATIO_3_2:
+ if (mGeneratedClipHeight == MediaProperties.HEIGHT_480)
+ mGeneratedClipWidth = 720;
+ else if (mGeneratedClipHeight == MediaProperties.HEIGHT_720)
+ mGeneratedClipWidth = 1080;
+ break;
+ case MediaProperties.ASPECT_RATIO_16_9:
+ if (mGeneratedClipHeight == MediaProperties.HEIGHT_360)
+ mGeneratedClipWidth = 640;
+ else if (mGeneratedClipHeight == MediaProperties.HEIGHT_480)
+ mGeneratedClipWidth = 854;
+ else if (mGeneratedClipHeight == MediaProperties.HEIGHT_720)
+ mGeneratedClipWidth = 1280;
+ break;
+ case MediaProperties.ASPECT_RATIO_4_3:
+ if (mGeneratedClipHeight == MediaProperties.HEIGHT_480)
+ mGeneratedClipWidth = 640;
+ if (mGeneratedClipHeight == MediaProperties.HEIGHT_720)
+ mGeneratedClipWidth = 960;
+ break;
+ case MediaProperties.ASPECT_RATIO_5_3:
+ if (mGeneratedClipHeight == MediaProperties.HEIGHT_480)
+ mGeneratedClipWidth = 800;
+ break;
+ case MediaProperties.ASPECT_RATIO_11_9:
+ if (mGeneratedClipHeight == MediaProperties.HEIGHT_144)
+ mGeneratedClipWidth = 176;
+ break;
+ }
+
+ } else {
+ if (getGeneratedImageClip() == null) {
+ clipSettings.clipPath = getDecodedImageFileName();
+ clipSettings.fileType = FileType.JPG;
+
+ clipSettings.rgbWidth = getScaledWidth();
+ clipSettings.rgbHeight = getScaledHeight();
+
+ } else {
+ clipSettings.clipPath = getGeneratedImageClip();
+ clipSettings.fileType = FileType.THREE_GPP;
+ }
+ }
+ clipSettings.mediaRendering = mMANativeHelper.getMediaItemRenderingMode(getRenderingMode());
+ clipSettings.beginCutTime = 0;
+ clipSettings.endCutTime = (int)getTimelineDuration();
+
+ return clipSettings;
+ }
+
+ /**
+     * @return A {@link ClipSettings} object populated with the image clip
+     *  properties. If the image has a Ken Burns effect applied, the file path
+     *  contains the name of the clip generated with the Ken Burns effect
+ */
+ ClipSettings getImageClipProperties() {
+ ClipSettings clipSettings = new ClipSettings();
+ List<Effect> effects = null;
+ EffectKenBurns effectKB = null;
+ boolean effectKBPresent = false;
+
+ effects = getAllEffects();
+ for (Effect effect : effects) {
+ if (effect instanceof EffectKenBurns) {
+ effectKB = (EffectKenBurns)effect;
+ effectKBPresent = true;
+ break;
+ }
+ }
+
+ if (effectKBPresent) {
+ clipSettings = generateKenburnsClip(effectKB);
+ } else {
+ /**
+ * Init the clip settings object
+ */
+ initClipSettings(clipSettings);
+ clipSettings.clipPath = getDecodedImageFileName();
+ clipSettings.fileType = FileType.JPG;
+ clipSettings.beginCutTime = 0;
+ clipSettings.endCutTime = (int)getTimelineDuration();
+ clipSettings.mediaRendering = mMANativeHelper
+ .getMediaItemRenderingMode(getRenderingMode());
+ clipSettings.rgbWidth = getScaledWidth();
+ clipSettings.rgbHeight = getScaledHeight();
+
+ }
+ return clipSettings;
+ }
+
+ /**
+ * Resize a bitmap to the specified width and height
+ *
+ * @param filename The filename
+ * @param width The thumbnail width
+ * @param height The thumbnail height
+ *
+ * @return The resized bitmap
+ */
+ private Bitmap scaleImage(String filename, int width, int height)
+ throws IOException {
+ final BitmapFactory.Options dbo = new BitmapFactory.Options();
+ dbo.inJustDecodeBounds = true;
+ BitmapFactory.decodeFile(filename, dbo);
+
+ final int nativeWidth = dbo.outWidth;
+ final int nativeHeight = dbo.outHeight;
+ if (Log.isLoggable(TAG, Log.DEBUG)) {
+ Log.d(TAG, "generateThumbnail: Input: " + nativeWidth + "x" + nativeHeight
+ + ", resize to: " + width + "x" + height);
+ }
+
+ final Bitmap srcBitmap;
+ float bitmapWidth, bitmapHeight;
+ if (nativeWidth > width || nativeHeight > height) {
+ float dx = ((float)nativeWidth) / ((float)width);
+ float dy = ((float)nativeHeight) / ((float)height);
+
+ if (dx > dy) {
+ bitmapWidth = width;
+ bitmapHeight = Math.round(nativeHeight / dx);
+ } else {
+ bitmapWidth = Math.round(nativeWidth / dy);
+ bitmapHeight = height;
+ }
+
+ /**
+ * Create the bitmap from file
+ */
+ if (nativeWidth / bitmapWidth > 1) {
+
+ final BitmapFactory.Options options = new BitmapFactory.Options();
+ options.inSampleSize = nativeWidth / (int)bitmapWidth;
+ srcBitmap = BitmapFactory.decodeFile(filename, options);
+ } else {
+ srcBitmap = BitmapFactory.decodeFile(filename);
+ }
+ } else {
+ bitmapWidth = width;
+ bitmapHeight = height;
+ srcBitmap = BitmapFactory.decodeFile(filename);
+
+ }
+
+ if (srcBitmap == null) {
+ Log.e(TAG, "generateThumbnail: Cannot decode image bytes");
+ throw new IOException("Cannot decode file: " + mFilename);
+ }
+
+ /**
+ * Create the canvas bitmap
+ */
+ final Bitmap bitmap = Bitmap.createBitmap((int)bitmapWidth,
+ (int)bitmapHeight,
+ Bitmap.Config.ARGB_8888);
+ final Canvas canvas = new Canvas(bitmap);
+ canvas.drawBitmap(srcBitmap, new Rect(0, 0, srcBitmap.getWidth(),
+ srcBitmap.getHeight()),
+ new Rect(0, 0, (int)bitmapWidth,
+ (int)bitmapHeight), sResizePaint);
+ /**
+ * Release the source bitmap
+ */
+ srcBitmap.recycle();
+ return bitmap;
+ }
+}
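
    For reference, a minimal usage sketch of the MediaImageItem class added above. The
    project path, the image path and the VideoEditorFactory.create() / addMediaItem()
    calls are assumptions made for illustration only; the MediaImageItem and MediaItem
    methods shown are the ones introduced in this change. The snippet is assumed to run
    inside a method declared to throw IOException:

        // Sketch only -- assumes a writable project directory and an existing JPEG
        VideoEditor editor = VideoEditorFactory.create("/sdcard/videoeditor/project1");
        MediaImageItem image = new MediaImageItem(editor, "image1",
                "/sdcard/DCIM/sunset.jpg", 5000 /* durationMs */,
                MediaItem.RENDERING_MODE_BLACK_BORDER);
        editor.addMediaItem(image);

        // The image is scaled down internally to the maximum supported resolution,
        // so query the scaled dimensions rather than the native ones
        int scaledWidth = image.getScaledWidth();
        int scaledHeight = image.getScaledHeight();

        // Shorten the item; bounding transitions, effects and overlays are adjusted
        image.setDuration(3000);

        // Grab a 96x96 thumbnail at the start of the item
        Bitmap thumbnail = image.getThumbnail(96, 96, 0);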
diff --git a/media/java/android/media/videoeditor/MediaItem.java b/media/java/android/media/videoeditor/MediaItem.java
index 20fd6c996f53..a24b46e230ad 100755
--- a/media/java/android/media/videoeditor/MediaItem.java
+++ b/media/java/android/media/videoeditor/MediaItem.java
@@ -1,510 +1,775 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.videoeditor;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import android.graphics.Bitmap;
-
-/**
- * This abstract class describes the base class for any MediaItem. Objects are
- * defined with a file path as a source data.
- * {@hide}
-s */
-public abstract class MediaItem {
- // A constant which can be used to specify the end of the file (instead of
- // providing the actual duration of the media item).
- public final static int END_OF_FILE = -1;
-
- // Rendering modes
- /**
- * When using the RENDERING_MODE_BLACK_BORDER rendering mode video frames
- * are resized by preserving the aspect ratio until the movie matches one of
- * the dimensions of the output movie. The areas outside the resized video
- * clip are rendered black.
- */
- public static final int RENDERING_MODE_BLACK_BORDER = 0;
-
- /**
- * When using the RENDERING_MODE_STRETCH rendering mode video frames are
- * stretched horizontally or vertically to match the current aspect ratio of
- * the video editor.
- */
- public static final int RENDERING_MODE_STRETCH = 1;
-
- /**
- * When using the RENDERING_MODE_CROPPING rendering mode video frames are
- * scaled horizontally or vertically by preserving the original aspect
- * ratio of the media item.
- */
- public static final int RENDERING_MODE_CROPPING = 2;
-
-
- // The unique id of the MediaItem
- private final String mUniqueId;
-
- // The name of the file associated with the MediaItem
- protected final String mFilename;
-
- // List of effects
- private final List<Effect> mEffects;
-
- // List of overlays
- private final List<Overlay> mOverlays;
-
- // The rendering mode
- private int mRenderingMode;
-
- // Beginning and end transitions
- protected Transition mBeginTransition;
- protected Transition mEndTransition;
-
- /**
- * Constructor
- *
- * @param editor The video editor reference
- * @param mediaItemId The MediaItem id
- * @param filename name of the media file.
- * @param renderingMode The rendering mode
- *
- * @throws IOException if file is not found
- * @throws IllegalArgumentException if a capability such as file format is not
- * supported the exception object contains the unsupported
- * capability
- */
- protected MediaItem(VideoEditor editor, String mediaItemId, String filename,
- int renderingMode) throws IOException {
- mUniqueId = mediaItemId;
- mFilename = filename;
- mRenderingMode = renderingMode;
- mEffects = new ArrayList<Effect>();
- mOverlays = new ArrayList<Overlay>();
- mBeginTransition = null;
- mEndTransition = null;
- }
-
- /**
- * @return The id of the media item
- */
- public String getId() {
- return mUniqueId;
- }
-
- /**
- * @return The media source file name
- */
- public String getFilename() {
- return mFilename;
- }
-
- /**
- * If aspect ratio of the MediaItem is different from the aspect ratio of
- * the editor then this API controls the rendering mode.
- *
- * @param renderingMode rendering mode. It is one of:
- * {@link #RENDERING_MODE_BLACK_BORDER},
- * {@link #RENDERING_MODE_STRETCH}
- */
- public void setRenderingMode(int renderingMode) {
- mRenderingMode = renderingMode;
- if (mBeginTransition != null) {
- mBeginTransition.invalidate();
- }
-
- if (mEndTransition != null) {
- mEndTransition.invalidate();
- }
- }
-
- /**
- * @return The rendering mode
- */
- public int getRenderingMode() {
- return mRenderingMode;
- }
-
- /**
- * @param transition The beginning transition
- */
- void setBeginTransition(Transition transition) {
- mBeginTransition = transition;
- }
-
- /**
- * @return The begin transition
- */
- public Transition getBeginTransition() {
- return mBeginTransition;
- }
-
- /**
- * @param transition The end transition
- */
- void setEndTransition(Transition transition) {
- mEndTransition = transition;
- }
-
- /**
- * @return The end transition
- */
- public Transition getEndTransition() {
- return mEndTransition;
- }
-
- /**
- * @return The timeline duration. This is the actual duration in the
- * timeline (trimmed duration)
- */
- public abstract long getTimelineDuration();
-
- /**
- * @return The is the full duration of the media item (not trimmed)
- */
- public abstract long getDuration();
-
- /**
- * @return The source file type
- */
- public abstract int getFileType();
-
- /**
- * @return Get the native width of the media item
- */
- public abstract int getWidth();
-
- /**
- * @return Get the native height of the media item
- */
- public abstract int getHeight();
-
- /**
- * Get aspect ratio of the source media item.
- *
- * @return the aspect ratio as described in MediaProperties.
- * MediaProperties.ASPECT_RATIO_UNDEFINED if aspect ratio is not
- * supported as in MediaProperties
- */
- public abstract int getAspectRatio();
-
- /**
- * Add the specified effect to this media item.
- *
- * Note that certain types of effects cannot be applied to video and to
- * image media items. For example in certain implementation a Ken Burns
- * implementation cannot be applied to video media item.
- *
- * This method invalidates transition video clips if the
- * effect overlaps with the beginning and/or the end transition.
- *
- * @param effect The effect to apply
- * @throws IllegalStateException if a preview or an export is in progress
- * @throws IllegalArgumentException if the effect start and/or duration are
- * invalid or if the effect cannot be applied to this type of media
- * item or if the effect id is not unique across all the Effects
- * added.
- */
- public void addEffect(Effect effect) {
- if (effect.getMediaItem() != this) {
- throw new IllegalArgumentException("Media item mismatch");
- }
-
- if (mEffects.contains(effect)) {
- throw new IllegalArgumentException("Effect already exists: " + effect.getId());
- }
-
- if (effect.getStartTime() + effect.getDuration() > getDuration()) {
- throw new IllegalArgumentException(
- "Effect start time + effect duration > media clip duration");
- }
-
- mEffects.add(effect);
- invalidateTransitions(effect.getStartTime(), effect.getDuration());
- }
-
- /**
- * Remove the effect with the specified id.
- *
- * This method invalidates a transition video clip if the effect overlaps
- * with a transition.
- *
- * @param effectId The id of the effect to be removed
- *
- * @return The effect that was removed
- * @throws IllegalStateException if a preview or an export is in progress
- */
- public Effect removeEffect(String effectId) {
- for (Effect effect : mEffects) {
- if (effect.getId().equals(effectId)) {
- mEffects.remove(effect);
- invalidateTransitions(effect.getStartTime(), effect.getDuration());
- return effect;
- }
- }
-
- return null;
- }
-
- /**
- * Find the effect with the specified id
- *
- * @param effectId The effect id
- *
- * @return The effect with the specified id (null if it does not exist)
- */
- public Effect getEffect(String effectId) {
- for (Effect effect : mEffects) {
- if (effect.getId().equals(effectId)) {
- return effect;
- }
- }
-
- return null;
- }
-
- /**
- * Get the list of effects.
- *
- * @return the effects list. If no effects exist an empty list will be returned.
- */
- public List<Effect> getAllEffects() {
- return mEffects;
- }
-
- /**
- * Add an overlay to the storyboard. This method invalidates a transition
- * video clip if the overlay overlaps with a transition.
- *
- * @param overlay The overlay to add
- * @throws IllegalStateException if a preview or an export is in progress or
- * if the overlay id is not unique across all the overlays
- * added or if the bitmap is not specified or if the dimensions of
- * the bitmap do not match the dimensions of the media item
- */
- public void addOverlay(Overlay overlay) {
- if (overlay.getMediaItem() != this) {
- throw new IllegalArgumentException("Media item mismatch");
- }
-
- if (mOverlays.contains(overlay)) {
- throw new IllegalArgumentException("Overlay already exists: " + overlay.getId());
- }
-
- if (overlay.getStartTime() + overlay.getDuration() > getDuration()) {
- throw new IllegalArgumentException(
- "Overlay start time + overlay duration > media clip duration");
- }
-
- if (overlay instanceof OverlayFrame) {
- final OverlayFrame frame = (OverlayFrame)overlay;
- final Bitmap bitmap = frame.getBitmap();
- if (bitmap == null) {
- throw new IllegalArgumentException("Overlay bitmap not specified");
- }
-
- final int scaledWidth, scaledHeight;
- if (this instanceof MediaVideoItem) {
- scaledWidth = getWidth();
- scaledHeight = getHeight();
- } else {
- scaledWidth = ((MediaImageItem)this).getScaledWidth();
- scaledHeight = ((MediaImageItem)this).getScaledHeight();
- }
-
- // The dimensions of the overlay bitmap must be the same as the
- // media item dimensions
- if (bitmap.getWidth() != scaledWidth || bitmap.getHeight() != scaledHeight) {
- throw new IllegalArgumentException(
- "Bitmap dimensions must match media item dimensions");
- }
- } else {
- throw new IllegalArgumentException("Overlay not supported");
- }
-
- mOverlays.add(overlay);
- invalidateTransitions(overlay.getStartTime(), overlay.getDuration());
- }
-
- /**
- * Remove the overlay with the specified id.
- *
- * This method invalidates a transition video clip if the overlay overlaps
- * with a transition.
- *
- * @param overlayId The id of the overlay to be removed
- *
- * @return The overlay that was removed
- * @throws IllegalStateException if a preview or an export is in progress
- */
- public Overlay removeOverlay(String overlayId) {
- for (Overlay overlay : mOverlays) {
- if (overlay.getId().equals(overlayId)) {
- mOverlays.remove(overlay);
- if (overlay instanceof OverlayFrame) {
- ((OverlayFrame)overlay).invalidate();
- }
- invalidateTransitions(overlay.getStartTime(), overlay.getDuration());
- return overlay;
- }
- }
-
- return null;
- }
-
- /**
- * Find the overlay with the specified id
- *
- * @param overlayId The overlay id
- *
- * @return The overlay with the specified id (null if it does not exist)
- */
- public Overlay getOverlay(String overlayId) {
- for (Overlay overlay : mOverlays) {
- if (overlay.getId().equals(overlayId)) {
- return overlay;
- }
- }
-
- return null;
- }
-
- /**
- * Get the list of overlays associated with this media item
- *
- * Note that if any overlay source files are not accessible anymore,
- * this method will still provide the full list of overlays.
- *
- * @return The list of overlays. If no overlays exist an empty list will
- * be returned.
- */
- public List<Overlay> getAllOverlays() {
- return mOverlays;
- }
-
- /**
- * Create a thumbnail at specified time in a video stream in Bitmap format
- *
- * @param width width of the thumbnail in pixels
- * @param height height of the thumbnail in pixels
- * @param timeMs The time in the source video file at which the thumbnail is
- * requested (even if trimmed).
- *
- * @return The thumbnail as a Bitmap.
- *
- * @throws IOException if a file error occurs
- * @throws IllegalArgumentException if time is out of video duration
- */
- public abstract Bitmap getThumbnail(int width, int height, long timeMs) throws IOException;
-
- /**
- * Get the array of Bitmap thumbnails between start and end.
- *
- * @param width width of the thumbnail in pixels
- * @param height height of the thumbnail in pixels
- * @param startMs The start of time range in milliseconds
- * @param endMs The end of the time range in milliseconds
- * @param thumbnailCount The thumbnail count
- *
- * @return The array of Bitmaps
- *
- * @throws IOException if a file error occurs
- */
- public abstract Bitmap[] getThumbnailList(int width, int height, long startMs, long endMs,
- int thumbnailCount) throws IOException;
-
- /*
- * {@inheritDoc}
- */
- @Override
- public boolean equals(Object object) {
- if (!(object instanceof MediaItem)) {
- return false;
- }
- return mUniqueId.equals(((MediaItem)object).mUniqueId);
- }
-
- /*
- * {@inheritDoc}
- */
- @Override
- public int hashCode() {
- return mUniqueId.hashCode();
- }
-
- /**
- * Invalidate the start and end transitions if necessary
- *
- * @param startTimeMs The start time of the effect or overlay
- * @param durationMs The duration of the effect or overlay
- */
- abstract void invalidateTransitions(long startTimeMs, long durationMs);
-
- /**
- * Invalidate the start and end transitions if necessary. This method is
- * typically called when the start time and/or duration of an overlay or
- * effect is changing.
- *
- * @param oldStartTimeMs The old start time of the effect or overlay
- * @param oldDurationMs The old duration of the effect or overlay
- * @param newStartTimeMs The new start time of the effect or overlay
- * @param newDurationMs The new duration of the effect or overlay
- */
- abstract void invalidateTransitions(long oldStartTimeMs, long oldDurationMs,
- long newStartTimeMs, long newDurationMs);
-
- /**
- * Check if two items overlap in time
- *
- * @param startTimeMs1 Item 1 start time
- * @param durationMs1 Item 1 duration
- * @param startTimeMs2 Item 2 start time
- * @param durationMs2 Item 2 end time
- *
- * @return true if the two items overlap
- */
- protected boolean isOverlapping(long startTimeMs1, long durationMs1,
- long startTimeMs2, long durationMs2) {
- if (startTimeMs1 + durationMs1 <= startTimeMs2) {
- return false;
- } else if (startTimeMs1 >= startTimeMs2 + durationMs2) {
- return false;
- }
-
- return true;
- }
-
- /**
- * Adjust the duration transitions.
- */
- protected void adjustTransitions() {
- // Check if the duration of transitions need to be adjusted
- if (mBeginTransition != null) {
- final long maxDurationMs = mBeginTransition.getMaximumDuration();
- if (mBeginTransition.getDuration() > maxDurationMs) {
- mBeginTransition.setDuration(maxDurationMs);
- }
- }
-
- if (mEndTransition != null) {
- final long maxDurationMs = mEndTransition.getMaximumDuration();
- if (mEndTransition.getDuration() > maxDurationMs) {
- mEndTransition.setDuration(maxDurationMs);
- }
- }
- }
-}
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.videoeditor;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import java.io.DataOutputStream;
+import java.nio.ByteBuffer;
+import java.nio.IntBuffer;
+
+import android.graphics.Bitmap;
+import android.media.videoeditor.MediaArtistNativeHelper.ClipSettings;
+import android.media.videoeditor.MediaArtistNativeHelper.FileType;
+import android.media.videoeditor.MediaArtistNativeHelper.MediaRendering;
+
+/**
+ * This abstract class describes the base class for any MediaItem. Objects are
+ * defined with a file path as the source data.
+ * {@hide}
+ */
+public abstract class MediaItem {
+ /**
+ * A constant which can be used to specify the end of the file (instead of
+ * providing the actual duration of the media item).
+ */
+ public final static int END_OF_FILE = -1;
+
+ /**
+ * Rendering modes
+ */
+ /**
+ * When using the RENDERING_MODE_BLACK_BORDER rendering mode video frames
+ * are resized by preserving the aspect ratio until the movie matches one of
+ * the dimensions of the output movie. The areas outside the resized video
+ * clip are rendered black.
+ */
+ public static final int RENDERING_MODE_BLACK_BORDER = 0;
+
+ /**
+ * When using the RENDERING_MODE_STRETCH rendering mode video frames are
+ * stretched horizontally or vertically to match the current aspect ratio of
+ * the video editor.
+ */
+ public static final int RENDERING_MODE_STRETCH = 1;
+
+ /**
+ * When using the RENDERING_MODE_CROPPING rendering mode video frames are
+ * scaled horizontally or vertically by preserving the original aspect ratio
+ * of the media item.
+ */
+ public static final int RENDERING_MODE_CROPPING = 2;
+
+ /**
+ * The unique id of the MediaItem
+ */
+ private final String mUniqueId;
+
+ /**
+ * The name of the file associated with the MediaItem
+ */
+ protected final String mFilename;
+
+ /**
+ * List of effects
+ */
+ private final List<Effect> mEffects;
+
+ /**
+ * List of overlays
+ */
+ private final List<Overlay> mOverlays;
+
+ /**
+ * The rendering mode
+ */
+ private int mRenderingMode;
+
+ private final MediaArtistNativeHelper mMANativeHelper;
+
+ private final String mProjectPath;
+
+ /**
+ * Beginning and end transitions
+ */
+ protected Transition mBeginTransition;
+
+ protected Transition mEndTransition;
+
+ protected String mGeneratedImageClip;
+
+ protected boolean mRegenerateClip;
+
+ private boolean mBlankFrameGenerated = false;
+
+ private String mBlankFrameFilename = null;
+
+ /**
+ * Constructor
+ *
+ * @param editor The video editor reference
+ * @param mediaItemId The MediaItem id
+ * @param filename name of the media file.
+ * @param renderingMode The rendering mode
+ * @throws IOException if file is not found
+     * @throws IllegalArgumentException if a capability such as file format is
+     *             not supported; the exception object contains the
+     *             unsupported capability
+ */
+ protected MediaItem(VideoEditor editor, String mediaItemId, String filename,
+ int renderingMode) throws IOException {
+ if (filename == null) {
+ throw new IllegalArgumentException("MediaItem : filename is null");
+ }
+ mUniqueId = mediaItemId;
+ mFilename = filename;
+ mRenderingMode = renderingMode;
+ mEffects = new ArrayList<Effect>();
+ mOverlays = new ArrayList<Overlay>();
+ mBeginTransition = null;
+ mEndTransition = null;
+ mMANativeHelper = ((VideoEditorImpl)editor).getNativeContext();
+ mProjectPath = editor.getPath();
+ mRegenerateClip = false;
+ mGeneratedImageClip = null;
+ }
+
+ /**
+ * @return The id of the media item
+ */
+ public String getId() {
+ return mUniqueId;
+ }
+
+ /**
+ * @return The media source file name
+ */
+ public String getFilename() {
+ return mFilename;
+ }
+
+ /**
+ * If aspect ratio of the MediaItem is different from the aspect ratio of
+ * the editor then this API controls the rendering mode.
+ *
+     * @param renderingMode rendering mode. It is one of:
+     *            {@link #RENDERING_MODE_BLACK_BORDER},
+     *            {@link #RENDERING_MODE_STRETCH},
+     *            {@link #RENDERING_MODE_CROPPING}
+ */
+ public void setRenderingMode(int renderingMode) {
+ switch (renderingMode) {
+ case RENDERING_MODE_BLACK_BORDER:
+ case RENDERING_MODE_STRETCH:
+ case RENDERING_MODE_CROPPING:
+ break;
+
+ default:
+ throw new IllegalArgumentException("Invalid Rendering Mode");
+ }
+ mRenderingMode = renderingMode;
+ if (mBeginTransition != null) {
+ mBeginTransition.invalidate();
+ }
+
+ if (mEndTransition != null) {
+ mEndTransition.invalidate();
+ }
+ mMANativeHelper.setGeneratePreview(true);
+ }
+
+ /**
+ * @return The rendering mode
+ */
+ public int getRenderingMode() {
+ return mRenderingMode;
+ }
+
+ /**
+ * @param transition The beginning transition
+ */
+ void setBeginTransition(Transition transition) {
+ mBeginTransition = transition;
+ }
+
+ /**
+ * @return The begin transition
+ */
+ public Transition getBeginTransition() {
+ return mBeginTransition;
+ }
+
+ /**
+ * @param transition The end transition
+ */
+ void setEndTransition(Transition transition) {
+ mEndTransition = transition;
+ }
+
+ /**
+ * @return The end transition
+ */
+ public Transition getEndTransition() {
+ return mEndTransition;
+ }
+
+ /**
+ * @return The timeline duration. This is the actual duration in the
+ * timeline (trimmed duration)
+ */
+ public abstract long getTimelineDuration();
+
+ /**
+     * @return The full duration of the media item (not trimmed)
+ */
+ public abstract long getDuration();
+
+ /**
+ * @return The source file type
+ */
+ public abstract int getFileType();
+
+ /**
+ * @return Get the native width of the media item
+ */
+ public abstract int getWidth();
+
+ /**
+ * @return Get the native height of the media item
+ */
+ public abstract int getHeight();
+
+ /**
+ * Get aspect ratio of the source media item.
+ *
+ * @return the aspect ratio as described in MediaProperties.
+ * MediaProperties.ASPECT_RATIO_UNDEFINED if aspect ratio is not
+ * supported as in MediaProperties
+ */
+ public abstract int getAspectRatio();
+
+ /**
+ * Add the specified effect to this media item.
+ *
+     * Note that certain types of effects cannot be applied to video or to
+     * image media items. For example, in certain implementations a Ken Burns
+     * effect cannot be applied to a video media item.
+ *
+ * This method invalidates transition video clips if the
+ * effect overlaps with the beginning and/or the end transition.
+ *
+ * @param effect The effect to apply
+ * @throws IllegalStateException if a preview or an export is in progress
+ * @throws IllegalArgumentException if the effect start and/or duration are
+ * invalid or if the effect cannot be applied to this type of media
+ * item or if the effect id is not unique across all the Effects
+ * added.
+ */
+ public void addEffect(Effect effect) {
+
+ if (effect == null) {
+ throw new IllegalArgumentException("NULL effect cannot be applied");
+ }
+
+ if (effect.getMediaItem() != this) {
+ throw new IllegalArgumentException("Media item mismatch");
+ }
+
+ if (mEffects.contains(effect)) {
+ throw new IllegalArgumentException("Effect already exists: " + effect.getId());
+ }
+
+ if (effect.getStartTime() + effect.getDuration() > getDuration()) {
+ throw new IllegalArgumentException(
+ "Effect start time + effect duration > media clip duration");
+ }
+
+ mEffects.add(effect);
+
+ invalidateTransitions(effect.getStartTime(), effect.getDuration());
+ if (mMANativeHelper != null) {
+ if (effect instanceof EffectKenBurns) {
+ mRegenerateClip = true;
+ }
+ mMANativeHelper.setGeneratePreview(true);
+ }
+ }
+
+ /**
+ * Remove the effect with the specified id.
+ *
+ * This method invalidates a transition video clip if the effect overlaps
+ * with a transition.
+ *
+ * @param effectId The id of the effect to be removed
+ *
+ * @return The effect that was removed
+ * @throws IllegalStateException if a preview or an export is in progress
+ */
+ public Effect removeEffect(String effectId) {
+ for (Effect effect : mEffects) {
+ if (effect.getId().equals(effectId)) {
+ mEffects.remove(effect);
+ invalidateTransitions(effect.getStartTime(), effect.getDuration());
+ if (mMANativeHelper != null) {
+ if (effect instanceof EffectKenBurns) {
+ if (mGeneratedImageClip != null) {
+ /**
+ * Delete the file
+ */
+ new File(mGeneratedImageClip).delete();
+ /**
+ * Invalidate the filename
+ */
+ mGeneratedImageClip = null;
+ }
+ mRegenerateClip = false;
+ }
+ mMANativeHelper.setGeneratePreview(true);
+ }
+ return effect;
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Set the filepath of the generated image clip when the effect is added.
+ *
+     * @param generatedFilePath The filepath of the generated image clip.
+ */
+ void setGeneratedImageClip(String generatedFilePath) {
+ mGeneratedImageClip = generatedFilePath;
+ }
+
+ /**
+ * Get the filepath of the generated image clip when the effect is added.
+ *
+ * @return The filepath of the generated image clip (null if it does not
+ * exist)
+ */
+ String getGeneratedImageClip() {
+ return mGeneratedImageClip;
+ }
+
+ /**
+ * Find the effect with the specified id
+ *
+ * @param effectId The effect id
+ * @return The effect with the specified id (null if it does not exist)
+ */
+ public Effect getEffect(String effectId) {
+ for (Effect effect : mEffects) {
+ if (effect.getId().equals(effectId)) {
+ return effect;
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Get the list of effects.
+ *
+ * @return the effects list. If no effects exist an empty list will be
+ * returned.
+ */
+ public List<Effect> getAllEffects() {
+ return mEffects;
+ }
+
+ /**
+ * Add an overlay to the storyboard. This method invalidates a transition
+ * video clip if the overlay overlaps with a transition.
+ *
+ * @param overlay The overlay to add
+     * @throws IllegalStateException if a preview or an export is in progress
+     * @throws IllegalArgumentException if the overlay id is not unique across
+     *             all the overlays added, if the bitmap is not specified or
+     *             if the dimensions of the bitmap do not match the dimensions
+     *             of the media item
+     * @throws FileNotFoundException if the overlay file cannot be created
+     * @throws IOException if the overlay could not be saved to the project
+     *             path
+ */
+ public void addOverlay(Overlay overlay) throws FileNotFoundException, IOException {
+ if (overlay == null) {
+ throw new IllegalArgumentException("NULL Overlay cannot be applied");
+ }
+
+ if (overlay.getMediaItem() != this) {
+ throw new IllegalArgumentException("Media item mismatch");
+ }
+
+ if (mOverlays.contains(overlay)) {
+ throw new IllegalArgumentException("Overlay already exists: " + overlay.getId());
+ }
+
+ if (overlay.getStartTime() + overlay.getDuration() > getDuration()) {
+ throw new IllegalArgumentException(
+ "Overlay start time + overlay duration > media clip duration");
+ }
+
+ if (overlay instanceof OverlayFrame) {
+ final OverlayFrame frame = (OverlayFrame)overlay;
+ final Bitmap bitmap = frame.getBitmap();
+ if (bitmap == null) {
+ throw new IllegalArgumentException("Overlay bitmap not specified");
+ }
+
+ ((OverlayFrame)overlay).save(mProjectPath);
+
+ final int scaledWidth, scaledHeight;
+ if (this instanceof MediaVideoItem) {
+ scaledWidth = getWidth();
+ scaledHeight = getHeight();
+ } else {
+ scaledWidth = ((MediaImageItem)this).getScaledWidth();
+ scaledHeight = ((MediaImageItem)this).getScaledHeight();
+ }
+
+ /**
+ * The dimensions of the overlay bitmap must be the same as the
+ * media item dimensions
+ */
+ if (bitmap.getWidth() != scaledWidth || bitmap.getHeight() != scaledHeight) {
+ throw new IllegalArgumentException(
+ "Bitmap dimensions must match media item dimensions");
+ }
+ } else {
+ throw new IllegalArgumentException("Overlay not supported");
+ }
+
+ mOverlays.add(overlay);
+ invalidateTransitions(overlay.getStartTime(), overlay.getDuration());
+ if (mMANativeHelper != null) {
+ mMANativeHelper.setGeneratePreview(true);
+ }
+ }
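
    A minimal sketch of attaching an overlay, following the checks documented above. The
    OverlayFrame constructor arguments shown here (media item, overlay id, bitmap, start
    time, duration) and the mediaItem variable are assumptions for illustration; the
    bitmap dimensions must match the item's (scaled) dimensions or addOverlay() throws
    IllegalArgumentException:

        // Sketch only -- mediaItem is an existing MediaVideoItem or MediaImageItem
        int overlayWidth = (mediaItem instanceof MediaImageItem)
                ? ((MediaImageItem) mediaItem).getScaledWidth() : mediaItem.getWidth();
        int overlayHeight = (mediaItem instanceof MediaImageItem)
                ? ((MediaImageItem) mediaItem).getScaledHeight() : mediaItem.getHeight();
        Bitmap overlayBitmap = Bitmap.createBitmap(overlayWidth, overlayHeight,
                Bitmap.Config.ARGB_8888);
        // ... draw the caption or logo into overlayBitmap ...
        Overlay overlay = new OverlayFrame(mediaItem, "overlay1", overlayBitmap,
                1000 /* startTimeMs */, 2000 /* durationMs */);
        mediaItem.addOverlay(overlay); // may throw IOException while saving the overlay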
+
+ /**
+     * @param flag true if the clip needs to be regenerated, false otherwise.
+ */
+ void setRegenerateClip(boolean flag) {
+ mRegenerateClip = flag;
+ }
+
+ /**
+     * @return true if the clip needs to be regenerated, false otherwise.
+ */
+ boolean getRegenerateClip() {
+ return mRegenerateClip;
+ }
+
+ /**
+ * Remove the overlay with the specified id.
+ *
+ * This method invalidates a transition video clip if the overlay overlaps
+ * with a transition.
+ *
+ * @param overlayId The id of the overlay to be removed
+ *
+ * @return The overlay that was removed
+ * @throws IllegalStateException if a preview or an export is in progress
+ */
+ public Overlay removeOverlay(String overlayId) {
+ for (Overlay overlay : mOverlays) {
+ if (overlay.getId().equals(overlayId)) {
+ mOverlays.remove(overlay);
+ if (mMANativeHelper != null) {
+ mMANativeHelper.setGeneratePreview(true);
+ }
+ if (overlay instanceof OverlayFrame) {
+ ((OverlayFrame)overlay).invalidate();
+ }
+ invalidateTransitions(overlay.getStartTime(), overlay.getDuration());
+ return overlay;
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Find the overlay with the specified id
+ *
+ * @param overlayId The overlay id
+ *
+ * @return The overlay with the specified id (null if it does not exist)
+ */
+ public Overlay getOverlay(String overlayId) {
+ for (Overlay overlay : mOverlays) {
+ if (overlay.getId().equals(overlayId)) {
+ return overlay;
+ }
+ }
+
+ return null;
+ }
+
+ /**
+ * Get the list of overlays associated with this media item
+ *
+ * Note that if any overlay source files are not accessible anymore,
+ * this method will still provide the full list of overlays.
+ *
+ * @return The list of overlays. If no overlays exist an empty list will
+ * be returned.
+ */
+ public List<Overlay> getAllOverlays() {
+ return mOverlays;
+ }
+
+ /**
+ * Create a thumbnail at specified time in a video stream in Bitmap format
+ *
+ * @param width width of the thumbnail in pixels
+ * @param height height of the thumbnail in pixels
+ * @param timeMs The time in the source video file at which the thumbnail is
+ * requested (even if trimmed).
+ *
+ * @return The thumbnail as a Bitmap.
+ *
+ * @throws IOException if a file error occurs
+ * @throws IllegalArgumentException if time is out of video duration
+ */
+ public abstract Bitmap getThumbnail(int width, int height, long timeMs)
+ throws IOException;
+
+ /**
+ * Get the array of Bitmap thumbnails between start and end.
+ *
+ * @param width width of the thumbnail in pixels
+ * @param height height of the thumbnail in pixels
+ * @param startMs The start of time range in milliseconds
+ * @param endMs The end of the time range in milliseconds
+ * @param thumbnailCount The thumbnail count
+ *
+ * @return The array of Bitmaps
+ *
+ * @throws IOException if a file error occurs
+ */
+ public abstract Bitmap[] getThumbnailList(int width, int height,
+ long startMs, long endMs,
+ int thumbnailCount)
+ throws IOException;
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public boolean equals(Object object) {
+ if (!(object instanceof MediaItem)) {
+ return false;
+ }
+ return mUniqueId.equals(((MediaItem)object).mUniqueId);
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public int hashCode() {
+ return mUniqueId.hashCode();
+ }
+
+ /**
+ * Invalidate the start and end transitions if necessary
+ *
+ * @param startTimeMs The start time of the effect or overlay
+ * @param durationMs The duration of the effect or overlay
+ */
+ abstract void invalidateTransitions(long startTimeMs, long durationMs);
+
+ /**
+ * Invalidate the start and end transitions if necessary. This method is
+ * typically called when the start time and/or duration of an overlay or
+ * effect is changing.
+ *
+ * @param oldStartTimeMs The old start time of the effect or overlay
+ * @param oldDurationMs The old duration of the effect or overlay
+ * @param newStartTimeMs The new start time of the effect or overlay
+ * @param newDurationMs The new duration of the effect or overlay
+ */
+ abstract void invalidateTransitions(long oldStartTimeMs, long oldDurationMs,
+ long newStartTimeMs, long newDurationMs);
+
+ /**
+ * Check if two items overlap in time
+ *
+ * @param startTimeMs1 Item 1 start time
+ * @param durationMs1 Item 1 duration
+ * @param startTimeMs2 Item 2 start time
+ * @param durationMs2 Item 2 duration
+ * @return true if the two items overlap
+ */
+ protected boolean isOverlapping(long startTimeMs1, long durationMs1,
+ long startTimeMs2, long durationMs2) {
+ if (startTimeMs1 + durationMs1 <= startTimeMs2) {
+ return false;
+ } else if (startTimeMs1 >= startTimeMs2 + durationMs2) {
+ return false;
+ }
+
+ return true;
+ }
+
+ /**
+ * Adjust the durations of the begin and end transitions if necessary.
+ */
+ protected void adjustTransitions() {
+ /**
+ * Check if the duration of transitions need to be adjusted
+ */
+ if (mBeginTransition != null) {
+ final long maxDurationMs = mBeginTransition.getMaximumDuration();
+ if (mBeginTransition.getDuration() > maxDurationMs) {
+ mBeginTransition.setDuration(maxDurationMs);
+ }
+ }
+
+ if (mEndTransition != null) {
+ final long maxDurationMs = mEndTransition.getMaximumDuration();
+ if (mEndTransition.getDuration() > maxDurationMs) {
+ mEndTransition.setDuration(maxDurationMs);
+ }
+ }
+ }
+
+ /**
+ * @return The MediaArtistNativeHelper context
+ */
+ MediaArtistNativeHelper getNativeContext() {
+ return mMANativeHelper;
+ }
+
+ /**
+ * Initializes the ClipSettings fields to their default values
+ *
+ * @param clipSettings The ClipSettings object to initialize
+ * {@link android.media.videoeditor.MediaArtistNativeHelper.ClipSettings}
+ */
+ void initClipSettings(ClipSettings clipSettings) {
+ clipSettings.clipPath = null;
+ clipSettings.clipDecodedPath = null;
+ clipSettings.clipOriginalPath = null;
+ clipSettings.fileType = 0;
+ clipSettings.endCutTime = 0;
+ clipSettings.beginCutTime = 0;
+ clipSettings.beginCutPercent = 0;
+ clipSettings.endCutPercent = 0;
+ clipSettings.panZoomEnabled = false;
+ clipSettings.panZoomPercentStart = 0;
+ clipSettings.panZoomTopLeftXStart = 0;
+ clipSettings.panZoomTopLeftYStart = 0;
+ clipSettings.panZoomPercentEnd = 0;
+ clipSettings.panZoomTopLeftXEnd = 0;
+ clipSettings.panZoomTopLeftYEnd = 0;
+ clipSettings.mediaRendering = 0;
+ clipSettings.rgbWidth = 0;
+ clipSettings.rgbHeight = 0;
+ }
+
+ /**
+ * @return A ClipSettings object populated with this media item's data
+ * {@link android.media.videoeditor.MediaArtistNativeHelper.ClipSettings}
+ */
+ ClipSettings getClipSettings() {
+ MediaVideoItem mVI = null;
+ MediaImageItem mII = null;
+ ClipSettings clipSettings = new ClipSettings();
+ initClipSettings(clipSettings);
+ if (this instanceof MediaVideoItem) {
+ mVI = (MediaVideoItem)this;
+ clipSettings.clipPath = mVI.getFilename();
+ clipSettings.fileType = mMANativeHelper.getMediaItemFileType(mVI.getFileType());
+ clipSettings.beginCutTime = (int)mVI.getBoundaryBeginTime();
+ clipSettings.endCutTime = (int)mVI.getBoundaryEndTime();
+ clipSettings.mediaRendering =
+ mMANativeHelper.getMediaItemRenderingMode(mVI.getRenderingMode());
+ } else if (this instanceof MediaImageItem) {
+ mII = (MediaImageItem)this;
+ clipSettings = mII.getImageClipProperties();
+ }
+ return clipSettings;
+ }
+
+ /**
+ * Generates a black frame used for the begin transition of the first
+ * media item in the storyboard or the end transition of the last media
+ * item in the storyboard
+ *
+ * @param clipSettings The ClipSettings object to populate
+ * {@link android.media.videoeditor.MediaArtistNativeHelper.ClipSettings}
+ */
+ void generateBlankFrame(ClipSettings clipSettings) {
+ if (!mBlankFrameGenerated) {
+ int mWidth = 64;
+ int mHeight = 64;
+ mBlankFrameFilename = String.format(mProjectPath + "/" + "ghost.rgb");
+ FileOutputStream fl = null;
+ try {
+ fl = new FileOutputStream(mBlankFrameFilename);
+ } catch (IOException e) {
+ /* catch IO exception */
+ }
+ final DataOutputStream dos = new DataOutputStream(fl);
+
+ final int [] framingBuffer = new int[mWidth];
+
+ ByteBuffer byteBuffer = ByteBuffer.allocate(framingBuffer.length * 4);
+ IntBuffer intBuffer;
+
+ byte[] array = byteBuffer.array();
+ int tmp = 0;
+ while (tmp < mHeight) {
+ intBuffer = byteBuffer.asIntBuffer();
+ intBuffer.put(framingBuffer, 0, mWidth);
+ try {
+ dos.write(array);
+ } catch (IOException e) {
+ /* catch file write error */
+ }
+ tmp += 1;
+ }
+
+ try {
+ fl.close();
+ } catch (IOException e) {
+ /* file close error */
+ }
+ mBlankFrameGenerated = true;
+ }
+
+ clipSettings.clipPath = mBlankFrameFilename;
+ clipSettings.fileType = FileType.JPG;
+ clipSettings.beginCutTime = 0;
+ clipSettings.endCutTime = 0;
+ clipSettings.mediaRendering = MediaRendering.RESIZING;
+
+ clipSettings.rgbWidth = 64;
+ clipSettings.rgbHeight = 64;
+ }
+
+ /**
+ * Invalidates the generated blank frame
+ */
+ void invalidateBlankFrame() {
+ if (mBlankFrameFilename != null) {
+ if (new File(mBlankFrameFilename).exists()) {
+ new File(mBlankFrameFilename).delete();
+ mBlankFrameFilename = null;
+ }
+ }
+ }
+
+}
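The overlay API added to MediaItem above is easiest to see in a short usage sketch. The snippet below is illustrative only and not part of this change; it assumes a MediaVideoItem that already belongs to a VideoEditor project, and an OverlayFrame constructor of the form (mediaItem, overlayId, bitmap, startTimeMs, durationMs) as introduced elsewhere in this patch set.

    // Illustrative sketch, not part of the patch. The bitmap handed to an
    // OverlayFrame must match the media item dimensions or addOverlay() throws.
    void addLogoOverlay(MediaVideoItem item, Bitmap logo) {
        Bitmap scaled = Bitmap.createScaledBitmap(logo,
                item.getWidth(), item.getHeight(), true /* filter */);
        OverlayFrame frame = new OverlayFrame(item, "overlay1", scaled,
                0 /* startTimeMs */, 3000 /* durationMs */);
        item.addOverlay(frame);
        // Removing by id returns the overlay and invalidates any transition
        // that overlaps it in time.
        Overlay removed = item.removeOverlay("overlay1");
    }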
diff --git a/media/java/android/media/videoeditor/MediaProperties.java b/media/java/android/media/videoeditor/MediaProperties.java
index 0bb83eb47ce9..9654a6a7416f 100755
--- a/media/java/android/media/videoeditor/MediaProperties.java
+++ b/media/java/android/media/videoeditor/MediaProperties.java
@@ -1,260 +1,311 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.videoeditor;
-
-import android.util.Pair;
-
-/**
- * This class defines all properties of a media file such as supported height, aspect ratio,
- * bitrate for export function.
- * {@hide}
- */
-public class MediaProperties {
- // Supported heights
- public static final int HEIGHT_144 = 144;
- public static final int HEIGHT_360 = 360;
- public static final int HEIGHT_480 = 480;
- public static final int HEIGHT_720 = 720;
- public static final int HEIGHT_1080 = 1080;
-
- // Supported aspect ratios
- public static final int ASPECT_RATIO_UNDEFINED = 0;
- public static final int ASPECT_RATIO_3_2 = 1;
- public static final int ASPECT_RATIO_16_9 = 2;
- public static final int ASPECT_RATIO_4_3 = 3;
- public static final int ASPECT_RATIO_5_3 = 4;
- public static final int ASPECT_RATIO_11_9 = 5;
-
- // The array of supported aspect ratios
- private static final int[] ASPECT_RATIOS = new int[] {
- ASPECT_RATIO_3_2,
- ASPECT_RATIO_16_9,
- ASPECT_RATIO_4_3,
- ASPECT_RATIO_5_3,
- ASPECT_RATIO_11_9
- };
-
- // Supported resolutions for specific aspect ratios
- @SuppressWarnings({"unchecked"})
- private static final Pair<Integer, Integer>[] ASPECT_RATIO_3_2_RESOLUTIONS =
- new Pair[] {
- new Pair<Integer, Integer>(720, HEIGHT_480),
- new Pair<Integer, Integer>(1080, HEIGHT_720)
- };
-
- @SuppressWarnings({"unchecked"})
- private static final Pair<Integer, Integer>[] ASPECT_RATIO_4_3_RESOLUTIONS =
- new Pair[] {
- new Pair<Integer, Integer>(640, HEIGHT_480),
- new Pair<Integer, Integer>(960, HEIGHT_720)
- };
-
- @SuppressWarnings({"unchecked"})
- private static final Pair<Integer, Integer>[] ASPECT_RATIO_5_3_RESOLUTIONS =
- new Pair[] {
- new Pair<Integer, Integer>(800, HEIGHT_480)
- };
-
- @SuppressWarnings({"unchecked"})
- private static final Pair<Integer, Integer>[] ASPECT_RATIO_11_9_RESOLUTIONS =
- new Pair[] {
- new Pair<Integer, Integer>(176, HEIGHT_144)
- };
-
- @SuppressWarnings({"unchecked"})
- private static final Pair<Integer, Integer>[] ASPECT_RATIO_16_9_RESOLUTIONS =
- new Pair[] {
- new Pair<Integer, Integer>(640, HEIGHT_360),
- new Pair<Integer, Integer>(854, HEIGHT_480),
- new Pair<Integer, Integer>(1280, HEIGHT_720),
- };
-
-
- // Bitrate values (in bits per second)
- public static final int BITRATE_28K = 28000;
- public static final int BITRATE_40K = 40000;
- public static final int BITRATE_64K = 64000;
- public static final int BITRATE_96K = 96000;
- public static final int BITRATE_128K = 128000;
- public static final int BITRATE_192K = 192000;
- public static final int BITRATE_256K = 256000;
- public static final int BITRATE_384K = 384000;
- public static final int BITRATE_512K = 512000;
- public static final int BITRATE_800K = 800000;
- public static final int BITRATE_2M = 2000000;
- public static final int BITRATE_5M = 5000000;
- public static final int BITRATE_8M = 8000000;
-
- // The array of supported bitrates
- private static final int[] SUPPORTED_BITRATES = new int[] {
- BITRATE_28K,
- BITRATE_40K,
- BITRATE_64K,
- BITRATE_96K,
- BITRATE_128K,
- BITRATE_192K,
- BITRATE_256K,
- BITRATE_384K,
- BITRATE_512K,
- BITRATE_800K
- };
-
- // Video codec types
- public static final int VCODEC_H264BP = 1;
- public static final int VCODEC_H264MP = 2;
- public static final int VCODEC_H263 = 3;
- public static final int VCODEC_MPEG4 = 4;
-
- // The array of supported video codecs
- private static final int[] SUPPORTED_VCODECS = new int[] {
- VCODEC_H264BP,
- VCODEC_H263,
- VCODEC_MPEG4,
- };
-
- // Audio codec types
- public static final int ACODEC_AAC_LC = 1;
- public static final int ACODEC_AMRNB = 2;
- public static final int ACODEC_AMRWB = 3;
- public static final int ACODEC_MP3 = 4;
- public static final int ACODEC_OGG = 5;
-
- // The array of supported video codecs
- private static final int[] SUPPORTED_ACODECS = new int[] {
- ACODEC_AAC_LC,
- ACODEC_AMRNB,
- ACODEC_AMRWB
- };
-
- // File format types
- public static final int FILE_UNSUPPORTED = 0;
- public static final int FILE_3GP = 1;
- public static final int FILE_MP4 = 2;
- public static final int FILE_JPEG = 3;
- public static final int FILE_PNG = 4;
-
- // The array of the supported file formats
- private static final int[] SUPPORTED_VIDEO_FILE_FORMATS = new int[] {
- FILE_3GP,
- FILE_MP4
- };
-
- // The maximum count of audio tracks supported
- public static final int AUDIO_MAX_TRACK_COUNT = 1;
-
- // The maximum volume supported (100 means that no amplification is
- // supported, i.e. attenuation only)
- public static final int AUDIO_MAX_VOLUME_PERCENT = 100;
-
- /**
- * This class cannot be instantiated
- */
- private MediaProperties() {
- }
-
- /**
- * @return The array of supported aspect ratios
- */
- public static int[] getAllSupportedAspectRatios() {
- return ASPECT_RATIOS;
- }
-
- /**
- * Get the supported resolutions for the specified aspect ratio.
- *
- * @param aspectRatio The aspect ratio for which the resolutions are requested
- *
- * @return The array of width and height pairs
- */
- public static Pair<Integer, Integer>[] getSupportedResolutions(int aspectRatio) {
- final Pair<Integer, Integer>[] resolutions;
- switch(aspectRatio) {
- case ASPECT_RATIO_3_2: {
- resolutions = ASPECT_RATIO_3_2_RESOLUTIONS;
- break;
- }
-
- case ASPECT_RATIO_4_3: {
- resolutions = ASPECT_RATIO_4_3_RESOLUTIONS;
- break;
- }
-
- case ASPECT_RATIO_5_3: {
- resolutions = ASPECT_RATIO_5_3_RESOLUTIONS;
- break;
- }
-
- case ASPECT_RATIO_11_9: {
- resolutions = ASPECT_RATIO_11_9_RESOLUTIONS;
- break;
- }
-
- case ASPECT_RATIO_16_9: {
- resolutions = ASPECT_RATIO_16_9_RESOLUTIONS;
- break;
- }
-
- default: {
- throw new IllegalArgumentException("Unknown aspect ratio: " + aspectRatio);
- }
- }
-
- return resolutions;
- }
-
- /**
- * @return The array of supported video codecs
- */
- public static int[] getSupportedVideoCodecs() {
- return SUPPORTED_VCODECS;
- }
-
- /**
- * @return The array of supported audio codecs
- */
- public static int[] getSupportedAudioCodecs() {
- return SUPPORTED_ACODECS;
- }
-
- /**
- * @return The array of supported file formats
- */
- public static int[] getSupportedVideoFileFormat() {
- return SUPPORTED_VIDEO_FILE_FORMATS;
- }
-
- /**
- * @return The array of supported video bitrates
- */
- public static int[] getSupportedVideoBitrates() {
- return SUPPORTED_BITRATES;
- }
-
- /**
- * @return The maximum value for the audio volume
- */
- public static int getSupportedMaxVolume() {
- return MediaProperties.AUDIO_MAX_VOLUME_PERCENT;
- }
-
- /**
- * @return The maximum number of audio tracks supported
- */
- public static int getSupportedAudioTrackCount() {
- return MediaProperties.AUDIO_MAX_TRACK_COUNT;
- }
-}
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.videoeditor;
+
+import android.util.Pair;
+
+/**
+ * This class defines all properties of a media file, such as the supported
+ * heights, aspect ratios and bitrates for the export function.
+ * {@hide}
+ */
+public class MediaProperties {
+ /**
+ * Supported heights
+ */
+ public static final int HEIGHT_144 = 144;
+ public static final int HEIGHT_360 = 360;
+ public static final int HEIGHT_480 = 480;
+ public static final int HEIGHT_720 = 720;
+ public static final int HEIGHT_1088 = 1088;
+
+ /**
+ * Supported aspect ratios
+ */
+ public static final int ASPECT_RATIO_UNDEFINED = 0;
+ public static final int ASPECT_RATIO_3_2 = 1;
+ public static final int ASPECT_RATIO_16_9 = 2;
+ public static final int ASPECT_RATIO_4_3 = 3;
+ public static final int ASPECT_RATIO_5_3 = 4;
+ public static final int ASPECT_RATIO_11_9 = 5;
+
+ /**
+ * The array of supported aspect ratios
+ */
+ private static final int[] ASPECT_RATIOS = new int[] {
+ ASPECT_RATIO_3_2,
+ ASPECT_RATIO_16_9,
+ ASPECT_RATIO_4_3,
+ ASPECT_RATIO_5_3,
+ ASPECT_RATIO_11_9
+ };
+
+ /**
+ * Supported resolutions for specific aspect ratios
+ */
+ @SuppressWarnings({"unchecked"})
+ private static final Pair<Integer, Integer>[] ASPECT_RATIO_3_2_RESOLUTIONS =
+ new Pair[] {
+ new Pair<Integer, Integer>(720, HEIGHT_480),
+//*tmpLSA*/ new Pair<Integer, Integer>(1080, HEIGHT_720)
+/*tmpLSA*/ new Pair<Integer, Integer>(1088, HEIGHT_720)
+ };
+
+ @SuppressWarnings({"unchecked"})
+ private static final Pair<Integer, Integer>[] ASPECT_RATIO_4_3_RESOLUTIONS =
+ new Pair[] {
+ new Pair<Integer, Integer>(640, HEIGHT_480),
+ new Pair<Integer, Integer>(960, HEIGHT_720)
+ };
+
+ @SuppressWarnings({"unchecked"})
+ private static final Pair<Integer, Integer>[] ASPECT_RATIO_5_3_RESOLUTIONS =
+ new Pair[] {
+ new Pair<Integer, Integer>(800, HEIGHT_480)
+ };
+
+ @SuppressWarnings({"unchecked"})
+ private static final Pair<Integer, Integer>[] ASPECT_RATIO_11_9_RESOLUTIONS =
+ new Pair[] {
+ new Pair<Integer, Integer>(176, HEIGHT_144)
+ };
+
+ @SuppressWarnings({"unchecked"})
+ private static final Pair<Integer, Integer>[] ASPECT_RATIO_16_9_RESOLUTIONS =
+ new Pair[] {
+ new Pair<Integer, Integer>(848, HEIGHT_480),
+ new Pair<Integer, Integer>(1280, HEIGHT_720),
+ };
+
+ /**
+ * Bitrate values (in bits per second)
+ */
+ public static final int BITRATE_28K = 28000;
+ public static final int BITRATE_40K = 40000;
+ public static final int BITRATE_64K = 64000;
+ public static final int BITRATE_96K = 96000;
+ public static final int BITRATE_128K = 128000;
+ public static final int BITRATE_192K = 192000;
+ public static final int BITRATE_256K = 256000;
+ public static final int BITRATE_384K = 384000;
+ public static final int BITRATE_512K = 512000;
+ public static final int BITRATE_800K = 800000;
+ public static final int BITRATE_2M = 2000000;
+ public static final int BITRATE_5M = 5000000;
+ public static final int BITRATE_8M = 8000000;
+
+ /**
+ * The array of supported bitrates
+ */
+ private static final int[] SUPPORTED_BITRATES = new int[] {
+ BITRATE_28K,
+ BITRATE_40K,
+ BITRATE_64K,
+ BITRATE_96K,
+ BITRATE_128K,
+ BITRATE_192K,
+ BITRATE_256K,
+ BITRATE_384K,
+ BITRATE_512K,
+ BITRATE_800K,
+ BITRATE_2M,
+ BITRATE_5M,
+ BITRATE_8M
+ };
+
+ /**
+ * Video codec types
+ */
+ public static final int VCODEC_H263 = 1;
+ public static final int VCODEC_MPEG4 = 2;
+ // 3 Value is used for MPEG4_EMP
+ public static final int VCODEC_H264BP = 4;
+ public static final int VCODEC_H264MP = 5; // Unsupported
+
+ /**
+ * The array of supported video codecs
+ */
+ private static final int[] SUPPORTED_VCODECS = new int[] {
+ VCODEC_H264BP,
+ VCODEC_H263,
+ VCODEC_MPEG4,
+ };
+
+ /**
+ * Audio codec types
+ */
+ public static final int ACODEC_NO_AUDIO = 0;
+ public static final int ACODEC_AMRNB = 1;
+ public static final int ACODEC_AAC_LC = 2;
+ public static final int ACODEC_AAC_PLUS = 3;
+ public static final int ACODEC_ENHANCED_AAC_PLUS = 4;
+ public static final int ACODEC_MP3 = 5;
+ public static final int ACODEC_EVRC = 6;
+ // 7 value is used for PCM
+ public static final int ACODEC_AMRWB = 8;
+ public static final int ACODEC_OGG = 9;
+
+ /**
+ * The array of supported audio codecs
+ */
+ private static final int[] SUPPORTED_ACODECS = new int[] {
+ ACODEC_AAC_LC,
+ ACODEC_AMRNB,
+ ACODEC_AMRWB
+ };
+
+
+ /**
+ * Samples per frame for each audio codec
+ */
+ public static final int SAMPLES_PER_FRAME_AAC = 1024;
+ public static final int SAMPLES_PER_FRAME_MP3 = 1152;
+ public static final int SAMPLES_PER_FRAME_AMRNB = 160;
+ public static final int SAMPLES_PER_FRAME_AMRWB = 320;
+
+ public static final int DEFAULT_SAMPLING_FREQUENCY = 32000;
+ public static final int DEFAULT_CHANNEL_COUNT = 2;
+
+ /**
+ * File format types
+ */
+ public static final int FILE_3GP = 0;
+ public static final int FILE_MP4 = 1;
+ // 2 is for AMRNB
+ public static final int FILE_MP3 = 3;
+ // 4 is for PCM
+ public static final int FILE_JPEG = 5;
+ // 6 is for GIF
+ public static final int FILE_PNG = 7;
+ public static final int FILE_UNSUPPORTED = 255;
+ /**
+ * The array of the supported file formats
+ */
+ private static final int[] SUPPORTED_VIDEO_FILE_FORMATS = new int[] {
+ FILE_3GP,
+ FILE_MP4
+ };
+
+ /**
+ * The maximum count of audio tracks supported
+ */
+ public static final int AUDIO_MAX_TRACK_COUNT = 1;
+
+ /**
+ * The maximum volume supported (100 means that no amplification is
+ * supported, i.e. attenuation only)
+ */
+ public static final int AUDIO_MAX_VOLUME_PERCENT = 100;
+
+ /**
+ * This class cannot be instantiated
+ */
+ private MediaProperties() {
+ }
+
+ /**
+ * @return The array of supported aspect ratios
+ */
+ public static int[] getAllSupportedAspectRatios() {
+ return ASPECT_RATIOS;
+ }
+
+ /**
+ * Get the supported resolutions for the specified aspect ratio.
+ *
+ * @param aspectRatio The aspect ratio for which the resolutions are
+ * requested
+ * @return The array of width and height pairs
+ */
+ public static Pair<Integer, Integer>[] getSupportedResolutions(int aspectRatio) {
+ final Pair<Integer, Integer>[] resolutions;
+ switch (aspectRatio) {
+ case ASPECT_RATIO_3_2: {
+ resolutions = ASPECT_RATIO_3_2_RESOLUTIONS;
+ break;
+ }
+
+ case ASPECT_RATIO_4_3: {
+ resolutions = ASPECT_RATIO_4_3_RESOLUTIONS;
+ break;
+ }
+
+ case ASPECT_RATIO_5_3: {
+ resolutions = ASPECT_RATIO_5_3_RESOLUTIONS;
+ break;
+ }
+
+ case ASPECT_RATIO_11_9: {
+ resolutions = ASPECT_RATIO_11_9_RESOLUTIONS;
+ break;
+ }
+
+ case ASPECT_RATIO_16_9: {
+ resolutions = ASPECT_RATIO_16_9_RESOLUTIONS;
+ break;
+ }
+
+ default: {
+ throw new IllegalArgumentException("Unknown aspect ratio: " + aspectRatio);
+ }
+ }
+
+ return resolutions;
+ }
+
+ /**
+ * @return The array of supported video codecs
+ */
+ public static int[] getSupportedVideoCodecs() {
+ return SUPPORTED_VCODECS;
+ }
+
+ /**
+ * @return The array of supported audio codecs
+ */
+ public static int[] getSupportedAudioCodecs() {
+ return SUPPORTED_ACODECS;
+ }
+
+ /**
+ * @return The array of supported file formats
+ */
+ public static int[] getSupportedVideoFileFormat() {
+ return SUPPORTED_VIDEO_FILE_FORMATS;
+ }
+
+ /**
+ * @return The array of supported video bitrates
+ */
+ public static int[] getSupportedVideoBitrates() {
+ return SUPPORTED_BITRATES;
+ }
+
+ /**
+ * @return The maximum value for the audio volume
+ */
+ public static int getSupportedMaxVolume() {
+ return MediaProperties.AUDIO_MAX_VOLUME_PERCENT;
+ }
+
+ /**
+ * @return The maximum number of audio tracks supported
+ */
+ public static int getSupportedAudioTrackCount() {
+ return MediaProperties.AUDIO_MAX_TRACK_COUNT;
+ }
+}
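MediaProperties is a static capability table; callers typically pick an aspect ratio and then one of the resolutions listed for it. A minimal sketch (not part of the patch, requires android.util.Pair) using only the getters defined above:

    // Minimal sketch: choose the largest supported 16:9 export resolution.
    // The resolution arrays above are listed in ascending order, so the last
    // entry is the largest (1280 x HEIGHT_720 for 16:9).
    static Pair<Integer, Integer> largest169Resolution() {
        Pair<Integer, Integer>[] resolutions =
                MediaProperties.getSupportedResolutions(MediaProperties.ASPECT_RATIO_16_9);
        return resolutions[resolutions.length - 1];
    }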
diff --git a/media/java/android/media/videoeditor/MediaVideoItem.java b/media/java/android/media/videoeditor/MediaVideoItem.java
index 5fcfe3c11214..772b360590b4 100755
--- a/media/java/android/media/videoeditor/MediaVideoItem.java
+++ b/media/java/android/media/videoeditor/MediaVideoItem.java
@@ -1,482 +1,686 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.videoeditor;
-
-import java.io.IOException;
-import java.lang.ref.SoftReference;
-
-import android.graphics.Bitmap;
-import android.view.SurfaceHolder;
-
-/**
- * This class represents a video clip item on the storyboard
- * {@hide}
- */
-public class MediaVideoItem extends MediaItem {
- // Instance variables
- private final int mWidth;
- private final int mHeight;
- private final int mAspectRatio;
- private final int mFileType;
- private final int mVideoType;
- private final int mVideoProfile;
- private final int mVideoBitrate;
- private final long mDurationMs;
- private final int mAudioBitrate;
- private final int mFps;
- private final int mAudioType;
- private final int mAudioChannels;
- private final int mAudioSamplingFrequency;
-
- private long mBeginBoundaryTimeMs;
- private long mEndBoundaryTimeMs;
- private int mVolumePercentage;
- private boolean mMuted;
- private String mAudioWaveformFilename;
- // The audio waveform data
- private SoftReference<WaveformData> mWaveformData;
-
- /**
- * An object of this type cannot be instantiated with a default constructor
- */
- @SuppressWarnings("unused")
- private MediaVideoItem() throws IOException {
- this(null, null, null, RENDERING_MODE_BLACK_BORDER);
- }
-
- /**
- * Constructor
- *
- * @param editor The video editor reference
- * @param mediaItemId The MediaItem id
- * @param filename The image file name
- * @param renderingMode The rendering mode
- *
- * @throws IOException if the file cannot be opened for reading
- */
- public MediaVideoItem(VideoEditor editor, String mediaItemId, String filename,
- int renderingMode)
- throws IOException {
- this(editor, mediaItemId, filename, renderingMode, 0, END_OF_FILE, 100, false, null);
- }
-
- /**
- * Constructor
- *
- * @param editor The video editor reference
- * @param mediaItemId The MediaItem id
- * @param filename The image file name
- * @param renderingMode The rendering mode
- * @param beginMs Start time in milliseconds. Set to 0 to extract from the
- * beginning
- * @param endMs End time in milliseconds. Set to {@link #END_OF_FILE} to
- * extract until the end
- * @param volumePercent in %/. 100% means no change; 50% means half value, 200%
- * means double, 0% means silent.
- * @param muted true if the audio is muted
- * @param audioWaveformFilename The name of the audio waveform file
- *
- * @throws IOException if the file cannot be opened for reading
- */
- MediaVideoItem(VideoEditor editor, String mediaItemId, String filename, int renderingMode,
- long beginMs, long endMs, int volumePercent, boolean muted,
- String audioWaveformFilename) throws IOException {
- super(editor, mediaItemId, filename, renderingMode);
- // TODO: Set these variables correctly
- mWidth = 1080;
- mHeight = 720;
- mAspectRatio = MediaProperties.ASPECT_RATIO_3_2;
- mFileType = MediaProperties.FILE_MP4;
- mVideoType = MediaProperties.VCODEC_H264BP;
- // Do we have predefined values for this variable?
- mVideoProfile = 0;
- // Can video and audio duration be different?
- mDurationMs = 10000;
- mVideoBitrate = 800000;
- mAudioBitrate = 30000;
- mFps = 30;
- mAudioType = MediaProperties.ACODEC_AAC_LC;
- mAudioChannels = 2;
- mAudioSamplingFrequency = 16000;
-
- mBeginBoundaryTimeMs = beginMs;
- mEndBoundaryTimeMs = endMs == END_OF_FILE ? mDurationMs : endMs;
- mVolumePercentage = volumePercent;
- mMuted = muted;
- mAudioWaveformFilename = audioWaveformFilename;
- if (audioWaveformFilename != null) {
- mWaveformData =
- new SoftReference<WaveformData>(new WaveformData(audioWaveformFilename));
- } else {
- mWaveformData = null;
- }
- }
-
- /**
- * Sets the start and end marks for trimming a video media item.
- * This method will adjust the duration of bounding transitions, effects
- * and overlays if the current duration of the transactions become greater
- * than the maximum allowable duration.
- *
- * @param beginMs Start time in milliseconds. Set to 0 to extract from the
- * beginning
- * @param endMs End time in milliseconds. Set to {@link #END_OF_FILE} to
- * extract until the end
- *
- * @throws IllegalArgumentException if the start time is greater or equal than
- * end time, the end time is beyond the file duration, the start time
- * is negative
- */
- public void setExtractBoundaries(long beginMs, long endMs) {
- if (beginMs > mDurationMs) {
- throw new IllegalArgumentException("Invalid start time");
- }
- if (endMs > mDurationMs) {
- throw new IllegalArgumentException("Invalid end time");
- }
-
- if (beginMs != mBeginBoundaryTimeMs) {
- if (mBeginTransition != null) {
- mBeginTransition.invalidate();
- }
- }
-
- if (endMs != mEndBoundaryTimeMs) {
- if (mEndTransition != null) {
- mEndTransition.invalidate();
- }
- }
-
- mBeginBoundaryTimeMs = beginMs;
- mEndBoundaryTimeMs = endMs;
-
- adjustTransitions();
-
- // Note that the start and duration of any effects and overlays are
- // not adjusted nor are they automatically removed if they fall
- // outside the new boundaries.
- }
-
- /**
- * @return The boundary begin time
- */
- public long getBoundaryBeginTime() {
- return mBeginBoundaryTimeMs;
- }
-
- /**
- * @return The boundary end time
- */
- public long getBoundaryEndTime() {
- return mEndBoundaryTimeMs;
- }
-
- /*
- * {@inheritDoc}
- */
- @Override
- public void addEffect(Effect effect) {
- if (effect instanceof EffectKenBurns) {
- throw new IllegalArgumentException("Ken Burns effects cannot be applied to MediaVideoItem");
- }
- super.addEffect(effect);
- }
-
- /*
- * {@inheritDoc}
- */
- @Override
- public Bitmap getThumbnail(int width, int height, long timeMs) {
- return null;
- }
-
- /*
- * {@inheritDoc}
- */
- @Override
- public Bitmap[] getThumbnailList(int width, int height, long startMs, long endMs,
- int thumbnailCount) throws IOException {
- return null;
- }
-
- /*
- * {@inheritDoc}
- */
- @Override
- void invalidateTransitions(long startTimeMs, long durationMs) {
- // Check if the item overlaps with the beginning and end transitions
- if (mBeginTransition != null) {
- if (isOverlapping(startTimeMs, durationMs,
- mBeginBoundaryTimeMs, mBeginTransition.getDuration())) {
- mBeginTransition.invalidate();
- }
- }
-
- if (mEndTransition != null) {
- final long transitionDurationMs = mEndTransition.getDuration();
- if (isOverlapping(startTimeMs, durationMs,
- mEndBoundaryTimeMs - transitionDurationMs, transitionDurationMs)) {
- mEndTransition.invalidate();
- }
- }
- }
-
- /*
- * {@inheritDoc}
- */
- @Override
- void invalidateTransitions(long oldStartTimeMs, long oldDurationMs, long newStartTimeMs,
- long newDurationMs) {
- // Check if the item overlaps with the beginning and end transitions
- if (mBeginTransition != null) {
- final long transitionDurationMs = mBeginTransition.getDuration();
- // If the start time has changed and if the old or the new item
- // overlaps with the begin transition, invalidate the transition.
- if (oldStartTimeMs != newStartTimeMs &&
- (isOverlapping(oldStartTimeMs, oldDurationMs,
- mBeginBoundaryTimeMs, transitionDurationMs) ||
- isOverlapping(newStartTimeMs, newDurationMs,
- mBeginBoundaryTimeMs, transitionDurationMs))) {
- mBeginTransition.invalidate();
- }
- }
-
- if (mEndTransition != null) {
- final long transitionDurationMs = mEndTransition.getDuration();
- // If the start time + duration has changed and if the old or the new
- // item overlaps the end transition, invalidate the transition/
- if (oldStartTimeMs + oldDurationMs != newStartTimeMs + newDurationMs &&
- (isOverlapping(oldStartTimeMs, oldDurationMs,
- mEndBoundaryTimeMs - transitionDurationMs, transitionDurationMs) ||
- isOverlapping(newStartTimeMs, newDurationMs,
- mEndBoundaryTimeMs - transitionDurationMs, transitionDurationMs))) {
- mEndTransition.invalidate();
- }
- }
- }
-
- /*
- * {@inheritDoc}
- */
- @Override
- public int getAspectRatio() {
- return mAspectRatio;
- }
-
- /*
- * {@inheritDoc}
- */
- @Override
- public int getFileType() {
- return mFileType;
- }
-
- /*
- * {@inheritDoc}
- */
- @Override
- public int getWidth() {
- return mWidth;
- }
-
- /*
- * {@inheritDoc}
- */
- @Override
- public int getHeight() {
- return mHeight;
- }
-
- /*
- * {@inheritDoc}
- */
- @Override
- public long getDuration() {
- return mDurationMs;
- }
-
- /*
- * {@inheritDoc}
- */
- @Override
- public long getTimelineDuration() {
- return mEndBoundaryTimeMs - mBeginBoundaryTimeMs;
- }
-
- /**
- * Render a frame according to the playback (in the native aspect ratio) for
- * the specified media item. All effects and overlays applied to the media
- * item are ignored. The extract boundaries are also ignored. This method
- * can be used to playback frames when implementing trimming functionality.
- *
- * @param surfaceHolder SurfaceHolder used by the application
- * @param timeMs time corresponding to the frame to display (relative to the
- * the beginning of the media item).
- * @return The accurate time stamp of the frame that is rendered .
- * @throws IllegalStateException if a playback, preview or an export is
- * already in progress
- * @throws IllegalArgumentException if time is negative or greater than the
- * media item duration
- */
- public long renderFrame(SurfaceHolder surfaceHolder, long timeMs) {
- return timeMs;
- }
-
- /**
- * This API allows to generate a file containing the sample volume levels of
- * the Audio track of this media item. This function may take significant
- * time and is blocking. The file can be retrieved using
- * getAudioWaveformFilename().
- *
- * @param listener The progress listener
- *
- * @throws IOException if the output file cannot be created
- * @throws IllegalArgumentException if the mediaItem does not have a valid
- * Audio track
- */
- public void extractAudioWaveform(ExtractAudioWaveformProgressListener listener)
- throws IOException {
- // TODO: Set mAudioWaveformFilename at the end once the export is complete
- mWaveformData = new SoftReference<WaveformData>(new WaveformData(mAudioWaveformFilename));
- }
-
- /**
- * Get the audio waveform file name if {@link #extractAudioWaveform()} was
- * successful. The file format is as following:
- * <ul>
- * <li>first 4 bytes provide the number of samples for each value, as big-endian signed</li>
- * <li>4 following bytes is the total number of values in the file, as big-endian signed</li>
- * <li>all values follow as bytes Name is unique.</li>
- *</ul>
- * @return the name of the file, null if the file has not been computed or
- * if there is no Audio track in the mediaItem
- */
- String getAudioWaveformFilename() {
- return mAudioWaveformFilename;
- }
-
- /**
- * @return The waveform data
- */
- public WaveformData getWaveformData() throws IOException {
- if (mWaveformData == null) {
- return null;
- }
-
- WaveformData waveformData = mWaveformData.get();
- if (waveformData != null) {
- return waveformData;
- } else if (mAudioWaveformFilename != null) {
- waveformData = new WaveformData(mAudioWaveformFilename);
- mWaveformData = new SoftReference<WaveformData>(waveformData);
- return waveformData;
- } else {
- return null;
- }
- }
-
- /**
- * Set volume of the Audio track of this mediaItem
- *
- * @param volumePercent in %/. 100% means no change; 50% means half value, 200%
- * means double, 0% means silent.
- * @throws UsupportedOperationException if volume value is not supported
- */
- public void setVolume(int volumePercent) {
- mVolumePercentage = volumePercent;
- }
-
- /**
- * Get the volume value of the audio track as percentage. Call of this
- * method before calling setVolume will always return 100%
- *
- * @return the volume in percentage
- */
- public int getVolume() {
- return mVolumePercentage;
- }
-
- /**
- * @param muted true to mute the media item
- */
- public void setMute(boolean muted) {
- mMuted = muted;
- }
-
- /**
- * @return true if the media item is muted
- */
- public boolean isMuted() {
- return mMuted;
- }
-
- /**
- * @return The video type
- */
- public int getVideoType() {
- return mVideoType;
- }
-
- /**
- * @return The video profile
- */
- public int getVideoProfile() {
- return mVideoProfile;
- }
-
- /**
- * @return The video bitrate
- */
- public int getVideoBitrate() {
- return mVideoBitrate;
- }
-
- /**
- * @return The audio bitrate
- */
- public int getAudioBitrate() {
- return mAudioBitrate;
- }
-
- /**
- * @return The number of frames per second
- */
- public int getFps() {
- return mFps;
- }
-
- /**
- * @return The audio codec
- */
- public int getAudioType() {
- return mAudioType;
- }
-
- /**
- * @return The number of audio channels
- */
- public int getAudioChannels() {
- return mAudioChannels;
- }
-
- /**
- * @return The audio sample frequency
- */
- public int getAudioSamplingFrequency() {
- return mAudioSamplingFrequency;
- }
-}
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.videoeditor;
+
+import java.io.File;
+import java.io.IOException;
+import java.lang.ref.SoftReference;
+import android.graphics.Bitmap;
+import android.media.videoeditor.MediaArtistNativeHelper.ClipSettings;
+import android.media.videoeditor.MediaArtistNativeHelper.Properties;
+import android.view.Surface;
+import android.view.SurfaceHolder;
+
+/**
+ * This class represents a video clip item on the storyboard
+ * {@hide}
+ */
+public class MediaVideoItem extends MediaItem {
+
+ /**
+ * Instance variables
+ */
+ private final int mWidth;
+ private final int mHeight;
+ private final int mAspectRatio;
+ private final int mFileType;
+ private final int mVideoType;
+ private final int mVideoProfile;
+ private final int mVideoBitrate;
+ private final long mDurationMs;
+ private final int mAudioBitrate;
+ private final int mFps;
+ private final int mAudioType;
+ private final int mAudioChannels;
+ private final int mAudioSamplingFrequency;
+ private long mBeginBoundaryTimeMs;
+ private long mEndBoundaryTimeMs;
+ private int mVolumePercentage;
+ private boolean mMuted;
+ private String mAudioWaveformFilename;
+ private MediaArtistNativeHelper mMANativeHelper;
+ private VideoEditorImpl mVideoEditor;
+ /**
+ * The audio waveform data
+ */
+ private SoftReference<WaveformData> mWaveformData;
+
+ /**
+ * An object of this type cannot be instantiated with a default constructor
+ */
+ @SuppressWarnings("unused")
+ private MediaVideoItem() throws IOException {
+ this(null, null, null, RENDERING_MODE_BLACK_BORDER);
+ }
+
+ /**
+ * Constructor
+ *
+ * @param editor The video editor reference
+ * @param mediaItemId The MediaItem id
+ * @param filename The video file name
+ * @param renderingMode The rendering mode
+ *
+ * @throws IOException if the file cannot be opened for reading
+ */
+ public MediaVideoItem(VideoEditor editor, String mediaItemId,
+ String filename,
+ int renderingMode)
+ throws IOException {
+ this(editor, mediaItemId, filename, renderingMode, 0, END_OF_FILE,
+ 100, false, null);
+ }
+
+ /**
+ * Constructor
+ *
+ * @param editor The video editor reference
+ * @param mediaItemId The MediaItem id
+ * @param filename The video file name
+ * @param renderingMode The rendering mode
+ * @param beginMs Start time in milliseconds. Set to 0 to extract from the
+ * beginning
+ * @param endMs End time in milliseconds. Set to {@link #END_OF_FILE} to
+ * extract until the end
+ * @param volumePercent Volume in percent. 100% means no change; 50% means half value, 200%
+ * means double, 0% means silent.
+ * @param muted true if the audio is muted
+ * @param audioWaveformFilename The name of the audio waveform file
+ *
+ * @throws IOException if the file cannot be opened for reading
+ */
+ MediaVideoItem(VideoEditor editor, String mediaItemId, String filename,
+ int renderingMode,
+ long beginMs, long endMs, int volumePercent, boolean muted,
+ String audioWaveformFilename) throws IOException {
+ super(editor, mediaItemId, filename, renderingMode);
+ if (editor instanceof VideoEditorImpl) {
+ mMANativeHelper = ((VideoEditorImpl)editor).getNativeContext();
+ mVideoEditor = ((VideoEditorImpl)editor);
+ }
+ Properties properties = null;
+ try {
+ properties = mMANativeHelper.getMediaProperties(filename);
+ } catch (Exception e) {
+ throw new IllegalArgumentException("Unsupported file or file not found");
+ }
+ switch (mMANativeHelper.getFileType(properties.fileType)) {
+ case MediaProperties.FILE_3GP:
+ break;
+ case MediaProperties.FILE_MP4:
+ break;
+
+ default:
+ throw new IllegalArgumentException("Unsupported Input File Type");
+ }
+
+ switch (mMANativeHelper.getVideoCodecType(properties.videoFormat)) {
+ case MediaProperties.VCODEC_H263:
+ break;
+ case MediaProperties.VCODEC_H264BP:
+ break;
+ case MediaProperties.VCODEC_H264MP:
+ break;
+ case MediaProperties.VCODEC_MPEG4:
+ break;
+
+ default:
+ throw new IllegalArgumentException("Unsupported Video Codec Format in Input File");
+ }
+
+ mWidth = properties.width;
+ mHeight = properties.height;
+ mAspectRatio = mMANativeHelper.getAspectRatio(properties.width,
+ properties.height);
+ mFileType = mMANativeHelper.getFileType(properties.fileType);
+ mVideoType = mMANativeHelper.getVideoCodecType(properties.videoFormat);
+ mVideoProfile = 0;
+ mDurationMs = properties.videoDuration;
+ mVideoBitrate = properties.videoBitrate;
+ mAudioBitrate = properties.audioBitrate;
+ mFps = (int)properties.averageFrameRate;
+ mAudioType = mMANativeHelper.getAudioCodecType(properties.audioFormat);
+ mAudioChannels = properties.audioChannels;
+ mAudioSamplingFrequency = properties.audioSamplingFrequency;
+ mBeginBoundaryTimeMs = beginMs;
+ mEndBoundaryTimeMs = endMs == END_OF_FILE ? mDurationMs : endMs;
+ mVolumePercentage = volumePercent;
+ mMuted = muted;
+ mAudioWaveformFilename = audioWaveformFilename;
+ if (audioWaveformFilename != null) {
+ mWaveformData =
+ new SoftReference<WaveformData>(
+ new WaveformData(audioWaveformFilename));
+ } else {
+ mWaveformData = null;
+ }
+ }
+
+ /**
+ * Sets the start and end marks for trimming a video media item.
+ * This method will adjust the duration of bounding transitions, effects
+ * and overlays if the current duration of the transitions becomes greater
+ * than the maximum allowable duration.
+ *
+ * @param beginMs Start time in milliseconds. Set to 0 to extract from the
+ * beginning
+ * @param endMs End time in milliseconds. Set to {@link #END_OF_FILE} to
+ * extract until the end
+ *
+ * @throws IllegalArgumentException if the start time is greater than or
+ * equal to the end time, the end time is beyond the file duration, or
+ * the start time is negative
+ */
+ public void setExtractBoundaries(long beginMs, long endMs) {
+ if (beginMs > mDurationMs) {
+ throw new IllegalArgumentException("setExtractBoundaries: Invalid start time");
+ }
+ if (endMs > mDurationMs) {
+ throw new IllegalArgumentException("setExtractBoundaries: Invalid end time");
+ }
+ if ((endMs != -1) && (beginMs >= endMs)) {
+ throw new IllegalArgumentException("setExtractBoundaries: Start time is greater than end time");
+ }
+
+ if ((beginMs < 0) || ((endMs != -1) && (endMs < 0))) {
+ throw new IllegalArgumentException("setExtractBoundaries: Start time or end time is negative");
+ }
+
+ if (beginMs != mBeginBoundaryTimeMs) {
+ if (mBeginTransition != null) {
+ mBeginTransition.invalidate();
+ }
+ }
+
+ if (endMs != mEndBoundaryTimeMs) {
+ if (mEndTransition != null) {
+ mEndTransition.invalidate();
+ }
+ }
+
+ mBeginBoundaryTimeMs = beginMs;
+ mEndBoundaryTimeMs = endMs;
+ mMANativeHelper.setGeneratePreview(true);
+ adjustTransitions();
+ mVideoEditor.updateTimelineDuration();
+ /**
+ * Note that the start and duration of any effects and overlays are
+ * not adjusted nor are they automatically removed if they fall
+ * outside the new boundaries.
+ */
+ }
+
+ /**
+ * @return The boundary begin time
+ */
+ public long getBoundaryBeginTime() {
+ return mBeginBoundaryTimeMs;
+ }
+
+ /**
+ * @return The boundary end time
+ */
+ public long getBoundaryEndTime() {
+ return mEndBoundaryTimeMs;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public void addEffect(Effect effect) {
+ if (effect instanceof EffectKenBurns) {
+ throw new IllegalArgumentException("Ken Burns effects cannot be applied to MediaVideoItem");
+ }
+ super.addEffect(effect);
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public Bitmap getThumbnail(int width, int height, long timeMs) {
+ if (timeMs > mDurationMs) {
+ throw new IllegalArgumentException("Time exceeds duration");
+ }
+ if (timeMs < 0) {
+ throw new IllegalArgumentException("Time is negative");
+ }
+ if ((width <= 0) || (height <= 0)) {
+ throw new IllegalArgumentException("Invalid dimensions");
+ }
+ return mMANativeHelper.getPixels(super.getFilename(),
+ width, height, timeMs);
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public Bitmap[] getThumbnailList(int width, int height, long startMs,
+ long endMs, int thumbnailCount) throws IOException {
+ if (startMs > endMs) {
+ throw new IllegalArgumentException("Start time is greater than end time");
+ }
+ if (endMs > mDurationMs) {
+ throw new IllegalArgumentException("End time is greater than file duration");
+ }
+ if ((height <= 0) || (width <= 0)) {
+ throw new IllegalArgumentException("Invalid dimension");
+ }
+ if (startMs == endMs) {
+ Bitmap[] bitmap = new Bitmap[1];
+ bitmap[0] = mMANativeHelper.getPixels(super.getFilename(),
+ width, height, startMs);
+ return bitmap;
+ }
+ return mMANativeHelper.getPixelsList(super.getFilename(), width,
+ height, startMs, endMs, thumbnailCount);
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ void invalidateTransitions(long startTimeMs, long durationMs) {
+ /**
+ * Check if the item overlaps with the beginning and end transitions
+ */
+ if (mBeginTransition != null) {
+ if (isOverlapping(startTimeMs, durationMs,
+ mBeginBoundaryTimeMs, mBeginTransition.getDuration())) {
+ mBeginTransition.invalidate();
+ }
+ }
+
+ if (mEndTransition != null) {
+ final long transitionDurationMs = mEndTransition.getDuration();
+ if (isOverlapping(startTimeMs, durationMs,
+ mEndBoundaryTimeMs - transitionDurationMs, transitionDurationMs)) {
+ mEndTransition.invalidate();
+ }
+ }
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ void invalidateTransitions(long oldStartTimeMs, long oldDurationMs,
+ long newStartTimeMs,
+ long newDurationMs) {
+ /**
+ * Check if the item overlaps with the beginning and end transitions
+ */
+ if (mBeginTransition != null) {
+ final long transitionDurationMs = mBeginTransition.getDuration();
+ /**
+ * If the start time has changed and if the old or the new item
+ * overlaps with the begin transition, invalidate the transition.
+ */
+ if (((oldStartTimeMs != newStartTimeMs)
+ || (oldDurationMs != newDurationMs)) &&
+ (isOverlapping(oldStartTimeMs, oldDurationMs,
+ mBeginBoundaryTimeMs, transitionDurationMs) ||
+ isOverlapping(newStartTimeMs, newDurationMs,
+ mBeginBoundaryTimeMs, transitionDurationMs))) {
+ mBeginTransition.invalidate();
+ }
+ }
+
+ if (mEndTransition != null) {
+ final long transitionDurationMs = mEndTransition.getDuration();
+ /**
+ * If the start time + duration has changed and if the old or the new
+ * item overlaps the end transition, invalidate the transition
+ */
+ if (oldStartTimeMs + oldDurationMs != newStartTimeMs + newDurationMs &&
+ (isOverlapping(oldStartTimeMs, oldDurationMs,
+ mEndBoundaryTimeMs - transitionDurationMs, transitionDurationMs) ||
+ isOverlapping(newStartTimeMs, newDurationMs,
+ mEndBoundaryTimeMs - transitionDurationMs, transitionDurationMs))) {
+ mEndTransition.invalidate();
+ }
+ }
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public int getAspectRatio() {
+ return mAspectRatio;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public int getFileType() {
+ return mFileType;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public int getWidth() {
+ return mWidth;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public int getHeight() {
+ return mHeight;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public long getDuration() {
+ return mDurationMs;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public long getTimelineDuration() {
+ return mEndBoundaryTimeMs - mBeginBoundaryTimeMs;
+ }
+
+ /**
+ * Render a frame according to the playback (in the native aspect ratio) for
+ * the specified media item. All effects and overlays applied to the media
+ * item are ignored. The extract boundaries are also ignored. This method
+ * can be used to playback frames when implementing trimming functionality.
+ *
+ * @param surfaceHolder SurfaceHolder used by the application
+ * @param timeMs time corresponding to the frame to display (relative to
+ * the beginning of the media item).
+ * @return The accurate time stamp of the frame that is rendered.
+ * @throws IllegalStateException if a playback, preview or an export is
+ * already in progress
+ * @throws IllegalArgumentException if time is negative or greater than the
+ * media item duration
+ */
+ public long renderFrame(SurfaceHolder surfaceHolder, long timeMs) {
+ if (surfaceHolder == null) {
+ throw new IllegalArgumentException("Surface Holder is null");
+ }
+
+ if (timeMs > mDurationMs || timeMs < 0) {
+ throw new IllegalArgumentException("requested time not correct");
+ }
+
+ Surface surface = surfaceHolder.getSurface();
+ if (surface == null) {
+ throw new RuntimeException("Surface could not be retrieved from Surface holder");
+ }
+
+ if (mFilename != null) {
+ return mMANativeHelper.renderMediaItemPreviewFrame(surface,
+ mFilename, timeMs, mWidth, mHeight);
+ } else {
+ return 0;
+ }
+ }
+
+
+ /**
+ * This API generates a file containing the sample volume levels of
+ * the Audio track of this media item. This function may take significant
+ * time and is blocking. The file can be retrieved using
+ * getAudioWaveformFilename().
+ *
+ * @param listener The progress listener
+ *
+ * @throws IOException if the output file cannot be created
+ * @throws IllegalArgumentException if the mediaItem does not have a valid
+ * Audio track
+ */
+ public void extractAudioWaveform(ExtractAudioWaveformProgressListener listener)
+ throws IOException {
+ int frameDuration = 0;
+ int sampleCount = 0;
+ final String projectPath = mMANativeHelper.getProjectPath();
+ /**
+ * Waveform file does not exist
+ */
+ if (mAudioWaveformFilename == null) {
+ /**
+ * Since audioWaveformFilename is not supplied, it is generated
+ */
+ String mAudioWaveFileName = null;
+
+ mAudioWaveFileName =
+ String.format(projectPath + "/" + "audioWaveformFile-"+ getId() + ".dat");
+ /**
+ * Logic to get frame duration = (no. of frames per sample * 1000)/
+ * sampling frequency
+ */
+ if (mMANativeHelper.getAudioCodecType(mAudioType) ==
+ MediaProperties.ACODEC_AMRNB) {
+ frameDuration = (MediaProperties.SAMPLES_PER_FRAME_AMRNB * 1000) /
+ MediaProperties.DEFAULT_SAMPLING_FREQUENCY;
+ sampleCount = MediaProperties.SAMPLES_PER_FRAME_AMRNB;
+ } else if (mMANativeHelper.getAudioCodecType(mAudioType) ==
+ MediaProperties.ACODEC_AMRWB) {
+ frameDuration = (MediaProperties.SAMPLES_PER_FRAME_AMRWB * 1000) /
+ MediaProperties.DEFAULT_SAMPLING_FREQUENCY;
+ sampleCount = MediaProperties.SAMPLES_PER_FRAME_AMRWB;
+ } else if (mMANativeHelper.getAudioCodecType(mAudioType) ==
+ MediaProperties.ACODEC_AAC_LC) {
+ frameDuration = (MediaProperties.SAMPLES_PER_FRAME_AAC * 1000) /
+ MediaProperties.DEFAULT_SAMPLING_FREQUENCY;
+ sampleCount = MediaProperties.SAMPLES_PER_FRAME_AAC;
+ }
+
+ mMANativeHelper.generateAudioGraph( getId(),
+ mFilename,
+ mAudioWaveFileName,
+ frameDuration,
+ MediaProperties.DEFAULT_CHANNEL_COUNT,
+ sampleCount,
+ listener,
+ true);
+ /**
+ * Record the generated file name
+ */
+ mAudioWaveformFilename = mAudioWaveFileName;
+ }
+ mWaveformData =
+ new SoftReference<WaveformData>(new WaveformData(mAudioWaveformFilename));
+ }
+
+ /**
+ * Get the audio waveform file name if {@link #extractAudioWaveform()} was
+ * successful. The file format is as following:
+ * <ul>
+ * <li>first 4 bytes provide the number of samples for each value, as big-endian signed</li>
+ * <li>the following 4 bytes provide the total number of values in the file, as big-endian signed</li>
+ * <li>all values follow as bytes</li>
+ *</ul>
+ * @return the name of the file, null if the file has not been computed or
+ * if there is no Audio track in the mediaItem
+ */
+ String getAudioWaveformFilename() {
+ return mAudioWaveformFilename;
+ }
+
+ /**
+ * Invalidate the AudioWaveform File
+ */
+ void invalidate() {
+ if (mAudioWaveformFilename != null) {
+ new File(mAudioWaveformFilename).delete();
+ mAudioWaveformFilename = null;
+ }
+ }
+
+ /**
+ * @return The waveform data
+ */
+ public WaveformData getWaveformData() throws IOException {
+ if (mWaveformData == null) {
+ return null;
+ }
+
+ WaveformData waveformData = mWaveformData.get();
+ if (waveformData != null) {
+ return waveformData;
+ } else if (mAudioWaveformFilename != null) {
+ try {
+ waveformData = new WaveformData(mAudioWaveformFilename);
+ } catch (IOException e) {
+ throw e;
+ }
+ mWaveformData = new SoftReference<WaveformData>(waveformData);
+ return waveformData;
+ } else {
+ return null;
+ }
+ }
+
+ /**
+ * Set volume of the Audio track of this mediaItem
+ *
+ * @param volumePercent Volume in percent. 100% means no change; 50% means half value, 200%
+ * means double, 0% means silent.
+ * @throws IllegalArgumentException if the volume percentage is out of range
+ */
+ public void setVolume(int volumePercent) {
+ if ((volumePercent < 0) || (volumePercent > 100)) {
+ throw new IllegalArgumentException("Invalid volume");
+ }
+
+ mVolumePercentage = volumePercent;
+ }
+
+ /**
+ * Get the volume of the audio track as a percentage. Calling this
+ * method before setVolume() always returns 100%.
+ *
+ * @return the volume in percentage
+ */
+ public int getVolume() {
+ return mVolumePercentage;
+ }
+
+ /**
+ * @param muted true to mute the media item
+ */
+ public void setMute(boolean muted) {
+ mMuted = muted;
+ if (mBeginTransition != null) {
+ mBeginTransition.invalidate();
+ }
+ if (mEndTransition != null) {
+ mEndTransition.invalidate();
+ }
+ mMANativeHelper.setGeneratePreview(true);
+ }
+
+ /**
+ * @return true if the media item is muted
+ */
+ public boolean isMuted() {
+ return mMuted;
+ }
+
+ /**
+ * @return The video type
+ */
+ public int getVideoType() {
+ return mVideoType;
+ }
+
+ /**
+ * @return The video profile
+ */
+ public int getVideoProfile() {
+ return mVideoProfile;
+ }
+
+ /**
+ * @return The video bitrate
+ */
+ public int getVideoBitrate() {
+ return mVideoBitrate;
+ }
+
+ /**
+ * @return The audio bitrate
+ */
+ public int getAudioBitrate() {
+ return mAudioBitrate;
+ }
+
+ /**
+ * @return The number of frames per second
+ */
+ public int getFps() {
+ return mFps;
+ }
+
+ /**
+ * @return The audio codec
+ */
+ public int getAudioType() {
+ return mAudioType;
+ }
+
+ /**
+ * @return The number of audio channels
+ */
+ public int getAudioChannels() {
+ return mAudioChannels;
+ }
+
+ /**
+ * @return The audio sample frequency
+ */
+ public int getAudioSamplingFrequency() {
+ return mAudioSamplingFrequency;
+ }
+
+ /**
+ * @return The Video media item properties in ClipSettings class object
+ * {@link android.media.videoeditor.MediaArtistNativeHelper.ClipSettings}
+ */
+ ClipSettings getVideoClipProperties() {
+ ClipSettings clipSettings = new ClipSettings();
+ clipSettings.clipPath = getFilename();
+ clipSettings.fileType = mMANativeHelper.getMediaItemFileType(getFileType());
+ clipSettings.beginCutTime = (int)getBoundaryBeginTime();
+ clipSettings.endCutTime = (int)getBoundaryEndTime();
+ clipSettings.mediaRendering = mMANativeHelper.getMediaItemRenderingMode(getRenderingMode());
+
+ return clipSettings;
+ }
+
+}
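Putting the MediaVideoItem pieces above together: construct the item from a file, trim it with setExtractBoundaries(), extract the audio waveform, and pull a thumbnail. The sketch below is illustrative only; the file path is hypothetical and the inline listener assumes the single onProgress(int) callback declared by ExtractAudioWaveformProgressListener.

    // Illustrative sketch, not part of the patch; "editor" is an existing
    // VideoEditor project (a VideoEditorImpl under the hood).
    static Bitmap trimAndThumbnail(VideoEditor editor) throws IOException {
        MediaVideoItem clip = new MediaVideoItem(editor, "clip1",
                "/sdcard/clip.mp4", MediaItem.RENDERING_MODE_BLACK_BORDER);
        clip.setExtractBoundaries(1000, 6000);   // keep 1 s .. 6 s of the source
        // Blocking call; writes audioWaveformFile-<id>.dat under the project path.
        clip.extractAudioWaveform(new ExtractAudioWaveformProgressListener() {
            public void onProgress(int progress) { /* progress in percent */ }
        });
        return clip.getThumbnail(clip.getWidth(), clip.getHeight(), 2000 /* ms */);
    }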
diff --git a/media/java/android/media/videoeditor/Overlay.java b/media/java/android/media/videoeditor/Overlay.java
index 0174ba8029b4..ec039666f570 100755
--- a/media/java/android/media/videoeditor/Overlay.java
+++ b/media/java/android/media/videoeditor/Overlay.java
@@ -1,192 +1,220 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.videoeditor;
-
-import java.util.HashMap;
-import java.util.Map;
-
-
-/**
- * This is the super class for all Overlay classes.
- * {@hide}
- */
-public abstract class Overlay {
- // Instance variables
- private final String mUniqueId;
- // The overlay owner
- private final MediaItem mMediaItem;
- // user attributes
- private final Map<String, String> mUserAttributes;
-
- protected long mStartTimeMs;
- protected long mDurationMs;
-
-
- /**
- * Default constructor
- */
- @SuppressWarnings("unused")
- private Overlay() {
- this(null, null, 0, 0);
- }
-
- /**
- * Constructor
- *
- * @param mediaItem The media item owner
- * @param overlayId The overlay id
- * @param startTimeMs The start time relative to the media item start time
- * @param durationMs The duration
- *
- * @throws IllegalArgumentException if the file type is not PNG or the
- * startTimeMs and durationMs are incorrect.
- */
- public Overlay(MediaItem mediaItem, String overlayId, long startTimeMs, long durationMs) {
- if (mediaItem == null) {
- throw new IllegalArgumentException("Media item cannot be null");
- }
-
- if (startTimeMs + durationMs > mediaItem.getDuration()) {
- throw new IllegalArgumentException("Invalid start time and duration");
- }
-
- mMediaItem = mediaItem;
- mUniqueId = overlayId;
- mStartTimeMs = startTimeMs;
- mDurationMs = durationMs;
- mUserAttributes = new HashMap<String, String>();
- }
-
- /**
- * @return The of the overlay
- */
- public String getId() {
- return mUniqueId;
- }
-
- /**
- * @return The duration of the overlay effect
- */
- public long getDuration() {
- return mDurationMs;
- }
-
- /**
- * If a preview or export is in progress, then this change is effective for
- * next preview or export session.
- *
- * @param durationMs The duration in milliseconds
- */
- public void setDuration(long durationMs) {
- if (mStartTimeMs + durationMs > mMediaItem.getDuration()) {
- throw new IllegalArgumentException("Duration is too large");
- }
-
- final long oldDurationMs = mDurationMs;
- mDurationMs = durationMs;
-
- mMediaItem.invalidateTransitions(mStartTimeMs, oldDurationMs, mStartTimeMs, mDurationMs);
- }
-
- /**
- * @return the start time of the overlay
- */
- public long getStartTime() {
- return mStartTimeMs;
- }
-
- /**
- * Set the start time for the overlay. If a preview or export is in
- * progress, then this change is effective for next preview or export
- * session.
- *
- * @param startTimeMs start time in milliseconds
- */
- public void setStartTime(long startTimeMs) {
- if (startTimeMs + mDurationMs > mMediaItem.getDuration()) {
- throw new IllegalArgumentException("Start time is too large");
- }
-
- final long oldStartTimeMs = mStartTimeMs;
- mStartTimeMs = startTimeMs;
-
- mMediaItem.invalidateTransitions(oldStartTimeMs, mDurationMs, mStartTimeMs, mDurationMs);
- }
-
- /**
- * Set the start time and duration
- *
- * @param startTimeMs start time in milliseconds
- * @param durationMs The duration in milliseconds
- */
- public void setStartTimeAndDuration(long startTimeMs, long durationMs) {
- if (startTimeMs + durationMs > mMediaItem.getDuration()) {
- throw new IllegalArgumentException("Invalid start time or duration");
- }
-
- final long oldStartTimeMs = mStartTimeMs;
- final long oldDurationMs = mDurationMs;
-
- mStartTimeMs = startTimeMs;
- mDurationMs = durationMs;
-
- mMediaItem.invalidateTransitions(oldStartTimeMs, oldDurationMs, mStartTimeMs, mDurationMs);
- }
-
- /**
- * @return The media item owner
- */
- public MediaItem getMediaItem() {
- return mMediaItem;
- }
-
- /**
- * Set a user attribute
- *
- * @param name The attribute name
- * @param value The attribute value
- */
- public void setUserAttribute(String name, String value) {
- mUserAttributes.put(name, value);
- }
-
- /**
- * @return The user attributes
- */
- public Map<String, String> getUserAttributes() {
- return mUserAttributes;
- }
-
- /*
- * {@inheritDoc}
- */
- @Override
- public boolean equals(Object object) {
- if (!(object instanceof Overlay)) {
- return false;
- }
- return mUniqueId.equals(((Overlay)object).mUniqueId);
- }
-
- /*
- * {@inheritDoc}
- */
- @Override
- public int hashCode() {
- return mUniqueId.hashCode();
- }
-}
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.videoeditor;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * This is the super class for all Overlay classes.
+ * {@hide}
+ */
+public abstract class Overlay {
+ /**
+ * Instance variables
+ */
+ private final String mUniqueId;
+ /**
+ * The overlay owner
+ */
+ private final MediaItem mMediaItem;
+ /**
+ * user attributes
+ */
+ private final Map<String, String> mUserAttributes;
+
+ protected long mStartTimeMs;
+ protected long mDurationMs;
+
+ /**
+ * Default constructor
+ */
+ @SuppressWarnings("unused")
+ private Overlay() {
+ this(null, null, 0, 0);
+ }
+
+ /**
+ * Constructor
+ *
+ * @param mediaItem The media item owner
+ * @param overlayId The overlay id
+ * @param startTimeMs The start time relative to the media item start time
+ * @param durationMs The duration
+ *
+ * @throws IllegalArgumentException if the file type is not PNG or the
+ * startTimeMs and durationMs are incorrect.
+ */
+ public Overlay(MediaItem mediaItem, String overlayId, long startTimeMs,
+ long durationMs) {
+ if (mediaItem == null) {
+ throw new IllegalArgumentException("Media item cannot be null");
+ }
+
+ if ((startTimeMs < 0) || (durationMs < 0)) {
+ throw new IllegalArgumentException("Invalid start time and/or duration");
+ }
+
+ if (startTimeMs + durationMs > mediaItem.getDuration()) {
+ throw new IllegalArgumentException("Invalid start time and duration");
+ }
+
+ mMediaItem = mediaItem;
+ mUniqueId = overlayId;
+ mStartTimeMs = startTimeMs;
+ mDurationMs = durationMs;
+ mUserAttributes = new HashMap<String, String>();
+ }
+
+ /**
+ * Get the overlay ID.
+ *
+ * @return The ID of the overlay
+ */
+ public String getId() {
+ return mUniqueId;
+ }
+
+ /**
+ * Get the duration of the overlay.
+ *
+ * @return The duration of the overlay effect
+ */
+ public long getDuration() {
+ return mDurationMs;
+ }
+
+ /**
+ * Set the duration of the overlay. If a preview or export is in progress,
+ * this change takes effect for the next preview or export session.
+ *
+ * @param durationMs The duration in milliseconds
+ */
+ public void setDuration(long durationMs) {
+ if (durationMs < 0) {
+ throw new IllegalArgumentException("Invalid duration");
+ }
+
+ if (mStartTimeMs + durationMs > mMediaItem.getDuration()) {
+ throw new IllegalArgumentException("Duration is too large");
+ }
+
+ final long oldDurationMs = mDurationMs;
+ mDurationMs = durationMs;
+
+ mMediaItem.invalidateTransitions(mStartTimeMs, oldDurationMs,
+ mStartTimeMs, mDurationMs);
+ }
+
+ /**
+ * Get the start time of the overlay.
+ *
+ * @return the start time of the overlay
+ */
+ public long getStartTime() {
+ return mStartTimeMs;
+ }
+
+ /**
+ * Set the start time for the overlay. If a preview or export is in
+ * progress, this change takes effect for the next preview or export
+ * session.
+ *
+ * @param startTimeMs start time in milliseconds
+ */
+ public void setStartTime(long startTimeMs) {
+ if (startTimeMs + mDurationMs > mMediaItem.getDuration()) {
+ throw new IllegalArgumentException("Start time is too large");
+ }
+
+ final long oldStartTimeMs = mStartTimeMs;
+ mStartTimeMs = startTimeMs;
+
+ mMediaItem.invalidateTransitions(oldStartTimeMs, mDurationMs,
+ mStartTimeMs, mDurationMs);
+ }
+
+ /**
+ * Set the start time and duration
+ *
+ * @param startTimeMs start time in milliseconds
+ * @param durationMs The duration in milliseconds
+ */
+ public void setStartTimeAndDuration(long startTimeMs, long durationMs) {
+ if (startTimeMs + durationMs > mMediaItem.getDuration()) {
+ throw new IllegalArgumentException("Invalid start time or duration");
+ }
+
+ final long oldStartTimeMs = mStartTimeMs;
+ final long oldDurationMs = mDurationMs;
+
+ mStartTimeMs = startTimeMs;
+ mDurationMs = durationMs;
+
+ mMediaItem.invalidateTransitions(oldStartTimeMs, oldDurationMs,
+ mStartTimeMs, mDurationMs);
+ }
+
+ /**
+ * Get the media item owner.
+ *
+ * @return The media item owner.
+ */
+ public MediaItem getMediaItem() {
+ return mMediaItem;
+ }
+
+ /**
+ * Set a user attribute
+ *
+ * @param name The attribute name
+ * @param value The attribute value
+ */
+ public void setUserAttribute(String name, String value) {
+ mUserAttributes.put(name, value);
+ }
+
+ /**
+ * Get the user attributes currently set.
+ *
+ * @return The user attributes
+ */
+ public Map<String, String> getUserAttributes() {
+ return mUserAttributes;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public boolean equals(Object object) {
+ if (!(object instanceof Overlay)) {
+ return false;
+ }
+ return mUniqueId.equals(((Overlay)object).mUniqueId);
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public int hashCode() {
+ return mUniqueId.hashCode();
+ }
+}
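The timing rules enforced above (start time and duration must be non-negative and must fit inside the owning media item) apply to every Overlay subclass. A small usage sketch, assuming an already-created overlay such as the OverlayFrame introduced later in this patch:

    import android.media.videoeditor.MediaItem;
    import android.media.videoeditor.Overlay;

    class OverlayTimingExample {
        /**
         * Move an overlay to startMs and clamp its duration so that
         * startMs + duration never exceeds the owning item's duration,
         * which would make the setters throw IllegalArgumentException.
         */
        static void reposition(Overlay overlay, long startMs, long wantedDurationMs) {
            MediaItem owner = overlay.getMediaItem();
            long maxDurationMs = owner.getDuration() - startMs;
            overlay.setStartTimeAndDuration(startMs,
                    Math.min(wantedDurationMs, maxDurationMs));
            overlay.setUserAttribute("kind", "caption"); // free-form key/value pair
        }
    }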
diff --git a/media/java/android/media/videoeditor/OverlayFrame.java b/media/java/android/media/videoeditor/OverlayFrame.java
index a5511f94fe03..834fc6689bc4 100755
--- a/media/java/android/media/videoeditor/OverlayFrame.java
+++ b/media/java/android/media/videoeditor/OverlayFrame.java
@@ -1,149 +1,248 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.videoeditor;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.IOException;
-
-import android.graphics.Bitmap;
-import android.graphics.BitmapFactory;
-import android.graphics.Bitmap.CompressFormat;
-
-
-/**
- * This class is used to overlay an image on top of a media item.
- * {@hide}
- */
-public class OverlayFrame extends Overlay {
- // Instance variables
- private Bitmap mBitmap;
- private String mFilename;
-
- /**
- * An object of this type cannot be instantiated by using the default
- * constructor
- */
- @SuppressWarnings("unused")
- private OverlayFrame() {
- this(null, null, (String)null, 0, 0);
- }
-
- /**
- * Constructor for an OverlayFrame
- *
- * @param mediaItem The media item owner
- * @param overlayId The overlay id
- * @param bitmap The bitmap to be used as an overlay. The size of the
- * bitmap must equal to the size of the media item to which it is
- * added. The bitmap is typically a decoded PNG file.
- * @param startTimeMs The overlay start time in milliseconds
- * @param durationMs The overlay duration in milliseconds
- *
- * @throws IllegalArgumentException if the file type is not PNG or the
- * startTimeMs and durationMs are incorrect.
- */
- public OverlayFrame(MediaItem mediaItem, String overlayId, Bitmap bitmap, long startTimeMs,
- long durationMs) {
- super(mediaItem, overlayId, startTimeMs, durationMs);
- mBitmap = bitmap;
- mFilename = null;
- }
-
- /**
- * Constructor for an OverlayFrame. This constructor can be used to
- * restore the overlay after it was saved internally by the video editor.
- *
- * @param mediaItem The media item owner
- * @param overlayId The overlay id
- * @param filename The file name that contains the overlay.
- * @param startTimeMs The overlay start time in milliseconds
- * @param durationMs The overlay duration in milliseconds
- *
- * @throws IllegalArgumentException if the file type is not PNG or the
- * startTimeMs and durationMs are incorrect.
- */
- OverlayFrame(MediaItem mediaItem, String overlayId, String filename, long startTimeMs,
- long durationMs) {
- super(mediaItem, overlayId, startTimeMs, durationMs);
- mFilename = filename;
- mBitmap = BitmapFactory.decodeFile(mFilename);
- }
-
- /**
- * @return Get the overlay bitmap
- */
- public Bitmap getBitmap() {
- return mBitmap;
- }
-
- /**
- * @param bitmap The overlay bitmap
- */
- public void setBitmap(Bitmap bitmap) {
- mBitmap = bitmap;
- if (mFilename != null) {
- // Delete the file
- new File(mFilename).delete();
- // Invalidate the filename
- mFilename = null;
- }
-
- // Invalidate the transitions if necessary
- getMediaItem().invalidateTransitions(mStartTimeMs, mDurationMs);
- }
-
- /**
- * Get the file name of this overlay
- */
- String getFilename() {
- return mFilename;
- }
-
- /**
- * Save the overlay to the project folder
- *
- * @param path The path where the overlay will be saved
- *
- * @return The filename
- * @throws FileNotFoundException if the bitmap cannot be saved
- * @throws IOException if the bitmap file cannot be saved
- */
- String save(String path) throws FileNotFoundException, IOException {
- if (mFilename != null) {
- return mFilename;
- }
-
- mFilename = path + "/" + getId() + ".png";
- // Save the image to a local file
- final FileOutputStream out = new FileOutputStream(mFilename);
- mBitmap.compress(CompressFormat.PNG, 100, out);
- out.flush();
- out.close();
- return mFilename;
- }
-
- /**
- * Delete the overlay file
- */
- void invalidate() {
- if (mFilename != null) {
- new File(mFilename).delete();
- }
- }
-}
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.videoeditor;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+import android.graphics.Bitmap.CompressFormat;
+
+import java.io.DataOutputStream;
+import java.nio.ByteBuffer;
+import java.nio.IntBuffer;
+
+/**
+ * This class is used to overlay an image on top of a media item.
+ * {@hide}
+ */
+public class OverlayFrame extends Overlay {
+ /**
+ * Instance variables
+ */
+ private Bitmap mBitmap;
+ private String mFilename;
+ private String mBitmapFileName;
+
+ private int mOFWidth;
+ private int mOFHeight;
+
+ /**
+ * An object of this type cannot be instantiated by using the default
+ * constructor
+ */
+ @SuppressWarnings("unused")
+ private OverlayFrame() {
+ this(null, null, (String)null, 0, 0);
+ }
+
+ /**
+ * Constructor for an OverlayFrame
+ *
+ * @param mediaItem The media item owner
+ * @param overlayId The overlay id
+ * @param bitmap The bitmap to be used as an overlay. The size of the
+ * bitmap must be equal to the size of the media item to which it is
+ * added. The bitmap is typically a decoded PNG file.
+ * @param startTimeMs The overlay start time in milliseconds
+ * @param durationMs The overlay duration in milliseconds
+ *
+ * @throws IllegalArgumentException if the file type is not PNG or the
+ * startTimeMs and durationMs are incorrect.
+ */
+ public OverlayFrame(MediaItem mediaItem, String overlayId, Bitmap bitmap,
+ long startTimeMs,long durationMs) {
+ super(mediaItem, overlayId, startTimeMs, durationMs);
+ mBitmap = bitmap;
+ mFilename = null;
+ mBitmapFileName = null;
+ }
+
+ /**
+ * Constructor for an OverlayFrame. This constructor can be used to
+ * restore the overlay after it was saved internally by the video editor.
+ *
+ * @param mediaItem The media item owner
+ * @param overlayId The overlay id
+ * @param filename The file name that contains the overlay.
+ * @param startTimeMs The overlay start time in milliseconds
+ * @param durationMs The overlay duration in milliseconds
+ *
+ * @throws IllegalArgumentException if the file type is not PNG or the
+ * startTimeMs and durationMs are incorrect.
+ */
+ OverlayFrame(MediaItem mediaItem, String overlayId, String filename,
+ long startTimeMs,long durationMs) {
+ super(mediaItem, overlayId, startTimeMs, durationMs);
+ mBitmapFileName = filename;
+ mBitmap = BitmapFactory.decodeFile(mBitmapFileName);
+ mFilename = null;
+ }
+
+ /**
+ * Get the overlay bitmap.
+ *
+ * @return The overlay bitmap
+ */
+ public Bitmap getBitmap() {
+ return mBitmap;
+ }
+
+ /**
+ * Get the file name of the overlay bitmap image.
+ *
+ * @return The file name of the overlay bitmap PNG file
+ */
+ String getBitmapImageFileName() {
+ return mBitmapFileName;
+ }
+ /**
+ * Set the overlay bitmap.
+ *
+ * @param bitmap The overlay bitmap.
+ */
+ public void setBitmap(Bitmap bitmap) {
+ mBitmap = bitmap;
+ if (mFilename != null) {
+ /**
+ * Delete the file
+ */
+ new File(mFilename).delete();
+ /**
+ * Invalidate the filename
+ */
+ mFilename = null;
+ }
+
+ /**
+ * Invalidate the transitions if necessary
+ */
+ getMediaItem().invalidateTransitions(mStartTimeMs, mDurationMs);
+ }
+
+ /**
+ * Get the file name of this overlay
+ */
+ String getFilename() {
+ return mFilename;
+ }
+
+ /*
+ * Set the file name of this overlay
+ */
+ void setFilename(String filename) {
+ mFilename = filename;
+ }
+ /**
+ * Save the overlay to the project folder
+ *
+ * @param path The path where the overlay will be saved
+ *
+ * @return The filename
+ * @throws FileNotFoundException if the bitmap cannot be saved
+ * @throws IOException if the bitmap file cannot be saved
+ */
+ String save(String path) throws FileNotFoundException, IOException {
+ if (mFilename != null) {
+ return mFilename;
+ }
+
+ // Create the compressed PNG file
+ mBitmapFileName = path + "/" + "Overlay" + getId() + ".png";
+ if (!(new File(mBitmapFileName).exists())) {
+ final FileOutputStream out = new FileOutputStream (mBitmapFileName);
+ mBitmap.compress(CompressFormat.PNG, 100, out);
+ out.flush();
+ out.close();
+ }
+
+ mOFWidth = mBitmap.getWidth();
+ mOFHeight = mBitmap.getHeight();
+
+ mFilename = path + "/" + "Overlay" + getId() + ".rgb";
+ if (!(new File(mFilename).exists())) {
+ /**
+ * Save the image to a raw RGB file
+ */
+ final FileOutputStream fl = new FileOutputStream(mFilename);
+ final DataOutputStream dos = new DataOutputStream(fl);
+
+ /**
+ * Populate the RGB file with the bitmap pixel data, one row at a time
+ */
+ final int [] framingBuffer = new int[mOFWidth];
+ ByteBuffer byteBuffer = ByteBuffer.allocate(framingBuffer.length * 4);
+ IntBuffer intBuffer;
+
+ byte[] array = byteBuffer.array();
+ int tmp = 0;
+ while(tmp < mOFHeight) {
+ mBitmap.getPixels(framingBuffer,0,mOFWidth,0,tmp,mOFWidth,1);
+ intBuffer = byteBuffer.asIntBuffer();
+ intBuffer.put(framingBuffer,0,mOFWidth);
+ dos.write(array);
+ tmp += 1;
+ }
+ fl.flush();
+ fl.close();
+ }
+ return mFilename;
+ }
+
+ /**
+ * Get the OverlayFrame Height
+ */
+ int getOverlayFrameHeight() {
+ return mOFHeight;
+ }
+
+ /**
+ * Get the OverlayFrame Width
+ */
+ int getOverlayFrameWidth() {
+ return mOFWidth;
+ }
+
+ /*
+ * Set the OverlayFrame Height
+ */
+ void setOverlayFrameHeight(int height) {
+ mOFHeight = height;
+ }
+
+ /*
+ * Set the OverlayFrame Width
+ */
+ void setOverlayFrameWidth(int width) {
+ mOFWidth = width;
+ }
+ /**
+ * Delete the overlay file
+ */
+ void invalidate() {
+ if (mFilename != null) {
+ new File(mFilename).delete();
+ mFilename = null;
+ mBitmap.recycle();
+ mBitmap = null;
+ }
+ }
+}
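The new save() path above writes the overlay twice: once as a compressed PNG and once as a raw file of 4-byte pixels produced by viewing a ByteBuffer as an IntBuffer, one bitmap row at a time. A standalone sketch of that row-dump conversion, under the assumption that this big-endian ARGB layout is what the native layer expects (names are illustrative):

    import java.io.DataOutputStream;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.nio.ByteBuffer;

    import android.graphics.Bitmap;

    class RawPixelDump {
        /** Write a bitmap as consecutive rows of 4-byte pixels, as OverlayFrame.save() does. */
        static void dump(Bitmap bitmap, String path) throws IOException {
            final int width = bitmap.getWidth();
            final int[] row = new int[width];
            final ByteBuffer bytes = ByteBuffer.allocate(width * 4);
            final DataOutputStream out = new DataOutputStream(new FileOutputStream(path));
            try {
                for (int y = 0; y < bitmap.getHeight(); y++) {
                    bitmap.getPixels(row, 0, width, 0, y, width, 1);
                    bytes.asIntBuffer().put(row, 0, width); // a fresh int view at byte offset 0
                    out.write(bytes.array());
                }
            } finally {
                out.close();
            }
        }
    }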
diff --git a/media/java/android/media/videoeditor/Transition.java b/media/java/android/media/videoeditor/Transition.java
index 1c82742b08de..feec284bf2bf 100755
--- a/media/java/android/media/videoeditor/Transition.java
+++ b/media/java/android/media/videoeditor/Transition.java
@@ -1,210 +1,483 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.videoeditor;
-
-import java.io.File;
-
-/**
- * This class is super class for all transitions. Transitions (with the
- * exception of TransitionAtStart and TransitioAtEnd) can only be inserted
- * between media items.
- *
- * Adding a transition between MediaItems makes the
- * duration of the storyboard shorter by the duration of the Transition itself.
- * As a result, if the duration of the transition is larger than the smaller
- * duration of the two MediaItems associated with the Transition, an exception
- * will be thrown.
- *
- * During a transition, the audio track are cross-fading
- * automatically. {@hide}
- */
-public abstract class Transition {
- // The transition behavior
- private static final int BEHAVIOR_MIN_VALUE = 0;
- /** The transition starts slowly and speed up */
- public static final int BEHAVIOR_SPEED_UP = 0;
- /** The transition start fast and speed down */
- public static final int BEHAVIOR_SPEED_DOWN = 1;
- /** The transition speed is constant */
- public static final int BEHAVIOR_LINEAR = 2;
- /** The transition starts fast and ends fast with a slow middle */
- public static final int BEHAVIOR_MIDDLE_SLOW = 3;
- /** The transition starts slowly and ends slowly with a fast middle */
- public static final int BEHAVIOR_MIDDLE_FAST = 4;
-
- private static final int BEHAVIOR_MAX_VALUE = 4;
-
- // The unique id of the transition
- private final String mUniqueId;
-
- // The transition is applied at the end of this media item
- private final MediaItem mAfterMediaItem;
- // The transition is applied at the beginning of this media item
- private final MediaItem mBeforeMediaItem;
-
- // The transition behavior
- protected final int mBehavior;
-
- // The transition duration
- protected long mDurationMs;
-
- // The transition filename
- protected String mFilename;
-
- /**
- * An object of this type cannot be instantiated by using the default
- * constructor
- */
- @SuppressWarnings("unused")
- private Transition() {
- this(null, null, null, 0, 0);
- }
-
- /**
- * Constructor
- *
- * @param transitionId The transition id
- * @param afterMediaItem The transition is applied to the end of this
- * media item
- * @param beforeMediaItem The transition is applied to the beginning of
- * this media item
- * @param durationMs The duration of the transition in milliseconds
- * @param behavior The transition behavior
- */
- protected Transition(String transitionId, MediaItem afterMediaItem, MediaItem beforeMediaItem,
- long durationMs, int behavior) {
- if (behavior < BEHAVIOR_MIN_VALUE || behavior > BEHAVIOR_MAX_VALUE) {
- throw new IllegalArgumentException("Invalid behavior: " + behavior);
- }
- mUniqueId = transitionId;
- mAfterMediaItem = afterMediaItem;
- mBeforeMediaItem = beforeMediaItem;
- mDurationMs = durationMs;
- mBehavior = behavior;
- }
-
- /**
- * @return The of the transition
- */
- public String getId() {
- return mUniqueId;
- }
-
- /**
- * @return The media item at the end of which the transition is applied
- */
- public MediaItem getAfterMediaItem() {
- return mAfterMediaItem;
- }
-
- /**
- * @return The media item at the beginning of which the transition is applied
- */
- public MediaItem getBeforeMediaItem() {
- return mBeforeMediaItem;
- }
-
- /**
- * Set the duration of the transition.
- *
- * @param durationMs the duration of the transition in milliseconds
- */
- public void setDuration(long durationMs) {
- if (durationMs > getMaximumDuration()) {
- throw new IllegalArgumentException("The duration is too large");
- }
-
- mDurationMs = durationMs;
- invalidate();
- }
-
- /**
- * @return the duration of the transition in milliseconds
- */
- public long getDuration() {
- return mDurationMs;
- }
-
- /**
- * The duration of a transition cannot be greater than half of the minimum
- * duration of the bounding media items.
- *
- * @return The maximum duration of this transition
- */
- public long getMaximumDuration() {
- if (mAfterMediaItem == null) {
- return mBeforeMediaItem.getTimelineDuration() / 2;
- } else if (mBeforeMediaItem == null) {
- return mAfterMediaItem.getTimelineDuration() / 2;
- } else {
- return (Math.min(mAfterMediaItem.getTimelineDuration(),
- mBeforeMediaItem.getTimelineDuration()) / 2);
- }
- }
-
- /**
- * @return The behavior
- */
- public int getBehavior() {
- return mBehavior;
- }
-
- /**
- * Generate the video clip for the specified transition.
- * This method may block for a significant amount of time.
- *
- * Before the method completes execution it sets the mFilename to
- * the name of the newly generated transition video clip file.
- */
- abstract void generate();
-
- /**
- * Remove any resources associated with this transition
- */
- void invalidate() {
- if (mFilename != null) {
- new File(mFilename).delete();
- mFilename = null;
- }
- }
-
- /**
- * @return true if the transition is generated
- */
- boolean isGenerated() {
- return (mFilename != null);
- }
-
- /*
- * {@inheritDoc}
- */
- @Override
- public boolean equals(Object object) {
- if (!(object instanceof Transition)) {
- return false;
- }
- return mUniqueId.equals(((Transition)object).mUniqueId);
- }
-
- /*
- * {@inheritDoc}
- */
- @Override
- public int hashCode() {
- return mUniqueId.hashCode();
- }
-}
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.videoeditor;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+
+import android.media.videoeditor.MediaArtistNativeHelper.AlphaMagicSettings;
+import android.media.videoeditor.MediaArtistNativeHelper.AudioTransition;
+import android.media.videoeditor.MediaArtistNativeHelper.ClipSettings;
+import android.media.videoeditor.MediaArtistNativeHelper.EditSettings;
+import android.media.videoeditor.MediaArtistNativeHelper.EffectSettings;
+import android.media.videoeditor.MediaArtistNativeHelper.SlideTransitionSettings;
+import android.media.videoeditor.MediaArtistNativeHelper.TransitionSettings;
+import android.media.videoeditor.MediaArtistNativeHelper.VideoTransition;
+
+/**
+ * This class is the super class for all transitions. Transitions (with the
+ * exception of TransitionAtStart and TransitionAtEnd) can only be inserted
+ * between media items.
+ *
+ * Adding a transition between MediaItems makes the
+ * duration of the storyboard shorter by the duration of the Transition itself.
+ * As a result, if the duration of the transition is larger than the smaller
+ * duration of the two MediaItems associated with the Transition, an exception
+ * will be thrown.
+ *
+ * During a transition, the audio tracks are cross-faded
+ * automatically. {@hide}
+ */
+public abstract class Transition {
+ /**
+ * The transition behavior
+ */
+ private static final int BEHAVIOR_MIN_VALUE = 0;
+
+ /** The transition starts slowly and speeds up */
+ public static final int BEHAVIOR_SPEED_UP = 0;
+ /** The transition starts fast and slows down */
+ public static final int BEHAVIOR_SPEED_DOWN = 1;
+ /** The transition speed is constant */
+ public static final int BEHAVIOR_LINEAR = 2;
+ /** The transition starts fast and ends fast with a slow middle */
+ public static final int BEHAVIOR_MIDDLE_SLOW = 3;
+ /** The transition starts slowly and ends slowly with a fast middle */
+ public static final int BEHAVIOR_MIDDLE_FAST = 4;
+
+ private static final int BEHAVIOR_MAX_VALUE = 4;
+
+ /**
+ * The unique id of the transition
+ */
+ private final String mUniqueId;
+
+ /**
+ * The transition is applied at the end of this media item
+ */
+ private final MediaItem mAfterMediaItem;
+ /**
+ * The transition is applied at the beginning of this media item
+ */
+ private final MediaItem mBeforeMediaItem;
+
+ /**
+ * The transition behavior
+ */
+ protected final int mBehavior;
+
+ /**
+ * The transition duration
+ */
+ protected long mDurationMs;
+
+ /**
+ * The transition filename
+ */
+ protected String mFilename;
+
+ protected MediaArtistNativeHelper mNativeHelper;
+ /**
+ * An object of this type cannot be instantiated by using the default
+ * constructor
+ */
+ @SuppressWarnings("unused")
+ private Transition() {
+ this(null, null, null, 0, 0);
+ }
+
+ /**
+ * Constructor
+ *
+ * @param transitionId The transition id
+ * @param afterMediaItem The transition is applied to the end of this
+ * media item
+ * @param beforeMediaItem The transition is applied to the beginning of
+ * this media item
+ * @param durationMs The duration of the transition in milliseconds
+ * @param behavior The transition behavior
+ */
+ protected Transition(String transitionId, MediaItem afterMediaItem,
+ MediaItem beforeMediaItem,long durationMs,
+ int behavior) {
+ if (behavior < BEHAVIOR_MIN_VALUE || behavior > BEHAVIOR_MAX_VALUE) {
+ throw new IllegalArgumentException("Invalid behavior: " + behavior);
+ }
+ if ((afterMediaItem == null) && (beforeMediaItem == null)) {
+ throw new IllegalArgumentException("Null media items");
+ }
+ mUniqueId = transitionId;
+ mAfterMediaItem = afterMediaItem;
+ mBeforeMediaItem = beforeMediaItem;
+ mDurationMs = durationMs;
+ mBehavior = behavior;
+ mNativeHelper = null;
+ if (durationMs > getMaximumDuration()) {
+ throw new IllegalArgumentException("The duration is too large");
+ }
+ }
+
+ /**
+ * Get the ID of the transition.
+ *
+ * @return The ID of the transition
+ */
+ public String getId() {
+ return mUniqueId;
+ }
+
+ /**
+ * Get the media item at the end of which the transition is applied.
+ *
+ * @return The media item at the end of which the transition is applied
+ */
+ public MediaItem getAfterMediaItem() {
+ return mAfterMediaItem;
+ }
+
+ /**
+ * Get the media item at the beginning of which the transition is applied.
+ *
+ * @return The media item at the beginning of which the transition is
+ * applied
+ */
+ public MediaItem getBeforeMediaItem() {
+ return mBeforeMediaItem;
+ }
+
+ /**
+ * Set the duration of the transition.
+ *
+ * @param durationMs the duration of the transition in milliseconds
+ */
+ public void setDuration(long durationMs) {
+ if (durationMs > getMaximumDuration()) {
+ throw new IllegalArgumentException("The duration is too large");
+ }
+
+ mDurationMs = durationMs;
+ invalidate();
+ }
+
+ /**
+ * Get the duration of the transition.
+ *
+ * @return the duration of the transition in milliseconds
+ */
+ public long getDuration() {
+ return mDurationMs;
+ }
+
+ /**
+ * The duration of a transition cannot be greater than half of the minimum
+ * duration of the bounding media items.
+ *
+ * @return The maximum duration of this transition
+ */
+ public long getMaximumDuration() {
+ if (mAfterMediaItem == null) {
+ return mBeforeMediaItem.getTimelineDuration() / 2;
+ } else if (mBeforeMediaItem == null) {
+ return mAfterMediaItem.getTimelineDuration() / 2;
+ } else {
+ return (Math.min(mAfterMediaItem.getTimelineDuration(),
+ mBeforeMediaItem.getTimelineDuration()) / 2);
+ }
+ }
+
+ /**
+ * Get the behavior of the transition.
+ *
+ * @return The behavior
+ */
+ public int getBehavior() {
+ return mBehavior;
+ }
+
+ /**
+ * Get the transition data.
+ *
+ * @return The transition data as a TransitionSettings object
+ * {@link android.media.videoeditor.MediaArtistNativeHelper.TransitionSettings}
+ */
+ TransitionSettings getTransitionSettings() {
+ TransitionAlpha transitionAlpha = null;
+ TransitionSliding transitionSliding = null;
+ TransitionCrossfade transitionCrossfade = null;
+ TransitionFadeBlack transitionFadeBlack = null;
+ TransitionSettings transitionSetting = null;
+ transitionSetting = new TransitionSettings();
+ transitionSetting.duration = (int)getDuration();
+ if (this instanceof TransitionAlpha) {
+ transitionAlpha = (TransitionAlpha)this;
+ transitionSetting.videoTransitionType = VideoTransition.ALPHA_MAGIC;
+ transitionSetting.audioTransitionType = AudioTransition.CROSS_FADE;
+ transitionSetting.transitionBehaviour = mNativeHelper
+ .getVideoTransitionBehaviour(transitionAlpha.getBehavior());
+ transitionSetting.alphaSettings = new AlphaMagicSettings();
+ transitionSetting.slideSettings = null;
+ transitionSetting.alphaSettings.file = transitionAlpha.getPNGMaskFilename();
+ transitionSetting.alphaSettings.blendingPercent = transitionAlpha.getBlendingPercent();
+ transitionSetting.alphaSettings.invertRotation = transitionAlpha.isInvert();
+ transitionSetting.alphaSettings.rgbWidth = transitionAlpha.getRGBFileWidth();
+ transitionSetting.alphaSettings.rgbHeight = transitionAlpha.getRGBFileHeight();
+
+ } else if (this instanceof TransitionSliding) {
+ transitionSliding = (TransitionSliding)this;
+ transitionSetting.videoTransitionType = VideoTransition.SLIDE_TRANSITION;
+ transitionSetting.audioTransitionType = AudioTransition.CROSS_FADE;
+ transitionSetting.transitionBehaviour = mNativeHelper
+ .getVideoTransitionBehaviour(transitionSliding.getBehavior());
+ transitionSetting.alphaSettings = null;
+ transitionSetting.slideSettings = new SlideTransitionSettings();
+ transitionSetting.slideSettings.direction = mNativeHelper
+ .getSlideSettingsDirection(transitionSliding.getDirection());
+ } else if (this instanceof TransitionCrossfade) {
+ transitionCrossfade = (TransitionCrossfade)this;
+ transitionSetting.videoTransitionType = VideoTransition.CROSS_FADE;
+ transitionSetting.audioTransitionType = AudioTransition.CROSS_FADE;
+ transitionSetting.transitionBehaviour = mNativeHelper
+ .getVideoTransitionBehaviour(transitionCrossfade.getBehavior());
+ transitionSetting.alphaSettings = null;
+ transitionSetting.slideSettings = null;
+ } else if (this instanceof TransitionFadeBlack) {
+ transitionFadeBlack = (TransitionFadeBlack)this;
+ transitionSetting.videoTransitionType = VideoTransition.FADE_BLACK;
+ transitionSetting.audioTransitionType = AudioTransition.CROSS_FADE;
+ transitionSetting.transitionBehaviour = mNativeHelper
+ .getVideoTransitionBehaviour(transitionFadeBlack.getBehavior());
+ transitionSetting.alphaSettings = null;
+ transitionSetting.slideSettings = null;
+ }
+
+ return transitionSetting;
+ }
+
+ /**
+ * Checks whether the effects and overlays applied to a media item
+ * overlap with the transition on that media item.
+ *
+ * @param m The media item
+ * @param clipSettings The ClipSettings object
+ * @param clipNo The clip number (out of the two media items
+ * associated with the current transition) for which the effect
+ * clip should be generated
+ * @return List of effects that overlap with the transition
+ */
+ List<EffectSettings> isEffectandOverlayOverlapping(MediaItem m, ClipSettings clipSettings,
+ int clipNo) {
+ List<Effect> effects;
+ List<Overlay> overlays;
+ List<EffectSettings> effectSettings = new ArrayList<EffectSettings>();
+ EffectSettings tmpEffectSettings;
+
+ effects = m.getAllEffects();
+ for (Effect effect : effects) {
+ if (effect instanceof EffectColor) {
+ tmpEffectSettings = mNativeHelper.getEffectSettings((EffectColor)effect);
+ mNativeHelper.adjustEffectsStartTimeAndDuration(tmpEffectSettings,
+ clipSettings.beginCutTime, clipSettings.endCutTime);
+ if (tmpEffectSettings.duration != 0) {
+ if (m instanceof MediaVideoItem) {
+ tmpEffectSettings.fiftiesFrameRate = mNativeHelper
+ .GetClosestVideoFrameRate(((MediaVideoItem)m).getFps());
+ }
+ effectSettings.add(tmpEffectSettings);
+ }
+ }
+ }
+ overlays = m.getAllOverlays();
+ for (Overlay overlay : overlays) {
+ tmpEffectSettings = mNativeHelper.getOverlaySettings((OverlayFrame)overlay);
+ mNativeHelper.adjustEffectsStartTimeAndDuration(tmpEffectSettings,
+ clipSettings.beginCutTime, clipSettings.endCutTime);
+ if (tmpEffectSettings.duration != 0) {
+ effectSettings.add(tmpEffectSettings);
+ }
+ }
+ return effectSettings;
+ }
+
+ /**
+ * Generate the video clip for the specified transition. This method may
+ * block for a significant amount of time. Before the method completes
+ * execution it sets the mFilename to the name of the newly generated
+ * transition video clip file.
+ */
+ void generate() {
+ MediaItem m1 = this.getAfterMediaItem();
+ MediaItem m2 = this.getBeforeMediaItem();
+ ClipSettings clipSettings1 = new ClipSettings();
+ ClipSettings clipSettings2 = new ClipSettings();
+ TransitionSettings transitionSetting = null;
+ EditSettings editSettings = new EditSettings();
+ List<EffectSettings> effectSettings_clip1;
+ List<EffectSettings> effectSettings_clip2;
+
+ String output = null;
+ String effectClip1 = null;
+ String effectClip2 = null;
+
+ if (mNativeHelper == null) {
+ if (m1 != null)
+ mNativeHelper = m1.getNativeContext();
+ else if (m2 != null)
+ mNativeHelper = m2.getNativeContext();
+ }
+ transitionSetting = getTransitionSettings();
+ if (m1 != null && m2 != null) {
+ /* transition between media items */
+ clipSettings1 = m1.getClipSettings();
+ clipSettings2 = m2.getClipSettings();
+ clipSettings1.beginCutTime = (int)(clipSettings1.endCutTime -
+ this.mDurationMs);
+ clipSettings2.endCutTime = (int)(clipSettings2.beginCutTime +
+ this.mDurationMs);
+ /*
+ * Check how many effects and overlays overlap with transition and
+ * generate effect clip first if there is any overlap
+ */
+ effectSettings_clip1 = isEffectandOverlayOverlapping(m1, clipSettings1,1);
+ effectSettings_clip2 = isEffectandOverlayOverlapping(m2, clipSettings2,2);
+ for (int index = 0; index < effectSettings_clip2.size(); index++ ) {
+ effectSettings_clip2.get(index).startTime += this.mDurationMs;
+ }
+ editSettings.effectSettingsArray =
+ new EffectSettings[effectSettings_clip1.size()
+ + effectSettings_clip2.size()];
+ int i=0,j=0;
+ while (i < effectSettings_clip1.size()) {
+ editSettings.effectSettingsArray[j] = effectSettings_clip1.get(i);
+ i++;
+ j++;
+ }
+ i=0;
+ while (i < effectSettings_clip2.size()) {
+ editSettings.effectSettingsArray[j] = effectSettings_clip2.get(i);
+ i++;
+ j++;
+ }
+ } else if (m1 == null && m2 != null) {
+ /* begin transition at first media item */
+ m2.generateBlankFrame(clipSettings1);
+ clipSettings2 = m2.getClipSettings();
+ clipSettings1.endCutTime = (int)(this.mDurationMs + 50);
+ clipSettings2.endCutTime = (int)(clipSettings2.beginCutTime +
+ this.mDurationMs);
+ /*
+ * Check how many effects and overlays overlap with transition and
+ * generate effect clip first if there is any overlap
+ */
+ effectSettings_clip2 = isEffectandOverlayOverlapping(m2, clipSettings2,2);
+ for (int index = 0; index < effectSettings_clip2.size(); index++ ) {
+ effectSettings_clip2.get(index).startTime += this.mDurationMs;
+ }
+ editSettings.effectSettingsArray = new EffectSettings[effectSettings_clip2.size()];
+ int i=0, j=0;
+ while (i < effectSettings_clip2.size()) {
+ editSettings.effectSettingsArray[j] = effectSettings_clip2.get(i);
+ i++;
+ j++;
+ }
+ } else if (m1 != null && m2 == null) {
+ /* end transition at last media item */
+ clipSettings1 = m1.getClipSettings();
+ m1.generateBlankFrame(clipSettings2);
+ clipSettings1.beginCutTime = (int)(clipSettings1.endCutTime -
+ this.mDurationMs);
+ clipSettings2.endCutTime = (int)(this.mDurationMs + 50);
+ /*
+ * Check how many effects and overlays overlap with transition and
+ * generate effect clip first if there is any overlap
+ */
+ effectSettings_clip1 = isEffectandOverlayOverlapping(m1, clipSettings1,1);
+ editSettings.effectSettingsArray = new EffectSettings[effectSettings_clip1.size()];
+ int i=0,j=0;
+ while (i < effectSettings_clip1.size()) {
+ editSettings.effectSettingsArray[j] = effectSettings_clip1.get(i);
+ i++;
+ j++;
+ }
+ }
+
+ editSettings.clipSettingsArray = new ClipSettings[2];
+ editSettings.clipSettingsArray[0] = clipSettings1;
+ editSettings.clipSettingsArray[1] = clipSettings2;
+ editSettings.backgroundMusicSettings = null;
+ editSettings.transitionSettingsArray = new TransitionSettings[1];
+ editSettings.transitionSettingsArray[0] = transitionSetting;
+ output = mNativeHelper.generateTransitionClip(editSettings, mUniqueId,
+ m1, m2,this);
+ setFilename(output);
+ }
+
+
+ /**
+ * Set the transition filename.
+ */
+ void setFilename(String filename) {
+ mFilename = filename;
+ }
+
+ /**
+ * Get the transition filename.
+ */
+ String getFilename() {
+ return mFilename;
+ }
+
+ /**
+ * Remove any resources associated with this transition
+ */
+ void invalidate() {
+ if (mFilename != null) {
+ new File(mFilename).delete();
+ mFilename = null;
+ }
+ }
+
+ /**
+ * Check if the transition is generated.
+ *
+ * @return true if the transition is generated
+ */
+ boolean isGenerated() {
+ return (mFilename != null);
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public boolean equals(Object object) {
+ if (!(object instanceof Transition)) {
+ return false;
+ }
+ return mUniqueId.equals(((Transition)object).mUniqueId);
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public int hashCode() {
+ return mUniqueId.hashCode();
+ }
+}
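Transition.generate() above stitches the two bounding clips, any overlapping effects and overlays, and the transition settings into one EditSettings call, but from the public API the key constraint is simply that a transition may not exceed half of the shorter neighbouring clip. A hedged sketch of respecting that limit when creating a crossfade (TransitionCrossfade and BEHAVIOR_LINEAR are taken from this patch):

    import android.media.videoeditor.MediaItem;
    import android.media.videoeditor.Transition;
    import android.media.videoeditor.TransitionCrossfade;

    class TransitionExample {
        /** Build a crossfade no longer than half of the shorter neighbouring clip. */
        static Transition crossfade(String id, MediaItem after, MediaItem before, long wantedMs) {
            long maxMs = Math.min(after.getTimelineDuration(),
                    before.getTimelineDuration()) / 2;
            return new TransitionCrossfade(id, after, before,
                    Math.min(wantedMs, maxMs), Transition.BEHAVIOR_LINEAR);
        }
    }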
diff --git a/media/java/android/media/videoeditor/TransitionAlpha.java b/media/java/android/media/videoeditor/TransitionAlpha.java
index 2bb16d2038e7..f7d17cb4cec4 100755
--- a/media/java/android/media/videoeditor/TransitionAlpha.java
+++ b/media/java/android/media/videoeditor/TransitionAlpha.java
@@ -1,122 +1,212 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.videoeditor;
-
-import java.io.File;
-
-
-
-/**
- * This class allows to render an "alpha blending" transition according to a
- * bitmap mask. The mask shows the shape of the transition all along the
- * duration of the transition: just before the transition, video 1 is fully
- * displayed. When the transition starts, as the time goes on, pixels of video 2
- * replace pixels of video 1 according to the gray scale pixel value of the
- * mask.
- * {@hide}
- */
-public class TransitionAlpha extends Transition {
- /** This is the input JPEG file for the mask */
- private final String mMaskFilename;
-
- /**
- * This is percentage (between 0 and 100) of blending between video 1 and
- * video 2 if this value equals 0, then the mask is strictly applied if this
- * value equals 100, then the mask is not at all applied (no transition
- * effect)
- */
- private final int mBlendingPercent;
-
- /**
- * If true, this value inverts the direction of the mask: white pixels of
- * the mask show video 2 pixels first black pixels of the mask show video 2
- * pixels last.
- */
- private final boolean mIsInvert;
-
- /**
- * An object of this type cannot be instantiated by using the default
- * constructor
- */
- @SuppressWarnings("unused")
- private TransitionAlpha() {
- this(null, null, null, 0, 0, null, 0, false);
- }
-
- /**
- * Constructor
- *
- * @param transitionId The transition id
- * @param afterMediaItem The transition is applied to the end of this media
- * item
- * @param beforeMediaItem The transition is applied to the beginning of this
- * media item
- * @param durationMs duration of the transition in milliseconds
- * @param behavior behavior is one of the behavior defined in Transition
- * class
- * @param maskFilename JPEG file name. The dimension of the image
- * corresponds to 720p (16:9 aspect ratio). Mask files are
- * shared between video editors and can be created in the
- * projects folder (the parent folder for all projects).
- * @param blendingPercent The blending percent applied
- * @param invert true to invert the direction of the alpha blending
- *
- * @throws IllegalArgumentException if behavior is not supported, or if
- * direction are not supported.
- */
- public TransitionAlpha(String transitionId, MediaItem afterMediaItem,
- MediaItem beforeMediaItem, long durationMs, int behavior, String maskFilename,
- int blendingPercent, boolean invert) {
- super(transitionId, afterMediaItem, beforeMediaItem, durationMs, behavior);
-
- if (!new File(maskFilename).exists()) {
- throw new IllegalArgumentException("Invalid mask file name: " + maskFilename);
- }
-
- mMaskFilename = maskFilename;
- mBlendingPercent = blendingPercent;
- mIsInvert = invert;
- }
-
- /**
- * @return The blending percentage
- */
- public int getBlendingPercent() {
- return mBlendingPercent;
- }
-
- /**
- * @return The mask filename
- */
- public String getMaskFilename() {
- return mMaskFilename;
- }
-
- /**
- * @return true if the direction of the alpha blending is inverted
- */
- public boolean isInvert() {
- return mIsInvert;
- }
-
- /*
- * {@inheritDoc}
- */
- @Override
- public void generate() {
- }
-}
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.videoeditor;
+
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+import java.io.DataOutputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.IntBuffer;
+
+/**
+ * This class allows to render an "alpha blending" transition according to a
+ * bitmap mask. The mask shows the shape of the transition all along the
+ * duration of the transition: just before the transition, video 1 is fully
+ * displayed. When the transition starts, as the time goes on, pixels of video 2
+ * replace pixels of video 1 according to the gray scale pixel value of the
+ * mask.
+ * {@hide}
+ */
+public class TransitionAlpha extends Transition {
+ /** This is the input JPEG file for the mask */
+ private final String mMaskFilename;
+
+ /**
+ * This is the percentage (between 0 and 100) of blending between video 1
+ * and video 2. If this value equals 0, the mask is strictly applied; if it
+ * equals 100, the mask is not applied at all (no transition effect).
+ */
+ private final int mBlendingPercent;
+
+ /**
+ * If true, this value inverts the direction of the mask: white pixels of
+ * the mask show video 2 pixels first; black pixels of the mask show video 2
+ * pixels last.
+ */
+ private final boolean mIsInvert;
+
+
+ private int mWidth;
+ private int mHeight;
+ private String mRGBMaskFile;
+
+ /**
+ * An object of this type cannot be instantiated by using the default
+ * constructor
+ */
+ @SuppressWarnings("unused")
+ private TransitionAlpha() {
+ this(null, null, null, 0, 0, null, 0, false);
+ }
+
+ /**
+ * Constructor
+ *
+ * @param transitionId The transition id
+ * @param afterMediaItem The transition is applied to the end of this media
+ * item
+ * @param beforeMediaItem The transition is applied to the beginning of this
+ * media item
+ * @param durationMs duration of the transition in milliseconds
+ * @param behavior behavior is one of the behavior defined in Transition
+ * class
+ * @param maskFilename JPEG file name. The dimension of the image
+ * corresponds to 720p (16:9 aspect ratio). Mask files are
+ * shared between video editors and can be created in the
+ * projects folder (the parent folder for all projects).
+ * @param blendingPercent The blending percent applied
+ * @param invert true to invert the direction of the alpha blending
+ * @throws IllegalArgumentException if behavior is not supported, or if
+ * the direction is not supported.
+ */
+ public TransitionAlpha(String transitionId, MediaItem afterMediaItem,
+ MediaItem beforeMediaItem, long durationMs, int behavior,
+ String maskFilename, int blendingPercent, boolean invert) {
+ super(transitionId, afterMediaItem, beforeMediaItem, durationMs, behavior);
+
+ /**
+ * Generate an RGB file for the supplied mask file
+ */
+ final BitmapFactory.Options dbo = new BitmapFactory.Options();
+ dbo.inJustDecodeBounds = true;
+ if (!new File(maskFilename).exists()) {
+ throw new IllegalArgumentException("File not found: " + maskFilename);
+ }
+ BitmapFactory.decodeFile(maskFilename, dbo);
+
+ mWidth = dbo.outWidth;
+ mHeight = dbo.outHeight;
+
+ if (afterMediaItem != null) {
+ mNativeHelper = afterMediaItem.getNativeContext();
+ } else {
+ mNativeHelper = beforeMediaItem.getNativeContext();
+ }
+
+
+ mRGBMaskFile = mNativeHelper.getProjectPath() + "/" + "mask" + transitionId + ".rgb";
+
+ FileOutputStream fl = null;
+
+ try {
+ fl = new FileOutputStream(mRGBMaskFile);
+ } catch (IOException e) {
+ /* fl stays null; the RGB mask generation below is skipped */
+ }
+ final DataOutputStream dos = new DataOutputStream(fl);
+
+ if (fl != null) {
+ /**
+ * Write to rgb file
+ */
+ Bitmap imageBitmap = BitmapFactory.decodeFile(maskFilename);
+ final int [] framingBuffer = new int[mWidth];
+ ByteBuffer byteBuffer = ByteBuffer.allocate(framingBuffer.length * 4);
+ IntBuffer intBuffer;
+
+ byte[] array = byteBuffer.array();
+ int tmp = 0;
+ while (tmp < mHeight) {
+ imageBitmap.getPixels(framingBuffer, 0, mWidth, 0, tmp,mWidth, 1);
+ intBuffer = byteBuffer.asIntBuffer();
+ intBuffer.put(framingBuffer,0,mWidth);
+ try {
+ dos.write(array);
+ } catch (IOException e) {
+ /* ignore the write error for this row */
+ }
+ tmp += 1;
+ }
+
+ imageBitmap.recycle();
+ try {
+ fl.close();
+ } catch (IOException e) {
+ /* ignore errors while closing the RGB mask file */
+ }
+ }
+
+ /**
+ * Capture the details
+ */
+ mMaskFilename = maskFilename;
+ mBlendingPercent = blendingPercent;
+ mIsInvert = invert;
+ }
+
+ public int getRGBFileWidth() {
+ return mWidth;
+ }
+
+ public int getRGBFileHeight() {
+ return mHeight;
+ }
+
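+ /**
+ * Despite the name, this returns the path of the RGB mask file that the
+ * constructor generated from the supplied mask image.
+ */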
+ public String getPNGMaskFilename() {
+ return mRGBMaskFile;
+ }
+
+ /**
+ * Get the blending percentage
+ *
+ * @return The blending percentage
+ */
+ public int getBlendingPercent() {
+ return mBlendingPercent;
+ }
+
+ /**
+ * Get the filename of the mask.
+ *
+ * @return The mask filename
+ */
+ public String getMaskFilename() {
+ return mMaskFilename;
+ }
+
+ /**
+ * Check if the alpha blending direction is inverted.
+ *
+ * @return true if the direction of the alpha blending is inverted
+ */
+ public boolean isInvert() {
+ return mIsInvert;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ public void generate() {
+ super.generate();
+ }
+}
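The rewritten constructor above converts the supplied mask image into a project-local RGB file at construction time; from the caller's side nothing changes besides the existing argument list. A brief usage sketch (the id, duration and mask path are illustrative only):

    import android.media.videoeditor.MediaItem;
    import android.media.videoeditor.Transition;
    import android.media.videoeditor.TransitionAlpha;

    class AlphaTransitionExample {
        /** Wipe from clip A to clip B according to a grayscale mask image. */
        static TransitionAlpha maskedWipe(MediaItem a, MediaItem b, String maskPath) {
            return new TransitionAlpha("alpha1", a, b, 1500 /* ms */,
                    Transition.BEHAVIOR_LINEAR, maskPath,
                    50 /* blending percent */, false /* don't invert the mask */);
        }
    }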
diff --git a/media/java/android/media/videoeditor/TransitionCrossfade.java b/media/java/android/media/videoeditor/TransitionCrossfade.java
index f8223e88e694..417c64e06edd 100755
--- a/media/java/android/media/videoeditor/TransitionCrossfade.java
+++ b/media/java/android/media/videoeditor/TransitionCrossfade.java
@@ -1,60 +1,62 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.videoeditor;
-
-
-/**
- * This class allows to render a crossfade (dissolve) effect transition between
- * two videos
- * {@hide}
- */
-public class TransitionCrossfade extends Transition {
- /**
- * An object of this type cannot be instantiated by using the default
- * constructor
- */
- @SuppressWarnings("unused")
- private TransitionCrossfade() {
- this(null, null, null, 0, 0);
- }
-
- /**
- * Constructor
- *
- * @param transitionId The transition id
- * @param afterMediaItem The transition is applied to the end of this
- * media item
- * @param beforeMediaItem The transition is applied to the beginning of
- * this media item
- * @param durationMs duration of the transition in milliseconds
- * @param behavior behavior is one of the behavior defined in Transition
- * class
- *
- * @throws IllegalArgumentException if behavior is not supported.
- */
- public TransitionCrossfade(String transitionId, MediaItem afterMediaItem,
- MediaItem beforeMediaItem, long durationMs, int behavior) {
- super(transitionId, afterMediaItem, beforeMediaItem, durationMs, behavior);
- }
-
- /*
- * {@inheritDoc}
- */
- @Override
- void generate() {
- }
-}
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.videoeditor;
+
+
+/**
+ * This class renders a crossfade (dissolve) effect transition between
+ * two videos
+ * {@hide}
+ */
+public class TransitionCrossfade extends Transition {
+ /**
+ * An object of this type cannot be instantiated by using the default
+ * constructor
+ */
+ @SuppressWarnings("unused")
+ private TransitionCrossfade() {
+ this(null, null, null, 0, 0);
+ }
+
+ /**
+ * Constructor
+ *
+ * @param transitionId The transition id
+ * @param afterMediaItem The transition is applied to the end of this
+ * media item
+ * @param beforeMediaItem The transition is applied to the beginning of
+ * this media item
+ * @param durationMs duration of the transition in milliseconds
+ * @param behavior behavior is one of the behavior defined in Transition
+ * class
+ *
+ * @throws IllegalArgumentException if behavior is not supported.
+ */
+ public TransitionCrossfade(String transitionId, MediaItem afterMediaItem,
+ MediaItem beforeMediaItem, long durationMs, int behavior) {
+ super(transitionId, afterMediaItem, beforeMediaItem, durationMs, behavior);
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ void generate() {
+ super.generate();
+ }
+}
diff --git a/media/java/android/media/videoeditor/TransitionFadeBlack.java b/media/java/android/media/videoeditor/TransitionFadeBlack.java
index a9bf4cec06fd..da07cf0fab48 100755
--- a/media/java/android/media/videoeditor/TransitionFadeBlack.java
+++ b/media/java/android/media/videoeditor/TransitionFadeBlack.java
@@ -1,60 +1,62 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.videoeditor;
-
-
-/**
- * This class is used to render a fade to black and fade from black transition
- * between two media items.
- * {@hide}
- */
-public class TransitionFadeBlack extends Transition {
- /**
- * An object of this type cannot be instantiated by using the default
- * constructor
- */
- @SuppressWarnings("unused")
- private TransitionFadeBlack() {
- this(null, null, null, 0, 0);
- }
-
- /**
- * Constructor
- *
- * @param transitionId The transition id
- * @param afterMediaItem The transition is applied to the end of this
- * media item
- * @param beforeMediaItem The transition is applied to the beginning of
- * this media item
- * @param durationMs duration of the transition
- * @param behavior behavior is one of the behavior defined in Transition
- * class
- *
- * @throws IllegalArgumentException if behavior is not supported.
- */
- public TransitionFadeBlack(String transitionId, MediaItem afterMediaItem,
- MediaItem beforeMediaItem, long durationMs, int behavior) {
- super(transitionId, afterMediaItem, beforeMediaItem, durationMs, behavior);
- }
-
- /*
- * {@inheritDoc}
- */
- @Override
- void generate() {
- }
-}
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.videoeditor;
+
+
+/**
+ * This class is used to render a fade to black and fade from black transition
+ * between two media items.
+ * {@hide}
+ */
+public class TransitionFadeBlack extends Transition {
+ /**
+ * An object of this type cannot be instantiated by using the default
+ * constructor
+ */
+ @SuppressWarnings("unused")
+ private TransitionFadeBlack() {
+ this(null, null, null, 0, 0);
+ }
+
+ /**
+ * Constructor
+ *
+ * @param transitionId The transition id
+ * @param afterMediaItem The transition is applied to the end of this
+ * media item
+ * @param beforeMediaItem The transition is applied to the beginning of
+ * this media item
+ * @param durationMs duration of the transition in milliseconds
+ * @param behavior behavior is one of the behaviors defined in the
+ * Transition class
+ *
+ * @throws IllegalArgumentException if behavior is not supported.
+ */
+ public TransitionFadeBlack(String transitionId, MediaItem afterMediaItem,
+ MediaItem beforeMediaItem, long durationMs, int behavior) {
+ super(transitionId, afterMediaItem, beforeMediaItem, durationMs, behavior);
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ void generate() {
+ super.generate();
+ }
+}
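
A similarly hedged sketch for the fade-through-black case, reusing the assumed editor, item ids and behavior constant from the previous example:

    // Swap the previously added crossfade for a 1000 ms fade through black.
    editor.removeTransition("cf1");
    Transition fade = new TransitionFadeBlack("fade1",
            editor.getMediaItem("item1"), editor.getMediaItem("item2"),
            1000, Transition.BEHAVIOR_LINEAR);
    editor.addTransition(fade);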
diff --git a/media/java/android/media/videoeditor/TransitionSliding.java b/media/java/android/media/videoeditor/TransitionSliding.java
index cc9f4b287df2..57610ab05634 100755
--- a/media/java/android/media/videoeditor/TransitionSliding.java
+++ b/media/java/android/media/videoeditor/TransitionSliding.java
@@ -1,82 +1,95 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package android.media.videoeditor;
-
-/**
- * This class allows to create sliding transitions
- * {@hide}
- */
-public class TransitionSliding extends Transition {
-
- /** Video 1 is pushed to the right while video 2 is coming from left */
- public final static int DIRECTION_RIGHT_OUT_LEFT_IN = 0;
- /** Video 1 is pushed to the left while video 2 is coming from right */
- public static final int DIRECTION_LEFT_OUT_RIGHT_IN = 1;
- /** Video 1 is pushed to the top while video 2 is coming from bottom */
- public static final int DIRECTION_TOP_OUT_BOTTOM_IN = 2;
- /** Video 1 is pushed to the bottom while video 2 is coming from top */
- public static final int DIRECTION_BOTTOM_OUT_TOP_IN = 3;
-
- // The sliding transitions
- private final int mSlidingDirection;
-
- /**
- * An object of this type cannot be instantiated by using the default
- * constructor
- */
- @SuppressWarnings("unused")
- private TransitionSliding() {
- this(null, null, null, 0, 0, 0);
- }
-
- /**
- * Constructor
- *
- * @param transitionId The transition id
- * @param afterMediaItem The transition is applied to the end of this
- * media item
- * @param beforeMediaItem The transition is applied to the beginning of
- * this media item
- * @param durationMs duration of the transition in milliseconds
- * @param behavior behavior is one of the behavior defined in Transition
- * class
- * @param direction direction shall be one of the supported directions like
- * RIGHT_OUT_LEFT_IN
- *
- * @throws IllegalArgumentException if behavior is not supported.
- */
- public TransitionSliding(String transitionId, MediaItem afterMediaItem,
- MediaItem beforeMediaItem, long durationMs, int behavior, int direction) {
- super(transitionId, afterMediaItem, beforeMediaItem, durationMs, behavior);
- mSlidingDirection = direction;
- }
-
- /**
- * @return The sliding direction
- */
- public int getDirection() {
- return mSlidingDirection;
- }
-
- /*
- * {@inheritDoc}
- */
- @Override
- void generate() {
- }
-}
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.videoeditor;
+
+/**
+ * This class is used to create sliding transitions
+ * {@hide}
+ */
+public class TransitionSliding extends Transition {
+
+ /** Video 1 is pushed to the right while video 2 is coming from left */
+ public final static int DIRECTION_RIGHT_OUT_LEFT_IN = 0;
+ /** Video 1 is pushed to the left while video 2 is coming from right */
+ public static final int DIRECTION_LEFT_OUT_RIGHT_IN = 1;
+ /** Video 1 is pushed to the top while video 2 is coming from bottom */
+ public static final int DIRECTION_TOP_OUT_BOTTOM_IN = 2;
+ /** Video 1 is pushed to the bottom while video 2 is coming from top */
+ public static final int DIRECTION_BOTTOM_OUT_TOP_IN = 3;
+
+ // The sliding direction
+ private final int mSlidingDirection;
+
+ /**
+ * An object of this type cannot be instantiated by using the default
+ * constructor
+ */
+ @SuppressWarnings("unused")
+ private TransitionSliding() {
+ this(null, null, null, 0, 0, 0);
+ }
+
+ /**
+ * Constructor
+ *
+ * @param transitionId The transition id
+ * @param afterMediaItem The transition is applied to the end of this
+ * media item
+ * @param beforeMediaItem The transition is applied to the beginning of
+ * this media item
+ * @param durationMs duration of the transition in milliseconds
+ * @param behavior behavior is one of the behaviors defined in the
+ * Transition class
+ * @param direction direction shall be one of the supported directions such
+ * as DIRECTION_RIGHT_OUT_LEFT_IN
+ *
+ * @throws IllegalArgumentException if behavior or direction is not supported.
+ */
+ public TransitionSliding(String transitionId, MediaItem afterMediaItem,
+ MediaItem beforeMediaItem, long durationMs, int behavior,
+ int direction) {
+ super(transitionId, afterMediaItem, beforeMediaItem, durationMs, behavior);
+ switch (direction) {
+ case DIRECTION_RIGHT_OUT_LEFT_IN:
+ case DIRECTION_LEFT_OUT_RIGHT_IN:
+ case DIRECTION_TOP_OUT_BOTTOM_IN:
+ case DIRECTION_BOTTOM_OUT_TOP_IN:
+ break;
+
+ default:
+ throw new IllegalArgumentException("Invalid direction");
+ }
+ mSlidingDirection = direction;
+ }
+
+ /**
+ * Get the sliding direction.
+ *
+ * @return The sliding direction
+ */
+ public int getDirection() {
+ return mSlidingDirection;
+ }
+
+ /*
+ * {@inheritDoc}
+ */
+ @Override
+ void generate() {
+ super.generate();
+ }
+}
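
For the sliding transition the extra direction argument must be one of the four DIRECTION_* constants above; anything else is rejected before the field is set. A hedged sketch with the same assumed editor and item ids:

    // item1 is pushed out to the left while item2 slides in from the right.
    Transition slide = new TransitionSliding("slide1",
            editor.getMediaItem("item1"), editor.getMediaItem("item2"),
            2000, Transition.BEHAVIOR_LINEAR,
            TransitionSliding.DIRECTION_LEFT_OUT_RIGHT_IN);
    editor.addTransition(slide);

    // Passing an unsupported value such as 42 would throw
    // IllegalArgumentException("Invalid direction") in the constructor.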
diff --git a/media/java/android/media/videoeditor/VideoEditor.java b/media/java/android/media/videoeditor/VideoEditor.java
index 37bb661216b3..d081e6e842ae 100755
--- a/media/java/android/media/videoeditor/VideoEditor.java
+++ b/media/java/android/media/videoeditor/VideoEditor.java
@@ -1,564 +1,575 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media.videoeditor;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.concurrent.CancellationException;
-
-import android.view.SurfaceHolder;
-
-/**
- * This is the interface implemented by classes which provide video editing
- * functionality. The VideoEditor implementation class manages all input and
- * output files. Unless specifically mentioned, methods are blocking. A
- * typical editing session may consist of the following sequence of operations:
- *
- * <ul>
- * <li>Add a set of MediaItems</li>
- * <li>Apply a set of Transitions between MediaItems</li>
- * <li>Add Effects and Overlays to media items</li>
- * <li>Preview the movie at any time</li>
- * <li>Save the VideoEditor implementation class internal state</li>
- * <li>Release the VideoEditor implementation class instance by invoking
- * {@link #release()}
- * </ul>
- * The internal VideoEditor state consists of the following elements:
- * <ul>
- * <li>Ordered & trimmed MediaItems</li>
- * <li>Transition video clips</li>
- * <li>Overlays</li>
- * <li>Effects</li>
- * <li>Audio waveform for the background audio and MediaItems</li>
- * <li>Project thumbnail</li>
- * <li>Last exported movie.</li>
- * <li>Other project specific data such as the current aspect ratio.</li>
- * </ul>
- * {@hide}
- */
-public interface VideoEditor {
- // The file name of the project thumbnail
- public static final String THUMBNAIL_FILENAME = "thumbnail.jpg";
-
- // Use this value instead of the specific end of the storyboard timeline
- // value.
- public final static int DURATION_OF_STORYBOARD = -1;
-
- /**
- * This listener interface is used by the VideoEditor to emit preview
- * progress notifications. This callback should be invoked after the
- * number of frames specified by
- * {@link #startPreview(SurfaceHolder surfaceHolder, long fromMs,
- * int callbackAfterFrameCount, PreviewProgressListener listener)}
- */
- public interface PreviewProgressListener {
- /**
- * This method notifies the listener of the current time position while
- * previewing a project.
- *
- * @param videoEditor The VideoEditor instance
- * @param timeMs The current preview position (expressed in milliseconds
- * since the beginning of the storyboard timeline).
- * @param end true if the end of the timeline was reached
- */
- public void onProgress(VideoEditor videoEditor, long timeMs, boolean end);
- }
-
- /**
- * This listener interface is used by the VideoEditor to emit export status
- * notifications.
- * {@link #export(String filename, ExportProgressListener listener, int height, int bitrate)}
- */
- public interface ExportProgressListener {
- /**
- * This method notifies the listener of the progress status of a export
- * operation.
- *
- * @param videoEditor The VideoEditor instance
- * @param filename The name of the file which is in the process of being
- * exported.
- * @param progress The progress in %. At the beginning of the export, this
- * value is set to 0; at the end, the value is set to 100.
- */
- public void onProgress(VideoEditor videoEditor, String filename, int progress);
- }
-
- /**
- * This listener interface is used by the VideoEditor to emit export status
- * notifications.
- * {@link #generatePreview(MediaProcessingProgressListener listener)}
- */
- public interface MediaProcessingProgressListener {
- // Values used for the action parameter
- public static final int ACTION_ENCODE = 1;
- public static final int ACTION_DECODE = 2;
-
- /**
- * This method notifies the listener of the progress status of
- * processing a media object such as a Transition, AudioTrack or a
- * media image item (when Ken Burns effect is applied).
- * This method may be called maximum 100 times for one operation.
- *
- * @param object The object that is being processed such as a
- * Transition or AudioTrack
- * @param action The type of processing being performed
- * @param progress The progress in %. At the beginning of the operation,
- * this value is set to 0; at the end, the value is set to 100.
- */
- public void onProgress(Object item, int action, int progress);
- }
-
- /**
- * @return The path where the VideoEditor stores all files related to the
- * project
- */
- public String getPath();
-
- /**
- * This method releases all in-memory resources used by the VideoEditor
- * instance. All pending operations such as preview, export and extract
- * audio waveform must be canceled.
- */
- public void release();
-
- /**
- * Persist the current internal state of VideoEditor to the project path.
- * The VideoEditor state may be restored by invoking the
- * {@link VideoEditorFactory#load(String)} method. This method does not
- * release the internal in-memory state of the VideoEditor. To release
- * the in-memory state of the VideoEditor the {@link #release()} method
- * must be invoked.
- *
- * Pending transition generations must be allowed to complete before the
- * state is saved.
- * Pending audio waveform generations must be allowed to complete.
- * Pending export operations must be allowed to continue.
- */
- public void save() throws IOException;
-
- /**
- * Create the output movie based on all media items added and the applied
- * storyboard items. This method can take a long time to execute and is
- * blocking. The application will receive progress notifications via the
- * ExportProgressListener. Specific implementations may not support multiple
- * simultaneous export operations. Note that invoking methods which would
- * change the contents of the output movie throw an IllegalStateException
- * while an export operation is pending.
- *
- * The audio and video codecs are automatically selected by the underlying
- * implementation.
- *
- * @param filename The output file name (including the full path)
- * @param height The height of the output video file. The supported values
- * for height are described in the MediaProperties class, for
- * example: HEIGHT_480. The width will be automatically computed
- * according to the aspect ratio provided by
- * {@link #setAspectRatio(int)}
- * @param bitrate The bitrate of the output video file. This is approximate
- * value for the output movie. Supported bitrate values are
- * described in the MediaProperties class for example:
- * BITRATE_384K
- * @param listener The listener for progress notifications. Use null if
- * export progress notifications are not needed.
- * @throws IllegalArgumentException if height or bitrate are not supported
- * or if the audio or video codecs are not supported
- * @throws IOException if output file cannot be created
- * @throws IllegalStateException if a preview or an export is in progress or
- * if no MediaItem has been added
- * @throws CancellationException if export is canceled by calling
- * {@link #cancelExport()}
- * @throws UnsupportOperationException if multiple simultaneous export() are
- * not allowed
- */
- public void export(String filename, int height, int bitrate, ExportProgressListener listener)
- throws IOException;
-
- /**
- * Create the output movie based on all media items added and the applied
- * storyboard items. This method can take a long time to execute and is
- * blocking. The application will receive progress notifications via the
- * ExportProgressListener. Specific implementations may not support multiple
- * simultaneous export operations. Note that invoking methods which would
- * change the contents of the output movie throw an IllegalStateException
- * while an export operation is pending.
- *
- * @param filename The output file name (including the full path)
- * @param height The height of the output video file. The supported values
- * for height are described in the MediaProperties class, for
- * example: HEIGHT_480. The width will be automatically computed
- * according to the aspect ratio provided by
- * {@link #setAspectRatio(int)}
- * @param bitrate The bitrate of the output video file. This is approximate
- * value for the output movie. Supported bitrate values are
- * described in the MediaProperties class for example:
- * BITRATE_384K
- * @param audioCodec The audio codec to be used for the export. The audio
- * codec values are defined in the MediaProperties class (e.g.
- * ACODEC_AAC_LC). Note that not all audio codec types are
- * supported for export purposes.
- * @param videoCodec The video codec to be used for the export. The video
- * codec values are defined in the MediaProperties class (e.g.
- * VCODEC_H264BP). Note that not all video codec types are
- * supported for export purposes.
- * @param listener The listener for progress notifications. Use null if
- * export progress notifications are not needed.
- * @throws IllegalArgumentException if height or bitrate are not supported
- * or if the audio or video codecs are not supported
- * @throws IOException if output file cannot be created
- * @throws IllegalStateException if a preview or an export is in progress or
- * if no MediaItem has been added
- * @throws CancellationException if export is canceled by calling
- * {@link #cancelExport()}
- * @throws UnsupportOperationException if multiple simultaneous export() are
- * not allowed
- */
- public void export(String filename, int height, int bitrate, int audioCodec, int videoCodec,
- ExportProgressListener listener) throws IOException;
-
- /**
- * Cancel the running export operation. This method blocks until the
- * export is canceled and the exported file (if any) is deleted. If the
- * export completed by the time this method is invoked, the export file
- * will be deleted.
- *
- * @param filename The filename which identifies the export operation to be
- * canceled.
- **/
- public void cancelExport(String filename);
-
- /**
- * Add a media item at the end of the storyboard.
- *
- * @param mediaItem The media item object to add
- * @throws IllegalStateException if a preview or an export is in progress or
- * if the media item id is not unique across all the media items
- * added.
- */
- public void addMediaItem(MediaItem mediaItem);
-
- /**
- * Insert a media item after the media item with the specified id.
- *
- * @param mediaItem The media item object to insert
- * @param afterMediaItemId Insert the mediaItem after the media item
- * identified by this id. If this parameter is null, the media
- * item is inserted at the beginning of the timeline.
- *
- * @throws IllegalStateException if a preview or an export is in progress
- * @throws IllegalArgumentException if media item with the specified id does
- * not exist (null is a valid value) or if the media item id is
- * not unique across all the media items added.
- */
- public void insertMediaItem(MediaItem mediaItem, String afterMediaItemId);
-
- /**
- * Move a media item after the media item with the specified id.
- *
- * Note: The project thumbnail is regenerated if the media item is or
- * becomes the first media item in the storyboard timeline.
- *
- * @param mediaItemId The id of the media item to move
- * @param afterMediaItemId Move the media item identified by mediaItemId after
- * the media item identified by this parameter. If this parameter
- * is null, the media item is moved at the beginning of the
- * timeline.
- *
- * @throws IllegalStateException if a preview or an export is in progress
- * @throws IllegalArgumentException if one of media item ids is invalid
- * (null is a valid value)
- */
- public void moveMediaItem(String mediaItemId, String afterMediaItemId);
-
- /**
- * Remove the media item with the specified id. If there are transitions
- * before or after this media item, then this/these transition(s) are
- * removed from the storyboard. If the extraction of the audio waveform is
- * in progress, the extraction is canceled and the file is deleted.
- *
- * Effects and overlays associated with the media item will also be
- * removed.
- *
- * Note: The project thumbnail is regenerated if the media item which
- * is removed is the first media item in the storyboard or if the
- * media item is the only one in the storyboard. If the
- * media item is the only one in the storyboard, the project thumbnail
- * will be set to a black frame and the aspect ratio will revert to the
- * default aspect ratio, and this method is equivalent to
- * removeAllMediaItems() in this case.
- *
- * @param mediaItemId The unique id of the media item to be removed
- *
- * @return The media item that was removed
- *
- * @throws IllegalStateException if a preview or an export is in progress
- * @throws IllegalArgumentException if media item with the specified id
- * does not exist
- */
- public MediaItem removeMediaItem(String mediaItemId);
-
- /**
- * Remove all media items in the storyboard. All effects, overlays and all
- * transitions are also removed.
- *
- * Note: The project thumbnail will be set to a black frame and the aspect
- * ratio will revert to the default aspect ratio.
- *
- * @throws IllegalStateException if a preview or an export is in progress
- */
- public void removeAllMediaItems();
-
- /**
- * Get the list of media items in the order in which it they appear in the
- * storyboard timeline.
- *
- * Note that if any media item source files are no longer
- * accessible, this method will still provide the full list of media items.
- *
- * @return The list of media items. If no media item exist an empty list
- * will be returned.
- */
- public List<MediaItem> getAllMediaItems();
-
- /**
- * Find the media item with the specified id
- *
- * @param mediaItemId The media item id
- *
- * @return The media item with the specified id (null if it does not exist)
- */
- public MediaItem getMediaItem(String mediaItemId);
-
- /**
- * Add a transition between the media items specified by the transition.
- * If a transition existed at the same position it is invalidated and then
- * the transition is replaced. Note that the new transition video clip is
- * not automatically generated by this method. The
- * {@link Transition#generate()} method must be invoked to generate
- * the transition video clip.
- *
- * Note that the TransitionAtEnd and TransitionAtStart are special kinds
- * that can not be applied between two media items.
- *
- * A crossfade audio transition will be automatically applied regardless of
- * the video transition.
- *
- * @param transition The transition to apply
- *
- * @throws IllegalStateException if a preview or an export is in progress
- * @throws IllegalArgumentException if the transition duration is larger
- * than the smallest duration of the two media item files or
- * if the two media items specified in the transition are not
- * adjacent
- */
- public void addTransition(Transition transition);
-
- /**
- * Remove the transition with the specified id.
- *
- * @param transitionId The id of the transition to be removed
- *
- * @return The transition that was removed
- * @throws IllegalStateException if a preview or an export is in progress
- * @throws IllegalArgumentException if transition with the specified id does
- * not exist
- */
- public Transition removeTransition(String transitionId);
-
- /**
- * Get the list of transitions
- *
- * @return The list of transitions. If no transitions exist an empty list
- * will be returned.
- */
- public List<Transition> getAllTransitions();
-
- /**
- * Find the transition with the specified transition id.
- *
- * @param transitionId The transition id
- *
- * @return The transition
- */
- public Transition getTransition(String transitionId);
-
- /**
- * Add the specified AudioTrack to the storyboard. Note: Specific
- * implementations may support a limited number of audio tracks (e.g. only
- * one audio track)
- *
- * @param audioTrack The AudioTrack to add
- * @throws UnsupportedOperationException if the implementation supports a
- * limited number of audio tracks.
- * @throws IllegalArgumentException if media item is not unique across all
- * the audio tracks already added.
- */
- public void addAudioTrack(AudioTrack audioTrack);
-
- /**
- * Insert an audio track after the audio track with the specified id. Use
- * addAudioTrack to add an audio track at the end of the storyboard
- * timeline.
- *
- * @param audioTrack The audio track object to insert
- * @param afterAudioTrackId Insert the audio track after the audio track
- * identified by this parameter. If this parameter is null the
- * audio track is added at the beginning of the timeline.
- * @throws IllegalStateException if a preview or an export is in progress
- * @throws IllegalArgumentException if media item with the specified id does
- * not exist (null is a valid value). if media item is not
- * unique across all the audio tracks already added.
- * @throws UnsupportedOperationException if the implementation supports a
- * limited number of audio tracks
- */
- public void insertAudioTrack(AudioTrack audioTrack, String afterAudioTrackId);
-
- /**
- * Move an AudioTrack after the AudioTrack with the specified id.
- *
- * @param audioTrackId The id of the AudioTrack to move
- * @param afterAudioTrackId Move the AudioTrack identified by audioTrackId
- * after the AudioTrack identified by this parameter. If this
- * parameter is null the audio track is added at the beginning of
- * the timeline.
- * @throws IllegalStateException if a preview or an export is in progress
- * @throws IllegalArgumentException if one of media item ids is invalid
- * (null is a valid value)
- */
- public void moveAudioTrack(String audioTrackId, String afterAudioTrackId);
-
- /**
- * Remove the audio track with the specified id. If the extraction of the
- * audio waveform is in progress, the extraction is canceled and the file is
- * deleted.
- *
- * @param audioTrackId The id of the audio track to be removed
- *
- * @return The audio track that was removed
- * @throws IllegalStateException if a preview or an export is in progress
- */
- public AudioTrack removeAudioTrack(String audioTrackId);
-
- /**
- * Get the list of AudioTracks in order in which they appear in the storyboard.
- *
- * Note that if any AudioTrack source files are not accessible anymore,
- * this method will still provide the full list of audio tracks.
- *
- * @return The list of AudioTracks. If no audio tracks exist an empty list
- * will be returned.
- */
- public List<AudioTrack> getAllAudioTracks();
-
- /**
- * Find the AudioTrack with the specified id
- *
- * @param audioTrackId The AudioTrack id
- *
- * @return The AudioTrack with the specified id (null if it does not exist)
- */
- public AudioTrack getAudioTrack(String audioTrackId);
-
- /**
- * Set the aspect ratio used in the preview and the export movie.
- *
- * The default aspect ratio is ASPECTRATIO_16_9 (16:9).
- *
- * @param aspectRatio to apply. If aspectRatio is the same as the current
- * aspect ratio, then this function just returns. The supported
- * aspect ratio are defined in the MediaProperties class for
- * example: ASPECTRATIO_16_9
- *
- * @throws IllegalStateException if a preview or an export is in progress
- * @throws IllegalArgumentException if aspect ratio is not supported
- */
- public void setAspectRatio(int aspectRatio);
-
- /**
- * Get current aspect ratio.
- *
- * @return The aspect ratio as described in MediaProperties
- */
- public int getAspectRatio();
-
- /**
- * Get the preview (and output movie) duration.
- *
- * @return The duration of the preview (and output movie)
- */
- public long getDuration();
-
- /**
- * Render a frame according to the preview aspect ratio and activating all
- * storyboard items relative to the specified time.
- *
- * @param surfaceHolder SurfaceHolder used by the application
- * @param timeMs time corresponding to the frame to display
- *
- * @return The accurate time stamp of the frame that is rendered
- * .
- * @throws IllegalStateException if a preview or an export is already
- * in progress
- * @throws IllegalArgumentException if time is negative or beyond the
- * preview duration
- */
- public long renderPreviewFrame(SurfaceHolder surfaceHolder, long timeMs);
-
- /**
- * This method must be called after the aspect ratio of the project changes
- * and before startPreview is called. Note that this method may block for
- * an extensive period of time.
- *
- * @param listener The listener interface which will be used to notify
- * the caller of the progress of each storyboard item being processed.
- */
- public void generatePreview(MediaProcessingProgressListener listener);
-
- /**
- * Start the preview of all the storyboard items applied on all MediaItems
- * This method does not block (does not wait for the preview to complete).
- * The PreviewProgressListener allows to track the progress at the time
- * interval determined by the callbackAfterFrameCount parameter. The
- * SurfaceHolder has to be created and ready for use before calling this
- * method. The method is a no-op if there are no MediaItems in the
- * storyboard.
- *
- * @param surfaceHolder SurfaceHolder where the preview is rendered.
- * @param fromMs The time (relative to the timeline) at which the preview
- * will start
- * @param toMs The time (relative to the timeline) at which the preview will
- * stop. Use -1 to play to the end of the timeline
- * @param loop true if the preview should be looped once it reaches the end
- * @param callbackAfterFrameCount The listener interface should be invoked
- * after the number of frames specified by this parameter.
- * @param listener The listener which will be notified of the preview
- * progress
- * @throws IllegalArgumentException if fromMs is beyond the preview duration
- * @throws IllegalStateException if a preview or an export is already in
- * progress
- */
- public void startPreview(SurfaceHolder surfaceHolder, long fromMs, long toMs, boolean loop,
- int callbackAfterFrameCount, PreviewProgressListener listener);
-
- /**
- * Stop the current preview. This method blocks until ongoing preview is
- * stopped. Ignored if there is no preview running.
- *
- * @return The accurate current time when stop is effective expressed in
- * milliseconds
- */
- public long stopPreview();
-}
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.videoeditor;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.concurrent.CancellationException;
+
+import android.view.SurfaceHolder;
+
+/**
+ * This is the interface implemented by classes which provide video editing
+ * functionality. The VideoEditor implementation class manages all input and
+ * output files. Unless specifically mentioned, methods are blocking. A typical
+ * editing session may consist of the following sequence of operations:
+ *
+ * <ul>
+ * <li>Add a set of MediaItems</li>
+ * <li>Apply a set of Transitions between MediaItems</li>
+ * <li>Add Effects and Overlays to media items</li>
+ * <li>Preview the movie at any time</li>
+ * <li>Save the VideoEditor implementation class internal state</li>
+ * <li>Release the VideoEditor implementation class instance by invoking
+ * {@link #release()}
+ * </ul>
+ * The internal VideoEditor state consists of the following elements:
+ * <ul>
+ * <li>Ordered & trimmed MediaItems</li>
+ * <li>Transition video clips</li>
+ * <li>Overlays</li>
+ * <li>Effects</li>
+ * <li>Audio waveform for the background audio and MediaItems</li>
+ * <li>Project thumbnail</li>
+ * <li>Last exported movie.</li>
+ * <li>Other project specific data such as the current aspect ratio.</li>
+ * </ul>
+ * {@hide}
+ */
+public interface VideoEditor {
+ /**
+ * The file name of the project thumbnail
+ */
+ public static final String THUMBNAIL_FILENAME = "thumbnail.jpg";
+
+ /**
+ * Use this value to refer to the end of the storyboard timeline instead
+ * of specifying its exact time value.
+ */
+ public final static int DURATION_OF_STORYBOARD = -1;
+
+ /**
+ * This listener interface is used by the VideoEditor to emit preview
+ * progress notifications. This callback should be invoked after the number
+ * of frames specified by
+ * {@link #startPreview(SurfaceHolder surfaceHolder, long fromMs, long toMs,
+ * boolean loop, int callbackAfterFrameCount,
+ * PreviewProgressListener listener)}
+ */
+ public interface PreviewProgressListener {
+ /**
+ * This method notifies the listener of the current time position while
+ * previewing a project.
+ *
+ * @param videoEditor The VideoEditor instance
+ * @param timeMs The current preview position (expressed in milliseconds
+ * since the beginning of the storyboard timeline).
+ * @param end true if the end of the timeline was reached
+ */
+ public void onProgress(VideoEditor videoEditor, long timeMs, boolean end);
+ }
+
+ /**
+ * This listener interface is used by the VideoEditor to emit export status
+ * notifications.
+ * {@link #export(String filename, int height, int bitrate,
+ * ExportProgressListener listener)}
+ */
+ public interface ExportProgressListener {
+ /**
+ * This method notifies the listener of the progress status of an export
+ * operation.
+ *
+ * @param videoEditor The VideoEditor instance
+ * @param filename The name of the file which is in the process of being
+ * exported.
+ * @param progress The progress in %. At the beginning of the export,
+ * this value is set to 0; at the end, the value is set to 100.
+ */
+ public void onProgress(VideoEditor videoEditor, String filename,
+ int progress);
+ }
+
+ public interface MediaProcessingProgressListener {
+ /**
+ * Values used for the action parameter
+ */
+ public static final int ACTION_ENCODE = 1;
+ public static final int ACTION_DECODE = 2;
+
+ /**
+ * This method notifies the listener of the progress status of
+ * processing a media object such as a Transition, an AudioTrack or a
+ * media image item (when the Ken Burns effect is applied).
+ * This method may be called a maximum of 100 times for one operation.
+ *
+ * @param item The object that is being processed such as a Transition
+ * or AudioTrack
+ * @param action The type of processing being performed
+ * @param progress The progress in %. At the beginning of the operation,
+ * this value is set to 0; at the end, the value is set to 100.
+ */
+ public void onProgress(Object item, int action, int progress);
+ }
+
+ /**
+ * @return The path where the VideoEditor stores all files related to the
+ * project
+ */
+ public String getPath();
+
+ /**
+ * This method releases all in-memory resources used by the VideoEditor
+ * instance. All pending operations such as preview, export and extract
+ * audio waveform must be canceled.
+ */
+ public void release();
+
+ /**
+ * Persist the current internal state of VideoEditor to the project path.
+ * The VideoEditor state may be restored by invoking the
+ * {@link VideoEditorFactory#load(String)} method. This method does not
+ * release the internal in-memory state of the VideoEditor. To release
+ * the in-memory state of the VideoEditor the {@link #release()} method
+ * must be invoked.
+ *
+ * Pending transition generations must be allowed to complete before the
+ * state is saved.
+ * Pending audio waveform generations must be allowed to complete.
+ * Pending export operations must be allowed to continue.
+ *
+ * @throws IOException if the internal state cannot be saved to the project file
+ */
+ public void save() throws IOException;
+
+ /**
+ * Create the output movie based on all media items added and the applied
+ * storyboard items. This method can take a long time to execute and is
+ * blocking. The application will receive progress notifications via the
+ * ExportProgressListener. Specific implementations may not support multiple
+ * simultaneous export operations. Note that invoking methods which would
+ * change the contents of the output movie throw an IllegalStateException
+ * while an export operation is pending.
+ *
+ * The audio and video codecs are automatically selected by the underlying
+ * implementation.
+ *
+ * @param filename The output file name (including the full path)
+ * @param height The height of the output video file. The supported values
+ * for height are described in the MediaProperties class, for
+ * example: HEIGHT_480. The width will be automatically computed
+ * according to the aspect ratio provided by
+ * {@link #setAspectRatio(int)}
+ * @param bitrate The bitrate of the output video file. This is an approximate
+ * value for the output movie. Supported bitrate values are
+ * described in the MediaProperties class, for example: BITRATE_384K
+ * @param listener The listener for progress notifications. Use null if
+ * export progress notifications are not needed.
+ *
+ * @throws IllegalArgumentException if height or bitrate are not supported
+ * or if the audio or video codecs are not supported
+ * @throws IOException if output file cannot be created
+ * @throws IllegalStateException if a preview or an export is in progress or
+ * if no MediaItem has been added
+ * @throws CancellationException if export is canceled by calling
+ * {@link #cancelExport()}
+ * @throws UnsupportedOperationException if multiple simultaneous export() are
+ * not allowed
+ */
+ public void export(String filename, int height, int bitrate,
+ ExportProgressListener listener)
+ throws IOException;
+
+ /**
+ * Create the output movie based on all media items added and the applied
+ * storyboard items. This method can take a long time to execute and is
+ * blocking. The application will receive progress notifications via the
+ * ExportProgressListener. Specific implementations may not support multiple
+ * simultaneous export operations. Note that invoking methods which would
+ * change the contents of the output movie throw an IllegalStateException
+ * while an export operation is pending.
+ *
+ * @param filename The output file name (including the full path)
+ * @param height The height of the output video file. The supported values
+ * for height are described in the MediaProperties class, for
+ * example: HEIGHT_480. The width will be automatically computed
+ * according to the aspect ratio provided by
+ * {@link #setAspectRatio(int)}
+ * @param bitrate The bitrate of the output video file. This is an approximate
+ * value for the output movie. Supported bitrate values are
+ * described in the MediaProperties class, for example: BITRATE_384K
+ * @param audioCodec The audio codec to be used for the export. The audio
+ * codec values are defined in the MediaProperties class (e.g.
+ * ACODEC_AAC_LC). Note that not all audio codec types are
+ * supported for export purposes.
+ * @param videoCodec The video codec to be used for the export. The video
+ * codec values are defined in the MediaProperties class (e.g.
+ * VCODEC_H264BP). Note that not all video codec types are
+ * supported for export purposes.
+ * @param listener The listener for progress notifications. Use null if
+ * export progress notifications are not needed.
+ *
+ * @throws IllegalArgumentException if height or bitrate are not supported
+ * or if the audio or video codecs are not supported
+ * @throws IOException if output file cannot be created
+ * @throws IllegalStateException if a preview or an export is in progress or
+ * if no MediaItem has been added
+ * @throws CancellationException if export is canceled by calling
+ * {@link #cancelExport()}
+ * @throws UnsupportedOperationException if multiple simultaneous export() are
+ * not allowed
+ */
+ public void export(String filename, int height, int bitrate, int audioCodec,
+ int videoCodec, ExportProgressListener listener)
+ throws IOException;
+
+ /**
+ * Cancel the running export operation. This method blocks until the export
+ * is canceled and the exported file (if any) is deleted. If the export
+ * completed by the time this method is invoked, the export file will be
+ * deleted.
+ *
+ * @param filename The filename which identifies the export operation to be
+ * canceled.
+ **/
+ public void cancelExport(String filename);
+
+ /**
+ * Add a media item at the end of the storyboard.
+ *
+ * @param mediaItem The media item object to add
+ *
+ * @throws IllegalStateException if a preview or an export is in progress or
+ * if the media item id is not unique across all the media items
+ * added.
+ */
+ public void addMediaItem(MediaItem mediaItem);
+
+ /**
+ * Insert a media item after the media item with the specified id.
+ *
+ * @param mediaItem The media item object to insert
+ * @param afterMediaItemId Insert the mediaItem after the media item
+ * identified by this id. If this parameter is null, the media
+ * item is inserted at the beginning of the timeline.
+ *
+ * @throws IllegalStateException if a preview or an export is in progress
+ * @throws IllegalArgumentException if media item with the specified id does
+ * not exist (null is a valid value) or if the media item id is
+ * not unique across all the media items added.
+ */
+ public void insertMediaItem(MediaItem mediaItem, String afterMediaItemId);
+
+ /**
+ * Move a media item after the media item with the specified id.
+ *
+ * Note: The project thumbnail is regenerated if the media item is or
+ * becomes the first media item in the storyboard timeline.
+ *
+ * @param mediaItemId The id of the media item to move
+ * @param afterMediaItemId Move the media item identified by mediaItemId
+ * after the media item identified by this parameter. If this
+ * parameter is null, the media item is moved at the beginning of
+ * the timeline.
+ *
+ * @throws IllegalStateException if a preview or an export is in progress
+ * @throws IllegalArgumentException if one of the media item ids is invalid
+ * (null is a valid value)
+ */
+ public void moveMediaItem(String mediaItemId, String afterMediaItemId);
+
+ /**
+ * Remove the media item with the specified id. If there are transitions
+ * before or after this media item, then this/these transition(s) are
+ * removed from the storyboard. If the extraction of the audio waveform is
+ * in progress, the extraction is canceled and the file is deleted.
+ *
+ * Effects and overlays associated with the media item will also be removed.
+ *
+ * Note: The project thumbnail is regenerated if the media item which is
+ * removed is the first media item in the storyboard or if the media item is
+ * the only one in the storyboard. If the media item is the only one in the
+ * storyboard, the project thumbnail will be set to a black frame and the
+ * aspect ratio will revert to the default aspect ratio and this method is
+ * equivalent to removeAllMediaItems() in this case.
+ *
+ * @param mediaItemId The unique id of the media item to be removed
+ *
+ * @return The media item that was removed
+ *
+ * @throws IllegalStateException if a preview or an export is in progress
+ * @throws IllegalArgumentException if media item with the specified id does
+ * not exist
+ */
+ public MediaItem removeMediaItem(String mediaItemId);
+
+ /**
+ * Remove all media items in the storyboard. All effects, overlays and all
+ * transitions are also removed.
+ *
+ * Note: The project thumbnail will be set to a black frame and the aspect
+ * ratio will revert to the default aspect ratio.
+ *
+ * @throws IllegalStateException if a preview or an export is in progress
+ */
+ public void removeAllMediaItems();
+
+ /**
+ * Get the list of media items in the order in which they appear in the
+ * storyboard timeline.
+ *
+ * Note that if any media item source files are no longer
+ * accessible, this method will still provide the full list of media items.
+ *
+ * @return The list of media items. If no media items exist an empty list
+ * will be returned.
+ */
+ public List<MediaItem> getAllMediaItems();
+
+ /**
+ * Find the media item with the specified id
+ *
+ * @param mediaItemId The media item id
+ *
+ * @return The media item with the specified id (null if it does not exist)
+ */
+ public MediaItem getMediaItem(String mediaItemId);
+
+ /**
+ * Add a transition between the media items specified by the transition.
+ * If a transition existed at the same position it is invalidated and then
+ * the transition is replaced. Note that the new transition video clip is
+ * not automatically generated by this method. The
+ * {@link Transition#generate()} method must be invoked to generate
+ * the transition video clip.
+ *
+ * Note that the TransitionAtEnd and TransitionAtStart are special kinds
+ * that can not be applied between two media items.
+ *
+ * A crossfade audio transition will be automatically applied regardless of
+ * the video transition.
+ *
+ * @param transition The transition to apply
+ *
+ * @throws IllegalStateException if a preview or an export is in progress
+ * @throws IllegalArgumentException if the transition duration is larger
+ * than the smallest duration of the two media item files or if
+ * the two media items specified in the transition are not
+ * adjacent
+ */
+ public void addTransition(Transition transition);
+
+ /**
+ * Remove the transition with the specified id.
+ *
+ * @param transitionId The id of the transition to be removed
+ *
+ * @return The transition that was removed
+ *
+ * @throws IllegalStateException if a preview or an export is in progress
+ * @throws IllegalArgumentException if transition with the specified id does
+ * not exist
+ */
+ public Transition removeTransition(String transitionId);
+
+ /**
+ * Get the list of transitions
+ *
+ * @return The list of transitions. If no transitions exist an empty list
+ * will be returned.
+ */
+ public List<Transition> getAllTransitions();
+
+ /**
+ * Find the transition with the specified transition id.
+ *
+ * @param transitionId The transition id
+ *
+ * @return The transition
+ */
+ public Transition getTransition(String transitionId);
+
+ /**
+ * Add the specified AudioTrack to the storyboard. Note: Specific
+ * implementations may support a limited number of audio tracks (e.g. only
+ * one audio track)
+ *
+ * @param audioTrack The AudioTrack to add
+ *
+ * @throws UnsupportedOperationException if the implementation supports a
+ * limited number of audio tracks.
+ * @throws IllegalArgumentException if the audio track id is not unique
+ * across all the audio tracks already added.
+ */
+ public void addAudioTrack(AudioTrack audioTrack);
+
+ /**
+ * Insert an audio track after the audio track with the specified id. Use
+ * addAudioTrack to add an audio track at the end of the storyboard
+ * timeline.
+ *
+ * @param audioTrack The audio track object to insert
+ * @param afterAudioTrackId Insert the audio track after the audio track
+ * identified by this parameter. If this parameter is null the
+ * audio track is added at the beginning of the timeline.
+ *
+ * @throws IllegalStateException if a preview or an export is in progress
+ * @throws IllegalArgumentException if the audio track with the specified id
+ * does not exist (null is a valid value) or if the audio track id
+ * is not unique across all the audio tracks already added.
+ * @throws UnsupportedOperationException if the implementation supports a
+ * limited number of audio tracks
+ */
+ public void insertAudioTrack(AudioTrack audioTrack, String afterAudioTrackId);
+
+ /**
+ * Move an AudioTrack after the AudioTrack with the specified id.
+ *
+ * @param audioTrackId The id of the AudioTrack to move
+ * @param afterAudioTrackId Move the AudioTrack identified by audioTrackId
+ * after the AudioTrack identified by this parameter. If this
+ * parameter is null the audio track is added at the beginning of
+ * the timeline.
+ *
+ * @throws IllegalStateException if a preview or an export is in progress
+ * @throws IllegalArgumentException if one of the audio track ids is invalid
+ * (null is a valid value)
+ */
+ public void moveAudioTrack(String audioTrackId, String afterAudioTrackId);
+
+ /**
+ * Remove the audio track with the specified id. If the extraction of the
+ * audio waveform is in progress, the extraction is canceled and the file is
+ * deleted.
+ *
+ * @param audioTrackId The id of the audio track to be removed
+ *
+ * @return The audio track that was removed
+ * @throws IllegalStateException if a preview or an export is in progress
+ */
+ public AudioTrack removeAudioTrack(String audioTrackId);
+
+ /**
+ * Get the list of AudioTracks in the order in which they appear in the
+ * storyboard.
+ *
+ * Note that if any AudioTrack source files are not accessible anymore,
+ * this method will still provide the full list of audio tracks.
+ *
+ * @return The list of AudioTracks. If no audio tracks exist an empty list
+ * will be returned.
+ */
+ public List<AudioTrack> getAllAudioTracks();
+
+ /**
+ * Find the AudioTrack with the specified id
+ *
+ * @param audioTrackId The AudioTrack id
+ *
+ * @return The AudioTrack with the specified id (null if it does not exist)
+ */
+ public AudioTrack getAudioTrack(String audioTrackId);
+
+ /**
+ * Set the aspect ratio used in the preview and the export movie.
+ *
+ * The default aspect ratio is ASPECTRATIO_16_9 (16:9).
+ *
+ * @param aspectRatio to apply. If aspectRatio is the same as the current
+ * aspect ratio, then this function just returns. The supported
+ * aspect ratios are defined in the MediaProperties class, for
+ * example: ASPECTRATIO_16_9
+ *
+ * @throws IllegalStateException if a preview or an export is in progress
+ * @throws IllegalArgumentException if aspect ratio is not supported
+ */
+ public void setAspectRatio(int aspectRatio);
+
+ /**
+ * Get current aspect ratio.
+ *
+ * @return The aspect ratio as described in MediaProperties
+ */
+ public int getAspectRatio();
+
+ /**
+ * Get the preview (and output movie) duration.
+ *
+ * @return The duration of the preview (and output movie)
+ */
+ public long getDuration();
+
+ /**
+ * Render a frame according to the preview aspect ratio, activating all
+ * storyboard items relative to the specified time.
+ *
+ * @param surfaceHolder SurfaceHolder used by the application
+ * @param timeMs time corresponding to the frame to display
+ *
+ * @return The accurate time stamp of the frame that is rendered.
+ *
+ * @throws IllegalStateException if a preview or an export is already in
+ * progress
+ * @throws IllegalArgumentException if time is negative or beyond the
+ * preview duration
+ */
+ public long renderPreviewFrame(SurfaceHolder surfaceHolder, long timeMs);
+
+ /**
+ * This method must be called after any changes made to the storyboard
+ * and before startPreview is called. Note that this method may block for an
+ * extensive period of time.
+ *
+ * @param listener The listener interface which will be used to notify the
+ * caller of the progress of each storyboard item being processed.
+ */
+ public void generatePreview(MediaProcessingProgressListener listener);
+
+ /**
+ * Start the preview of all the storyboard items applied on all MediaItems
+ * This method does not block (does not wait for the preview to complete).
+ * The PreviewProgressListener allows the progress to be tracked at the time
+ * interval determined by the callbackAfterFrameCount parameter. The
+ * SurfaceHolder has to be created and ready for use before calling this
+ * method. The method is a no-op if there are no MediaItems in the
+ * storyboard.
+ *
+ * @param surfaceHolder SurfaceHolder where the preview is rendered.
+ * @param fromMs The time (relative to the timeline) at which the preview
+ * will start
+ * @param toMs The time (relative to the timeline) at which the preview will
+ * stop. Use -1 to play to the end of the timeline
+ * @param loop true if the preview should be looped once it reaches the end
+ * @param callbackAfterFrameCount The listener interface should be invoked
+ * after the number of frames specified by this parameter.
+ * @param listener The listener which will be notified of the preview
+ * progress
+ *
+ * @throws IllegalArgumentException if fromMs is beyond the preview duration
+ * @throws IllegalStateException if a preview or an export is already in
+ * progress
+ */
+ public void startPreview(SurfaceHolder surfaceHolder, long fromMs, long toMs,
+ boolean loop, int callbackAfterFrameCount,
+ PreviewProgressListener listener);
+
+ /**
+ * Stop the current preview. This method blocks until ongoing preview is
+ * stopped. Ignored if there is no preview running.
+ *
+ * @return The accurate current time when stop is effective expressed in
+ * milliseconds
+ */
+ public long stopPreview();
+}
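
To tie the interface together, here is a minimal end-to-end sketch of the editing session outlined in the class comment. It is illustrative only and not part of the patch: the project path, file names and ids are invented, the MediaVideoItem constructor is assumed to take (editor, id, filename, renderingMode), and RENDERING_MODE_BLACK_BORDER, HEIGHT_480, BITRATE_384K and BEHAVIOR_LINEAR are assumed to be the constants referenced by the Javadoc in MediaItem, MediaProperties and Transition:

    void buildAndExport() throws IOException {
        VideoEditor editor = VideoEditorFactory.create("/sdcard/vidproject");
        try {
            // Two clips placed back to back on the storyboard timeline.
            MediaItem clip1 = new MediaVideoItem(editor, "m1", "/sdcard/a.mp4",
                    MediaItem.RENDERING_MODE_BLACK_BORDER);
            MediaItem clip2 = new MediaVideoItem(editor, "m2", "/sdcard/b.mp4",
                    MediaItem.RENDERING_MODE_BLACK_BORDER);
            editor.addMediaItem(clip1);
            editor.addMediaItem(clip2);

            // Dissolve between the two clips.
            editor.addTransition(new TransitionCrossfade("t1", clip1, clip2,
                    1000, Transition.BEHAVIOR_LINEAR));

            // Regenerate storyboard items, then export a 480p movie with
            // progress callbacks (0..100 as documented above).
            editor.generatePreview(null);
            editor.export("/sdcard/vidproject/out.mp4", MediaProperties.HEIGHT_480,
                    MediaProperties.BITRATE_384K,
                    new VideoEditor.ExportProgressListener() {
                        public void onProgress(VideoEditor ve, String filename,
                                int progress) {
                            // e.g. update a progress bar
                        }
                    });

            // Persist the project state, then free in-memory resources.
            editor.save();
        } finally {
            editor.release();
        }
    }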
diff --git a/media/java/android/media/videoeditor/VideoEditorFactory.java b/media/java/android/media/videoeditor/VideoEditorFactory.java
index 85b26663d566..85c329fe4fdb 100755
--- a/media/java/android/media/videoeditor/VideoEditorFactory.java
+++ b/media/java/android/media/videoeditor/VideoEditorFactory.java
@@ -14,6 +14,7 @@
* limitations under the License.
*/
+
package android.media.videoeditor;
import java.io.File;
@@ -22,7 +23,6 @@ import java.io.IOException;
import android.media.videoeditor.VideoEditor.MediaProcessingProgressListener;
-
/**
* The VideoEditorFactory class must be used to instantiate VideoEditor objects
* by creating a new project {@link #create(String)} or by loading an
@@ -43,14 +43,19 @@ public class VideoEditorFactory {
* not be accessed in read/write mode
*/
public static VideoEditor create(String projectPath) throws IOException {
- // If the project path does not exist create it
+ /*
+ * If the project path does not exist create it
+ */
final File dir = new File(projectPath);
if (!dir.exists()) {
if (!dir.mkdirs()) {
- throw new FileNotFoundException("Cannot create project path: " + projectPath);
+ throw new FileNotFoundException("Cannot create project path: "
+ + projectPath);
} else {
- // Create the file which hides the media files
- // from the media scanner
+ /*
+ * Create the file which hides the media files
+ * from the media scanner
+ */
if (!new File(dir, ".nomedia").createNewFile()) {
throw new FileNotFoundException("Cannot create file .nomedia");
}
@@ -69,7 +74,8 @@ public class VideoEditorFactory {
* are stored. When a project is deleted the application is
* responsible for deleting the path and its contents.
* @param generatePreview if set to true the
- * {@link MediaEditor#generatePreview(MediaProcessingProgressListener listener)}
+ * {@link VideoEditor#generatePreview(MediaProcessingProgressListener
+ * listener)}
* will be called internally to generate any needed transitions.
*
* @return The VideoEditor instance
@@ -79,7 +85,7 @@ public class VideoEditorFactory {
* media files cannot be retrieved
*/
public static VideoEditor load(String projectPath, boolean generatePreview)
- throws IOException {
+ throws IOException {
final VideoEditor videoEditor = new VideoEditorImpl(projectPath);
if (generatePreview) {
videoEditor.generatePreview(null);
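
As a short companion sketch for the factory usage touched by this hunk (the project path is invented and the checked IOException must be handled by the caller): load() reopens a saved project and, when the flag is true, regenerates any needed transition clips via generatePreview() before returning the editor.

    // Reopen a previously saved project; "true" makes load() call
    // generatePreview(null) internally, as shown in the hunk above.
    VideoEditor editor = VideoEditorFactory.load("/sdcard/vidproject", true);
    try {
        // ... continue editing or start a preview ...
    } finally {
        editor.release();
    }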
diff --git a/media/java/android/media/videoeditor/VideoEditorImpl.java b/media/java/android/media/videoeditor/VideoEditorImpl.java
index 1a145e661f54..71c26249020d 100644..100755
--- a/media/java/android/media/videoeditor/VideoEditorImpl.java
+++ b/media/java/android/media/videoeditor/VideoEditorImpl.java
@@ -14,6 +14,7 @@
* limitations under the License.
*/
+
package android.media.videoeditor;
import java.io.File;
@@ -26,6 +27,7 @@ import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
+import java.util.concurrent.Semaphore;
import org.xmlpull.v1.XmlPullParser;
import org.xmlpull.v1.XmlPullParserException;
@@ -34,19 +36,32 @@ import org.xmlpull.v1.XmlSerializer;
import android.graphics.Rect;
import android.util.Log;
import android.util.Xml;
+import android.view.Surface;
import android.view.SurfaceHolder;
+import android.graphics.Bitmap;
+
/**
* The VideoEditor implementation {@hide}
*/
public class VideoEditorImpl implements VideoEditor {
- // Logging
+ /*
+ * Logging
+ */
private static final String TAG = "VideoEditorImpl";
- // The project filename
+ /*
+ * The project filename
+ */
private static final String PROJECT_FILENAME = "videoeditor.xml";
+ /*
+ * Semaphore to control preview calls
+ */
+ final Semaphore mPreviewSemaphore = new Semaphore(1, true);
- // XML tags
+ /*
+ * XML tags
+ */
private static final String TAG_PROJECT = "project";
private static final String TAG_MEDIA_ITEMS = "media_items";
private static final String TAG_MEDIA_ITEM = "media_item";
@@ -54,7 +69,8 @@ public class VideoEditorImpl implements VideoEditor {
private static final String TAG_TRANSITION = "transition";
private static final String TAG_OVERLAYS = "overlays";
private static final String TAG_OVERLAY = "overlay";
- private static final String TAG_OVERLAY_USER_ATTRIBUTES = "overlay_user_attributes";
+ private static final String TAG_OVERLAY_USER_ATTRIBUTES =
+ "overlay_user_attributes";
private static final String TAG_EFFECTS = "effects";
private static final String TAG_EFFECT = "effect";
private static final String TAG_AUDIO_TRACKS = "audio_tracks";
@@ -62,9 +78,11 @@ public class VideoEditorImpl implements VideoEditor {
private static final String ATTR_ID = "id";
private static final String ATTR_FILENAME = "filename";
- private static final String ATTR_AUDIO_WAVEFORM_FILENAME = "wavefoem";
+ private static final String ATTR_AUDIO_WAVEFORM_FILENAME = "waveform";
private static final String ATTR_RENDERING_MODE = "rendering_mode";
private static final String ATTR_ASPECT_RATIO = "aspect_ratio";
+ private static final String ATTR_PREVIEW_PREPARE = "preview_prepare_invalid";
+ private static final String ATTR_REGENERATE_PCM = "regeneratePCMFlag";
private static final String ATTR_TYPE = "type";
private static final String ATTR_DURATION = "duration";
private static final String ATTR_START_TIME = "start_time";
@@ -80,156 +98,66 @@ public class VideoEditorImpl implements VideoEditor {
private static final String ATTR_AFTER_MEDIA_ITEM_ID = "after_media_item";
private static final String ATTR_COLOR_EFFECT_TYPE = "color_type";
private static final String ATTR_COLOR_EFFECT_VALUE = "color_value";
- private static final String ATTR_START_RECT_L = "start_l";
- private static final String ATTR_START_RECT_T = "start_t";
- private static final String ATTR_START_RECT_R = "start_r";
- private static final String ATTR_START_RECT_B = "start_b";
- private static final String ATTR_END_RECT_L = "end_l";
- private static final String ATTR_END_RECT_T = "end_t";
- private static final String ATTR_END_RECT_R = "end_r";
- private static final String ATTR_END_RECT_B = "end_b";
+ private static final String ATTR_START_RECT_LEFT = "start_l";
+ private static final String ATTR_START_RECT_TOP = "start_t";
+ private static final String ATTR_START_RECT_RIGHT = "start_r";
+ private static final String ATTR_START_RECT_BOTTOM = "start_b";
+ private static final String ATTR_END_RECT_LEFT = "end_l";
+ private static final String ATTR_END_RECT_TOP = "end_t";
+ private static final String ATTR_END_RECT_RIGHT = "end_r";
+ private static final String ATTR_END_RECT_BOTTOM = "end_b";
private static final String ATTR_LOOP = "loop";
private static final String ATTR_MUTED = "muted";
private static final String ATTR_DUCK_ENABLED = "ducking_enabled";
private static final String ATTR_DUCK_THRESHOLD = "ducking_threshold";
private static final String ATTR_DUCKED_TRACK_VOLUME = "ducking_volume";
+ private static final String ATTR_GENERATED_IMAGE_CLIP =
+ "generated_image_clip";
+ private static final String ATTR_GENERATED_TRANSITION_CLIP =
+ "generated_transition_clip";
+ private static final String ATTR_IS_TRANSITION_GENERATED =
+ "is_transition_generated";
+ private static final String ATTR_OVERLAY_RGB_FILENAME =
+ "overlay_rgb_filename";
+ private static final String ATTR_OVERLAY_FRAME_WIDTH =
+ "overlay_frame_width";
+ private static final String ATTR_OVERLAY_FRAME_HEIGHT =
+ "overlay_frame_height";
- // Instance variables
+ /*
+ * Instance variables
+ */
private long mDurationMs;
private final String mProjectPath;
private final List<MediaItem> mMediaItems = new ArrayList<MediaItem>();
private final List<AudioTrack> mAudioTracks = new ArrayList<AudioTrack>();
private final List<Transition> mTransitions = new ArrayList<Transition>();
- private PreviewThread mPreviewThread;
private int mAspectRatio;
- /**
- * The preview thread
+ /*
+ * Private Object for calling native Methods via MediaArtistNativeHelper
*/
- private class PreviewThread extends Thread {
- // Instance variables
- private final static long FRAME_DURATION = 33;
-
- // Instance variables
- private final PreviewProgressListener mListener;
- private final int mCallbackAfterFrameCount;
- private final long mFromMs, mToMs;
- private boolean mRun, mLoop;
- private long mPositionMs;
-
- /**
- * Constructor
- *
- * @param fromMs Start preview at this position
- * @param toMs The time (relative to the timeline) at which the preview
- * will stop. Use -1 to play to the end of the timeline
- * @param callbackAfterFrameCount The listener interface should be
- * invoked after the number of frames specified by this
- * parameter.
- * @param loop true if the preview should be looped once it reaches the
- * end
- * @param listener The listener
- */
- public PreviewThread(long fromMs, long toMs, boolean loop, int callbackAfterFrameCount,
- PreviewProgressListener listener) {
- mPositionMs = mFromMs = fromMs;
- if (toMs < 0) {
- mToMs = mDurationMs;
- } else {
- mToMs = toMs;
- }
- mLoop = loop;
- mCallbackAfterFrameCount = callbackAfterFrameCount;
- mListener = listener;
- mRun = true;
- }
-
- /*
- * {@inheritDoc}
- */
- @Override
- public void run() {
- if (Log.isLoggable(TAG, Log.DEBUG)) {
- Log.d(TAG, "===> PreviewThread.run enter");
- }
- int frameCount = 0;
- while (mRun) {
- try {
- sleep(FRAME_DURATION);
- } catch (InterruptedException ex) {
- break;
- }
- frameCount++;
- mPositionMs += FRAME_DURATION;
-
- if (mPositionMs >= mToMs) {
- if (!mLoop) {
- if (mListener != null) {
- mListener.onProgress(VideoEditorImpl.this, mPositionMs, true);
- }
- if (Log.isLoggable(TAG, Log.DEBUG)) {
- Log.d(TAG, "PreviewThread.run playback complete");
- }
- break;
- } else {
- // Fire a notification for the end of the clip
- if (mListener != null) {
- mListener.onProgress(VideoEditorImpl.this, mToMs, false);
- }
-
- // Rewind
- mPositionMs = mFromMs;
- if (mListener != null) {
- mListener.onProgress(VideoEditorImpl.this, mPositionMs, false);
- }
- if (Log.isLoggable(TAG, Log.DEBUG)) {
- Log.d(TAG, "PreviewThread.run playback complete");
- }
- frameCount = 0;
- }
- } else {
- if (frameCount == mCallbackAfterFrameCount) {
- if (mListener != null) {
- mListener.onProgress(VideoEditorImpl.this, mPositionMs, false);
- }
- frameCount = 0;
- }
- }
- }
-
- if (Log.isLoggable(TAG, Log.DEBUG)) {
- Log.d(TAG, "===> PreviewThread.run exit");
- }
- }
-
- /**
- * Stop the preview
- *
- * @return The stop position
- */
- public long stopPreview() {
- mRun = false;
- try {
- join();
- } catch (InterruptedException ex) {
- }
- return mPositionMs;
- }
- };
+ private MediaArtistNativeHelper mMANativeHelper;
+ private VideoEditor veObject = null;
+ private boolean mPreviewInProgress = false;
/**
* Constructor
*
- * @param projectPath
+ * @param projectPath - The path where the VideoEditor stores all files
+ * related to the project
*/
public VideoEditorImpl(String projectPath) throws IOException {
+
+ mMANativeHelper = new MediaArtistNativeHelper(projectPath, this);
mProjectPath = projectPath;
final File projectXml = new File(projectPath, PROJECT_FILENAME);
if (projectXml.exists()) {
try {
load();
} catch (Exception ex) {
- throw new IOException(ex);
+ ex.printStackTrace();
+ throw new IOException(ex.toString());
}
} else {
mAspectRatio = MediaProperties.ASPECT_RATIO_16_9;
@@ -238,236 +166,409 @@ public class VideoEditorImpl implements VideoEditor {
}
/*
+ * @return The MediaArtistNativeHelper object
+ */
+ MediaArtistNativeHelper getNativeContext() {
+ return mMANativeHelper;
+ }
+
+ /*
* {@inheritDoc}
*/
- public String getPath() {
- return mProjectPath;
+ public synchronized void addAudioTrack(AudioTrack audioTrack) {
+ if (audioTrack == null) {
+ throw new IllegalArgumentException("Audio Track is null");
+ }
+ if (mAudioTracks.size() == 1) {
+ throw new IllegalArgumentException("No more tracks can be added");
+ }
+
+ /*
+ * Add the audio track to AudioTrack list
+ */
+ mAudioTracks.add(audioTrack);
+
+ /*
+ * Form the audio PCM file path
+ */
+ String audioTrackPCMFilePath = String.format(mProjectPath + "/"
+ + "AudioPcm" + audioTrack.getId() + ".pcm");
+
+ /*
+ * Create PCM only if not generated in previous session
+ */
+        if (new File(audioTrackPCMFilePath).exists()) {
+ mMANativeHelper.setAudioflag(false);
+ }
+ mMANativeHelper.setGeneratePreview(true);
}
/*
* {@inheritDoc}
*/
public synchronized void addMediaItem(MediaItem mediaItem) {
- if (mPreviewThread != null) {
- throw new IllegalStateException("Previewing is in progress");
+ /*
+ * Validate Media Item
+ */
+ if (mediaItem == null) {
+ throw new IllegalArgumentException("Media item is null");
}
-
+ /*
+ * Add the Media item to MediaItem list
+ */
if (mMediaItems.contains(mediaItem)) {
- throw new IllegalArgumentException("Media item already exists: " + mediaItem.getId());
+ throw new IllegalArgumentException("Media item already exists: " +
+ mediaItem.getId());
}
- // Invalidate the end transition if necessary
+ /*
+ * Invalidate the end transition if necessary
+ */
final int mediaItemsCount = mMediaItems.size();
if ( mediaItemsCount > 0) {
removeTransitionAfter(mediaItemsCount - 1);
}
- // Add the new media item
+ /*
+ * Add the new media item
+ */
mMediaItems.add(mediaItem);
computeTimelineDuration();
+ mMANativeHelper.setGeneratePreview(true);
+ /*
+ * Generate project thumbnail only from first media Item on storyboard
+ */
+ if (mMediaItems.size() == 1) {
+ generateProjectThumbnail();
+ }
}
+
/*
* {@inheritDoc}
*/
- public synchronized void insertMediaItem(MediaItem mediaItem, String afterMediaItemId) {
- if (mPreviewThread != null) {
- throw new IllegalStateException("Previewing is in progress");
+ public synchronized void addTransition(Transition transition) {
+ if (transition == null) {
+ throw new IllegalArgumentException("Null Transition");
}
-
- if (mMediaItems.contains(mediaItem)) {
- throw new IllegalArgumentException("Media item already exists: " + mediaItem.getId());
+ final MediaItem beforeMediaItem = transition.getBeforeMediaItem();
+ final MediaItem afterMediaItem = transition.getAfterMediaItem();
+ /*
+ * Check if the MediaItems are in sequence
+ */
+ if (mMediaItems == null) {
+ throw new IllegalArgumentException("No media items are added");
}
+ if ((afterMediaItem != null) && (beforeMediaItem != null)) {
+ int afterMediaItemIndex = mMediaItems.indexOf(afterMediaItem);
+ int beforeMediaItemIndex = mMediaItems.indexOf(beforeMediaItem);
- if (afterMediaItemId == null) {
- if (mMediaItems.size() > 0) {
- // Invalidate the transition at the beginning of the timeline
- removeTransitionBefore(0);
+
+ if ((afterMediaItemIndex == -1) || (beforeMediaItemIndex == -1)) {
+                throw new IllegalArgumentException(
+                        "One of the media items was not found in the list");
}
- mMediaItems.add(0, mediaItem);
- computeTimelineDuration();
- } else {
- final int mediaItemCount = mMediaItems.size();
- for (int i = 0; i < mediaItemCount; i++) {
- final MediaItem mi = mMediaItems.get(i);
- if (mi.getId().equals(afterMediaItemId)) {
- // Invalidate the transition at this position
- removeTransitionAfter(i);
- // Insert the new media item
- mMediaItems.add(i + 1, mediaItem);
- computeTimelineDuration();
- return;
- }
+ if (afterMediaItemIndex != (beforeMediaItemIndex - 1) ) {
+ throw new IllegalArgumentException("MediaItems are not in sequence");
+ }
+ }
+
+ mTransitions.add(transition);
+ /*
+ * Cross reference the transitions
+ */
+ if (afterMediaItem != null) {
+ /*
+ * If a transition already exists at the specified position then
+ * invalidate it.
+ */
+ if (afterMediaItem.getEndTransition() != null) {
+ afterMediaItem.getEndTransition().invalidate();
+ mTransitions.remove(afterMediaItem.getEndTransition());
}
- throw new IllegalArgumentException("MediaItem not found: " + afterMediaItemId);
+ afterMediaItem.setEndTransition(transition);
}
+
+ if (beforeMediaItem != null) {
+ /*
+ * If a transition already exists at the specified position then
+ * invalidate it.
+ */
+ if (beforeMediaItem.getBeginTransition() != null) {
+ beforeMediaItem.getBeginTransition().invalidate();
+ mTransitions.remove(beforeMediaItem.getBeginTransition());
+ }
+ beforeMediaItem.setBeginTransition(transition);
+ }
+
+ computeTimelineDuration();
+ mMANativeHelper.setGeneratePreview(true);
}
/*
* {@inheritDoc}
*/
- public synchronized void moveMediaItem(String mediaItemId, String afterMediaItemId) {
- if (mPreviewThread != null) {
- throw new IllegalStateException("Previewing is in progress");
+ public void cancelExport(String filename) {
+ if (mMANativeHelper != null && filename != null) {
+ mMANativeHelper.stop(filename);
}
+ }
- final MediaItem moveMediaItem = removeMediaItem(mediaItemId);
- if (moveMediaItem == null) {
- throw new IllegalArgumentException("Target MediaItem not found: " + mediaItemId);
+ /*
+ * {@inheritDoc}
+ */
+ public void export(String filename, int height, int bitrate,
+ int audioCodec, int videoCodec,
+ ExportProgressListener listener) throws IOException {
+        if (filename == null) {
+            throw new IllegalArgumentException("export: filename is null");
+        }
+        final File tempPathFile = new File(filename);
+        if (tempPathFile == null) {
+            throw new IOException(filename + " cannot be created");
+        }
+ if (mMediaItems.size() == 0) {
+ throw new IllegalStateException("No MediaItems added");
}
- if (afterMediaItemId == null) {
- if (mMediaItems.size() > 0) {
- // Invalidate adjacent transitions at the insertion point
- removeTransitionBefore(0);
+ switch (audioCodec) {
+ case MediaProperties.ACODEC_AAC_LC:
+ break;
+ case MediaProperties.ACODEC_AMRNB:
+ break;
- // Insert the media item at the new position
- mMediaItems.add(0, moveMediaItem);
- computeTimelineDuration();
- } else {
- throw new IllegalStateException("Cannot move media item (it is the only item)");
- }
- } else {
- final int mediaItemCount = mMediaItems.size();
- for (int i = 0; i < mediaItemCount; i++) {
- final MediaItem mi = mMediaItems.get(i);
- if (mi.getId().equals(afterMediaItemId)) {
- // Invalidate adjacent transitions at the insertion point
- removeTransitionAfter(i);
- // Insert the media item at the new position
- mMediaItems.add(i + 1, moveMediaItem);
- computeTimelineDuration();
- return;
- }
- }
+            default:
+ throw new IllegalArgumentException("Audio codec type incorrect");
+ }
+
+ switch (videoCodec) {
+ case MediaProperties.VCODEC_H263:
+ break;
+ case MediaProperties.VCODEC_H264BP:
+ break;
+ case MediaProperties.VCODEC_MPEG4:
+ break;
- throw new IllegalArgumentException("MediaItem not found: " + afterMediaItemId);
+            default:
+ throw new IllegalArgumentException("Video codec type incorrect");
}
+
+ switch (height) {
+ case MediaProperties.HEIGHT_144:
+ break;
+ case MediaProperties.HEIGHT_360:
+ break;
+ case MediaProperties.HEIGHT_480:
+ break;
+ case MediaProperties.HEIGHT_720:
+ break;
+
+ default:
+ throw new IllegalArgumentException("Argument Height incorrect");
+ }
+
+ switch (bitrate) {
+ case MediaProperties.BITRATE_28K:
+ break;
+ case MediaProperties.BITRATE_40K:
+ break;
+ case MediaProperties.BITRATE_64K:
+ break;
+ case MediaProperties.BITRATE_96K:
+ break;
+ case MediaProperties.BITRATE_128K:
+ break;
+ case MediaProperties.BITRATE_192K:
+ break;
+ case MediaProperties.BITRATE_256K:
+ break;
+ case MediaProperties.BITRATE_384K:
+ break;
+ case MediaProperties.BITRATE_512K:
+ break;
+ case MediaProperties.BITRATE_800K:
+ break;
+ case MediaProperties.BITRATE_2M:
+ break;
+ case MediaProperties.BITRATE_5M:
+ break;
+ case MediaProperties.BITRATE_8M:
+ break;
+
+ default:
+ throw new IllegalArgumentException("Argument Bitrate incorrect");
+ }
+
+        mMANativeHelper.export(filename, mProjectPath, height, bitrate, audioCodec,
+                videoCodec, mMediaItems, mTransitions, mAudioTracks, listener);
}
/*
* {@inheritDoc}
*/
- public synchronized MediaItem removeMediaItem(String mediaItemId) {
- if (mPreviewThread != null) {
- throw new IllegalStateException("Previewing is in progress");
+ public void export(String filename, int height, int bitrate,
+ ExportProgressListener listener) throws IOException {
+        if (filename == null) {
+            throw new IllegalArgumentException("export: filename is null");
+        }
+        final File tempPathFile = new File(filename);
+        if (tempPathFile == null) {
+            throw new IOException(filename + " cannot be created");
+        }
+ if (mMediaItems.size() == 0) {
+ throw new IllegalStateException("No MediaItems added");
}
- final MediaItem mediaItem = getMediaItem(mediaItemId);
- if (mediaItem != null) {
- // Remove the media item
- mMediaItems.remove(mediaItem);
- // Remove the adjacent transitions
- removeAdjacentTransitions(mediaItem);
- computeTimelineDuration();
+ switch (height) {
+ case MediaProperties.HEIGHT_144:
+ break;
+ case MediaProperties.HEIGHT_360:
+ break;
+ case MediaProperties.HEIGHT_480:
+ break;
+ case MediaProperties.HEIGHT_720:
+ break;
+
+ default:
+ throw new IllegalArgumentException("Argument Height incorrect");
+ }
+ switch (bitrate) {
+ case MediaProperties.BITRATE_28K:
+ break;
+ case MediaProperties.BITRATE_40K:
+ break;
+ case MediaProperties.BITRATE_64K:
+ break;
+ case MediaProperties.BITRATE_96K:
+ break;
+ case MediaProperties.BITRATE_128K:
+ break;
+ case MediaProperties.BITRATE_192K:
+ break;
+ case MediaProperties.BITRATE_256K:
+ break;
+ case MediaProperties.BITRATE_384K:
+ break;
+ case MediaProperties.BITRATE_512K:
+ break;
+ case MediaProperties.BITRATE_800K:
+ break;
+ case MediaProperties.BITRATE_2M:
+ break;
+ case MediaProperties.BITRATE_5M:
+ break;
+ case MediaProperties.BITRATE_8M:
+ break;
+
+ default:
+ throw new IllegalArgumentException("Argument Bitrate incorrect");
}
- return mediaItem;
+        mMANativeHelper.export(filename, mProjectPath, height, bitrate,
+                mMediaItems, mTransitions, mAudioTracks,
+                listener);
}
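    /*
     * Illustrative usage only, not part of this patch: exporting the timeline with
     * constants that the switch statements above accept. The output filename and
     * the caller-supplied progress listener are placeholders; ExportProgressListener
     * is assumed to be the nested listener type declared on VideoEditor.
     */
    import java.io.IOException;

    import android.media.videoeditor.MediaProperties;
    import android.media.videoeditor.VideoEditor;

    public class ExportRunner {
        /** Exports to a 480p, 512 kbps, AAC-LC / H.264 baseline file. */
        public static void exportTo(VideoEditor editor, String outFilename,
                VideoEditor.ExportProgressListener listener) throws IOException {
            editor.export(outFilename,
                    MediaProperties.HEIGHT_480,
                    MediaProperties.BITRATE_512K,
                    MediaProperties.ACODEC_AAC_LC,
                    MediaProperties.VCODEC_H264BP,
                    listener);
            // An in-progress export can be aborted with editor.cancelExport(outFilename).
        }
    }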
/*
* {@inheritDoc}
*/
- public synchronized MediaItem getMediaItem(String mediaItemId) {
- for (MediaItem mediaItem : mMediaItems) {
- if (mediaItem.getId().equals(mediaItemId)) {
- return mediaItem;
+ public void generatePreview(MediaProcessingProgressListener listener) {
+ boolean semAcquireDone = false;
+        try {
+ mPreviewSemaphore.acquire();
+ semAcquireDone = true;
+ mMANativeHelper.setGeneratePreview(true);
+ if ((mMediaItems.size() > 0) || (mAudioTracks.size() > 0)) {
+ mMANativeHelper.previewStoryBoard(mMediaItems, mTransitions,
+ mAudioTracks, listener);
+ }
+ } catch (InterruptedException ex) {
+            Log.e(TAG, "Sem acquire NOT successful in previewStoryBoard");
+ } finally {
+ if (semAcquireDone) {
+ mPreviewSemaphore.release();
}
}
+ }
- return null;
+ /*
+ * {@inheritDoc}
+ */
+ public List<AudioTrack> getAllAudioTracks() {
+ return mAudioTracks;
}
/*
* {@inheritDoc}
*/
- public synchronized List<MediaItem> getAllMediaItems() {
+ public List<MediaItem> getAllMediaItems() {
return mMediaItems;
}
/*
* {@inheritDoc}
*/
- public synchronized void removeAllMediaItems() {
- mMediaItems.clear();
-
- // Invalidate all transitions
- for (Transition transition : mTransitions) {
- transition.invalidate();
- }
- mTransitions.clear();
-
- mDurationMs = 0;
+ public List<Transition> getAllTransitions() {
+ return mTransitions;
}
/*
* {@inheritDoc}
*/
- public synchronized void addTransition(Transition transition) {
- mTransitions.add(transition);
-
- final MediaItem beforeMediaItem = transition.getBeforeMediaItem();
- final MediaItem afterMediaItem = transition.getAfterMediaItem();
+ public int getAspectRatio() {
+ return mAspectRatio;
+ }
- // Cross reference the transitions
- if (afterMediaItem != null) {
- // If a transition already exists at the specified position then
- // invalidate it.
- if (afterMediaItem.getEndTransition() != null) {
- afterMediaItem.getEndTransition().invalidate();
+ /*
+ * {@inheritDoc}
+ */
+ public AudioTrack getAudioTrack(String audioTrackId) {
+ for (AudioTrack at : mAudioTracks) {
+ if (at.getId().equals(audioTrackId)) {
+ return at;
}
- afterMediaItem.setEndTransition(transition);
}
+ return null;
+ }
- if (beforeMediaItem != null) {
- // If a transition already exists at the specified position then
- // invalidate it.
- if (beforeMediaItem.getBeginTransition() != null) {
- beforeMediaItem.getBeginTransition().invalidate();
- }
- beforeMediaItem.setBeginTransition(transition);
- }
+ /*
+ * {@inheritDoc}
+ */
+ public long getDuration() {
+ /**
+ * Since MediaImageItem can change duration we need to compute the
+ * duration here
+ */
+ computeTimelineDuration();
+ return mDurationMs;
+ }
+ /*
+ * Force updates the timeline duration
+ */
+ void updateTimelineDuration() {
computeTimelineDuration();
}
/*
* {@inheritDoc}
*/
- public synchronized Transition removeTransition(String transitionId) {
- if (mPreviewThread != null) {
- throw new IllegalStateException("Previewing is in progress");
- }
-
- final Transition transition = getTransition(transitionId);
- if (transition == null) {
- throw new IllegalStateException("Transition not found: " + transitionId);
- }
-
- // Remove the transition references
- final MediaItem afterMediaItem = transition.getAfterMediaItem();
- if (afterMediaItem != null) {
- afterMediaItem.setEndTransition(null);
- }
-
- final MediaItem beforeMediaItem = transition.getBeforeMediaItem();
- if (beforeMediaItem != null) {
- beforeMediaItem.setBeginTransition(null);
+ public synchronized MediaItem getMediaItem(String mediaItemId) {
+ for (MediaItem mediaItem : mMediaItems) {
+ if (mediaItem.getId().equals(mediaItemId)) {
+ return mediaItem;
+ }
}
-
- mTransitions.remove(transition);
- transition.invalidate();
- computeTimelineDuration();
-
- return transition;
+ return null;
}
/*
* {@inheritDoc}
*/
- public List<Transition> getAllTransitions() {
- return mTransitions;
+ public String getPath() {
+ return mProjectPath;
}
/*
@@ -479,27 +580,16 @@ public class VideoEditorImpl implements VideoEditor {
return transition;
}
}
-
return null;
}
/*
* {@inheritDoc}
*/
- public synchronized void addAudioTrack(AudioTrack audioTrack) {
- if (mPreviewThread != null) {
- throw new IllegalStateException("Previewing is in progress");
- }
-
- mAudioTracks.add(audioTrack);
- }
-
- /*
- * {@inheritDoc}
- */
- public synchronized void insertAudioTrack(AudioTrack audioTrack, String afterAudioTrackId) {
- if (mPreviewThread != null) {
- throw new IllegalStateException("Previewing is in progress");
+ public synchronized void insertAudioTrack(AudioTrack audioTrack,
+ String afterAudioTrackId) {
+ if (mAudioTracks.size() == 1) {
+ throw new IllegalArgumentException("No more tracks can be added");
}
if (afterAudioTrackId == null) {
@@ -513,245 +603,355 @@ public class VideoEditorImpl implements VideoEditor {
return;
}
}
+ throw new IllegalArgumentException("AudioTrack not found: "
+ + afterAudioTrackId);
+ }
+ mMANativeHelper.setGeneratePreview(true);
+ }
- throw new IllegalArgumentException("AudioTrack not found: " + afterAudioTrackId);
+ /*
+ * {@inheritDoc}
+ */
+ public synchronized void insertMediaItem(MediaItem mediaItem,
+ String afterMediaItemId) {
+ if (mMediaItems.contains(mediaItem)) {
+ throw new IllegalArgumentException("Media item already exists: "
+ + mediaItem.getId());
}
+
+ if (afterMediaItemId == null) {
+ if (mMediaItems.size() > 0) {
+ /**
+ * Invalidate the transition at the beginning of the timeline
+ */
+ removeTransitionBefore(0);
+ }
+ mMediaItems.add(0, mediaItem);
+ computeTimelineDuration();
+ generateProjectThumbnail();
+ } else {
+ final int mediaItemCount = mMediaItems.size();
+ for (int i = 0; i < mediaItemCount; i++) {
+ final MediaItem mi = mMediaItems.get(i);
+ if (mi.getId().equals(afterMediaItemId)) {
+ /**
+ * Invalidate the transition at this position
+ */
+ removeTransitionAfter(i);
+ /**
+ * Insert the new media item
+ */
+ mMediaItems.add(i + 1, mediaItem);
+ computeTimelineDuration();
+ mMANativeHelper.setGeneratePreview(true);
+ return;
+ }
+ }
+ throw new IllegalArgumentException("MediaItem not found: "
+ + afterMediaItemId);
+ }
+ mMANativeHelper.setGeneratePreview(true);
}
/*
* {@inheritDoc}
*/
- public synchronized void moveAudioTrack(String audioTrackId, String afterAudioTrackId) {
+ public synchronized void moveAudioTrack(String audioTrackId,
+ String afterAudioTrackId) {
throw new IllegalStateException("Not supported");
}
/*
* {@inheritDoc}
*/
- public synchronized AudioTrack removeAudioTrack(String audioTrackId) {
- if (mPreviewThread != null) {
- throw new IllegalStateException("Previewing is in progress");
+ public synchronized void moveMediaItem(String mediaItemId,
+ String afterMediaItemId) {
+ final MediaItem moveMediaItem = removeMediaItem(mediaItemId,true);
+ if (moveMediaItem == null) {
+ throw new IllegalArgumentException("Target MediaItem not found: "
+ + mediaItemId);
}
- final AudioTrack audioTrack = getAudioTrack(audioTrackId);
- if (audioTrack != null) {
- mAudioTracks.remove(audioTrack);
+ if (afterMediaItemId == null) {
+ if (mMediaItems.size() > 0) {
+ /**
+ * Invalidate adjacent transitions at the insertion point
+ */
+ removeTransitionBefore(0);
+
+ /**
+ * Insert the media item at the new position
+ */
+ mMediaItems.add(0, moveMediaItem);
+ computeTimelineDuration();
+ generateProjectThumbnail();
+ } else {
+ throw new IllegalStateException("Cannot move media item (it is the only item)");
+ }
+ } else {
+ final int mediaItemCount = mMediaItems.size();
+ for (int i = 0; i < mediaItemCount; i++) {
+ final MediaItem mi = mMediaItems.get(i);
+ if (mi.getId().equals(afterMediaItemId)) {
+ /**
+ * Invalidate adjacent transitions at the insertion point
+ */
+ removeTransitionAfter(i);
+ /**
+ * Insert the media item at the new position
+ */
+ mMediaItems.add(i + 1, moveMediaItem);
+ computeTimelineDuration();
+ mMANativeHelper.setGeneratePreview(true);
+ return;
+ }
+ }
+
+ throw new IllegalArgumentException("MediaItem not found: "
+ + afterMediaItemId);
}
+ mMANativeHelper.setGeneratePreview(true);
+ }
- return audioTrack;
+ /*
+ * {@inheritDoc}
+ */
+ public void release() {
+ stopPreview();
+ mMediaItems.clear();
+ mAudioTracks.clear();
+ mTransitions.clear();
+ mMANativeHelper.releaseNativeHelper();
+        if (mMANativeHelper != null) {
+            mMANativeHelper = null;
+        }
+        if (veObject != null) {
+            veObject = null;
+        }
}
/*
* {@inheritDoc}
*/
- public AudioTrack getAudioTrack(String audioTrackId) {
- for (AudioTrack at : mAudioTracks) {
- if (at.getId().equals(audioTrackId)) {
- return at;
- }
+ public synchronized void removeAllMediaItems() {
+ mMediaItems.clear();
+
+ /**
+ * Invalidate all transitions
+ */
+ for (Transition transition : mTransitions) {
+ transition.invalidate();
+ }
+ mTransitions.clear();
+
+ mDurationMs = 0;
+ mMANativeHelper.setGeneratePreview(true);
+ /**
+ * If a thumbnail already exists, then delete it
+ */
+ if ((new File(mProjectPath + "/" + THUMBNAIL_FILENAME)).exists()) {
+ (new File(mProjectPath + "/" + THUMBNAIL_FILENAME)).delete();
}
- return null;
}
/*
* {@inheritDoc}
*/
- public List<AudioTrack> getAllAudioTracks() {
- return mAudioTracks;
+ public synchronized AudioTrack removeAudioTrack(String audioTrackId) {
+ final AudioTrack audioTrack = getAudioTrack(audioTrackId);
+ if (audioTrack != null) {
+ mAudioTracks.remove(audioTrack);
+ audioTrack.invalidate();
+ mMANativeHelper.invalidatePcmFile();
+ mMANativeHelper.setAudioflag(true);
+        } else {
+            throw new IllegalArgumentException("Audio track not found: "
+                    + audioTrackId);
+ }
+ mMANativeHelper.setGeneratePreview(true);
+ return audioTrack;
}
/*
* {@inheritDoc}
*/
- public void save() throws IOException {
- final XmlSerializer serializer = Xml.newSerializer();
- final StringWriter writer = new StringWriter();
- serializer.setOutput(writer);
- serializer.startDocument("UTF-8", true);
- serializer.startTag("", TAG_PROJECT);
- serializer.attribute("", ATTR_ASPECT_RATIO, Integer.toString(mAspectRatio));
-
- serializer.startTag("", TAG_MEDIA_ITEMS);
- for (MediaItem mediaItem : mMediaItems) {
- serializer.startTag("", TAG_MEDIA_ITEM);
- serializer.attribute("", ATTR_ID, mediaItem.getId());
- serializer.attribute("", ATTR_TYPE, mediaItem.getClass().getSimpleName());
- serializer.attribute("", ATTR_FILENAME, mediaItem.getFilename());
- serializer.attribute("", ATTR_RENDERING_MODE, Integer.toString(
- mediaItem.getRenderingMode()));
- if (mediaItem instanceof MediaVideoItem) {
- final MediaVideoItem mvi = (MediaVideoItem)mediaItem;
- serializer
- .attribute("", ATTR_BEGIN_TIME, Long.toString(mvi.getBoundaryBeginTime()));
- serializer.attribute("", ATTR_END_TIME, Long.toString(mvi.getBoundaryEndTime()));
- serializer.attribute("", ATTR_VOLUME, Integer.toString(mvi.getVolume()));
- serializer.attribute("", ATTR_MUTED, Boolean.toString(mvi.isMuted()));
- if (mvi.getAudioWaveformFilename() != null) {
- serializer.attribute("", ATTR_AUDIO_WAVEFORM_FILENAME,
- mvi.getAudioWaveformFilename());
- }
- } else if (mediaItem instanceof MediaImageItem) {
- serializer.attribute("", ATTR_DURATION,
- Long.toString(mediaItem.getTimelineDuration()));
+ public synchronized MediaItem removeMediaItem(String mediaItemId) {
+ final String firstItemString = mMediaItems.get(0).getId();
+ final MediaItem mediaItem = getMediaItem(mediaItemId);
+ if (mediaItem != null) {
+ /**
+ * Remove the media item
+ */
+ mMediaItems.remove(mediaItem);
+ if (mediaItem instanceof MediaImageItem) {
+ ((MediaImageItem)mediaItem).invalidate();
}
-
final List<Overlay> overlays = mediaItem.getAllOverlays();
if (overlays.size() > 0) {
- serializer.startTag("", TAG_OVERLAYS);
for (Overlay overlay : overlays) {
- serializer.startTag("", TAG_OVERLAY);
- serializer.attribute("", ATTR_ID, overlay.getId());
- serializer.attribute("", ATTR_TYPE, overlay.getClass().getSimpleName());
- serializer.attribute("", ATTR_BEGIN_TIME,
- Long.toString(overlay.getStartTime()));
- serializer.attribute("", ATTR_DURATION, Long.toString(overlay.getDuration()));
if (overlay instanceof OverlayFrame) {
final OverlayFrame overlayFrame = (OverlayFrame)overlay;
- overlayFrame.save(getPath());
- if (overlayFrame.getFilename() != null) {
- serializer.attribute("", ATTR_FILENAME, overlayFrame.getFilename());
- }
+ overlayFrame.invalidate();
}
-
- // Save the user attributes
- serializer.startTag("", TAG_OVERLAY_USER_ATTRIBUTES);
- final Map<String, String> userAttributes = overlay.getUserAttributes();
- for (String name : userAttributes.keySet()) {
- final String value = userAttributes.get(name);
- if (value != null) {
- serializer.attribute("", name, value);
- }
- }
- serializer.endTag("", TAG_OVERLAY_USER_ATTRIBUTES);
-
- serializer.endTag("", TAG_OVERLAY);
}
- serializer.endTag("", TAG_OVERLAYS);
}
- final List<Effect> effects = mediaItem.getAllEffects();
- if (effects.size() > 0) {
- serializer.startTag("", TAG_EFFECTS);
- for (Effect effect : effects) {
- serializer.startTag("", TAG_EFFECT);
- serializer.attribute("", ATTR_ID, effect.getId());
- serializer.attribute("", ATTR_TYPE, effect.getClass().getSimpleName());
- serializer.attribute("", ATTR_BEGIN_TIME,
- Long.toString(effect.getStartTime()));
- serializer.attribute("", ATTR_DURATION, Long.toString(effect.getDuration()));
- if (effect instanceof EffectColor) {
- final EffectColor colorEffect = (EffectColor)effect;
- serializer.attribute("", ATTR_COLOR_EFFECT_TYPE,
- Integer.toString(colorEffect.getType()));
- if (colorEffect.getType() == EffectColor.TYPE_COLOR ||
- colorEffect.getType() == EffectColor.TYPE_GRADIENT) {
- serializer.attribute("", ATTR_COLOR_EFFECT_VALUE,
- Integer.toString(colorEffect.getColor()));
- }
- } else if (effect instanceof EffectKenBurns) {
- final Rect startRect = ((EffectKenBurns)effect).getStartRect();
- serializer.attribute("", ATTR_START_RECT_L,
- Integer.toString(startRect.left));
- serializer.attribute("", ATTR_START_RECT_T,
- Integer.toString(startRect.top));
- serializer.attribute("", ATTR_START_RECT_R,
- Integer.toString(startRect.right));
- serializer.attribute("", ATTR_START_RECT_B,
- Integer.toString(startRect.bottom));
+ /**
+ * Remove the adjacent transitions
+ */
+ removeAdjacentTransitions(mediaItem);
+ computeTimelineDuration();
+ }
+ mMANativeHelper.setGeneratePreview(true);
+ /**
+         * If the removed item was the first media item on the storyboard,
+         * regenerate the project thumbnail
+ */
+ if (firstItemString.equals(mediaItemId)) {
+ generateProjectThumbnail();
+ }
- final Rect endRect = ((EffectKenBurns)effect).getEndRect();
- serializer.attribute("", ATTR_END_RECT_L, Integer.toString(endRect.left));
- serializer.attribute("", ATTR_END_RECT_T, Integer.toString(endRect.top));
- serializer.attribute("", ATTR_END_RECT_R, Integer.toString(endRect.right));
- serializer.attribute("", ATTR_END_RECT_B,
- Integer.toString(endRect.bottom));
- }
+ if (mediaItem instanceof MediaVideoItem) {
+ /**
+ * Delete the graph file
+ */
+ ((MediaVideoItem)mediaItem).invalidate();
+ }
+ return mediaItem;
+ }
- serializer.endTag("", TAG_EFFECT);
- }
- serializer.endTag("", TAG_EFFECTS);
- }
+ private synchronized MediaItem removeMediaItem(String mediaItemId,
+ boolean flag) {
+ final String firstItemString = mMediaItems.get(0).getId();
- serializer.endTag("", TAG_MEDIA_ITEM);
+ final MediaItem mediaItem = getMediaItem(mediaItemId);
+ if (mediaItem != null) {
+ /**
+ * Remove the media item
+ */
+ mMediaItems.remove(mediaItem);
+ /**
+ * Remove the adjacent transitions
+ */
+ removeAdjacentTransitions(mediaItem);
+ computeTimelineDuration();
}
- serializer.endTag("", TAG_MEDIA_ITEMS);
+ mMANativeHelper.setGeneratePreview(true);
- serializer.startTag("", TAG_TRANSITIONS);
+ /**
+         * If the removed item was the first media item on the storyboard,
+         * regenerate the project thumbnail
+ */
+ if (firstItemString.equals(mediaItemId)) {
+ generateProjectThumbnail();
+ }
+ return mediaItem;
+ }
- for (Transition transition : mTransitions) {
- serializer.startTag("", TAG_TRANSITION);
- serializer.attribute("", ATTR_ID, transition.getId());
- serializer.attribute("", ATTR_TYPE, transition.getClass().getSimpleName());
- serializer.attribute("", ATTR_DURATION, Long.toString(transition.getDuration()));
- serializer.attribute("", ATTR_BEHAVIOR, Integer.toString(transition.getBehavior()));
- final MediaItem afterMediaItem = transition.getAfterMediaItem();
- if (afterMediaItem != null) {
- serializer.attribute("", ATTR_AFTER_MEDIA_ITEM_ID, afterMediaItem.getId());
- }
+ /*
+ * {@inheritDoc}
+ */
+ public synchronized Transition removeTransition(String transitionId) {
+ final Transition transition = getTransition(transitionId);
+ if (transition == null) {
+ throw new IllegalStateException("Transition not found: "
+ + transitionId);
+ }
- final MediaItem beforeMediaItem = transition.getBeforeMediaItem();
- if (beforeMediaItem != null) {
- serializer.attribute("", ATTR_BEFORE_MEDIA_ITEM_ID, beforeMediaItem.getId());
- }
+ /**
+ * Remove the transition references
+ */
+ final MediaItem afterMediaItem = transition.getAfterMediaItem();
+ if (afterMediaItem != null) {
+ afterMediaItem.setEndTransition(null);
+ }
- if (transition instanceof TransitionSliding) {
- serializer.attribute("", ATTR_DIRECTION,
- Integer.toString(((TransitionSliding)transition).getDirection()));
- } else if (transition instanceof TransitionAlpha) {
- TransitionAlpha ta = (TransitionAlpha)transition;
- serializer.attribute("", ATTR_BLENDING, Integer.toString(ta.getBlendingPercent()));
- serializer.attribute("", ATTR_INVERT, Boolean.toString(ta.isInvert()));
- if (ta.getMaskFilename() != null) {
- serializer.attribute("", ATTR_MASK, ta.getMaskFilename());
- }
- }
- serializer.endTag("", TAG_TRANSITION);
+ final MediaItem beforeMediaItem = transition.getBeforeMediaItem();
+ if (beforeMediaItem != null) {
+ beforeMediaItem.setBeginTransition(null);
}
- serializer.endTag("", TAG_TRANSITIONS);
- serializer.startTag("", TAG_AUDIO_TRACKS);
- for (AudioTrack at : mAudioTracks) {
- serializer.startTag("", TAG_AUDIO_TRACK);
- serializer.attribute("", ATTR_ID, at.getId());
- serializer.attribute("", ATTR_FILENAME, at.getFilename());
- serializer.attribute("", ATTR_START_TIME, Long.toString(at.getStartTime()));
- serializer.attribute("", ATTR_BEGIN_TIME, Long.toString(at.getBoundaryBeginTime()));
- serializer.attribute("", ATTR_END_TIME, Long.toString(at.getBoundaryEndTime()));
- serializer.attribute("", ATTR_VOLUME, Integer.toString(at.getVolume()));
- serializer.attribute("", ATTR_DUCK_ENABLED, Boolean.toString(at.isDuckingEnabled()));
- serializer.attribute("", ATTR_DUCKED_TRACK_VOLUME, Integer.toString(at.getDuckedTrackVolume()));
- serializer.attribute("", ATTR_DUCK_THRESHOLD, Integer.toString(at.getDuckingThreshhold()));
- serializer.attribute("", ATTR_MUTED, Boolean.toString(at.isMuted()));
- serializer.attribute("", ATTR_LOOP, Boolean.toString(at.isLooping()));
- if (at.getAudioWaveformFilename() != null) {
- serializer.attribute("", ATTR_AUDIO_WAVEFORM_FILENAME,
- at.getAudioWaveformFilename());
- }
+ mTransitions.remove(transition);
+ transition.invalidate();
+ computeTimelineDuration();
+ mMANativeHelper.setGeneratePreview(true);
+ return transition;
+ }
- serializer.endTag("", TAG_AUDIO_TRACK);
+ /*
+ * {@inheritDoc}
+ */
+ public long renderPreviewFrame(SurfaceHolder surfaceHolder, long timeMs) {
+ long result = 0;
+ int surfaceWidth = 0;
+ int surfaceHeight = 0;
+ Rect frame;
+
+ if (surfaceHolder == null) {
+ throw new IllegalArgumentException("Surface Holder is null");
}
- serializer.endTag("", TAG_AUDIO_TRACKS);
- serializer.endTag("", TAG_PROJECT);
- serializer.endDocument();
+ if (timeMs < 0) {
+            throw new IllegalArgumentException("Requested time is negative");
+        } else if (timeMs > mDurationMs) {
+            throw new IllegalArgumentException(
+                    "Requested time is beyond the timeline duration");
+ }
+ if (mMANativeHelper != null) {
+ if (mMANativeHelper.mInvalidatePreviewArray) {
+ return -1;
+ }
+        } else {
+ return -1;
+ }
+ boolean semAcquireDone = false;
+
+        try {
+ mPreviewSemaphore.acquire();
+ semAcquireDone = true;
+ Surface surface = surfaceHolder.getSurface();
+ frame = surfaceHolder.getSurfaceFrame();
+ surfaceWidth = frame.width();
+ surfaceHeight = frame.height();
+
+ if (surface == null) {
+ throw new RuntimeException("Surface could not be retrieved from Surface holder");
+ }
- // Save the metadata XML file
- final FileOutputStream out = new FileOutputStream(new File(getPath(), PROJECT_FILENAME));
- out.write(writer.toString().getBytes());
- out.flush();
- out.close();
+ if (!mMANativeHelper.mInvalidatePreviewArray) {
+ if (mMediaItems.size() > 0) {
+                    result = mMANativeHelper.renderPreviewFrame(surface,
+                            timeMs, surfaceWidth, surfaceHeight);
+                } else {
+ result = 0;
+ }
+            } else {
+ result = -1;
+ }
+
+ } catch (InterruptedException ex) {
+            Log.e(TAG, "Sem acquire NOT successful in renderPreviewFrame");
+        } finally {
+ if (semAcquireDone) {
+ mPreviewSemaphore.release();
+ }
+ }
+ return result;
}
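    /*
     * Illustrative usage only, not part of this patch: rendering a single storyboard
     * frame. A negative return value, per the implementation above, means the preview
     * data is stale and generatePreview() must run first. The VideoEditor and
     * SurfaceHolder are supplied by the caller.
     */
    import android.media.videoeditor.VideoEditor;
    import android.view.SurfaceHolder;

    public class FrameRenderer {
        /** Renders the frame at timeMs, regenerating stale preview data if needed. */
        public static long showFrameAt(VideoEditor editor, SurfaceHolder holder,
                long timeMs) {
            long shownMs = editor.renderPreviewFrame(holder, timeMs);
            if (shownMs < 0) {
                // A null listener is also how VideoEditorFactory.load() invokes this.
                editor.generatePreview(null);
                shownMs = editor.renderPreviewFrame(holder, timeMs);
            }
            return shownMs;
        }
    }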
/**
- * Load the project form XML
+     * Load the project from XML
*/
- private void load() throws FileNotFoundException, XmlPullParserException, IOException {
+ private void load() throws FileNotFoundException, XmlPullParserException,
+ IOException {
final File file = new File(mProjectPath, PROJECT_FILENAME);
+ /**
+ * Load the metadata
+ */
final FileInputStream fis = new FileInputStream(file);
-
try {
- // Load the metadata
final XmlPullParser parser = Xml.newPullParser();
parser.setInput(fis, "UTF-8");
int eventType = parser.getEventType();
@@ -763,44 +963,59 @@ public class VideoEditorImpl implements VideoEditor {
case XmlPullParser.START_TAG: {
name = parser.getName();
if (TAG_PROJECT.equals(name)) {
- mAspectRatio = Integer.parseInt(parser.getAttributeValue("",
- ATTR_ASPECT_RATIO));
+ mAspectRatio =
+ Integer.parseInt(parser.getAttributeValue("",
+ ATTR_ASPECT_RATIO));
+ final boolean mInvalidatePreviewArray =
+ Boolean.parseBoolean(parser.getAttributeValue("",
+ ATTR_PREVIEW_PREPARE));
+ mMANativeHelper.setGeneratePreview(mInvalidatePreviewArray);
+
+ final boolean mRegenPCM =
+ Boolean.parseBoolean(parser.getAttributeValue("",
+ ATTR_REGENERATE_PCM));
+ mMANativeHelper.setAudioflag(mRegenPCM);
+
} else if (TAG_MEDIA_ITEM.equals(name)) {
- final String mediaItemId = parser.getAttributeValue("", ATTR_ID);
- final String type = parser.getAttributeValue("", ATTR_TYPE);
- final String filename = parser.getAttributeValue("", ATTR_FILENAME);
- final int renderingMode = Integer.parseInt(
- parser.getAttributeValue("", ATTR_RENDERING_MODE));
+ final String mediaItemId =
+ parser.getAttributeValue("", ATTR_ID);
+ final String type =
+ parser.getAttributeValue("", ATTR_TYPE);
+ final String filename =
+ parser.getAttributeValue("", ATTR_FILENAME);
+ final int renderingMode =
+ Integer.parseInt(parser.getAttributeValue("",
+ ATTR_RENDERING_MODE));
if (MediaImageItem.class.getSimpleName().equals(type)) {
- final long durationMs = Long.parseLong(
- parser.getAttributeValue("", ATTR_DURATION));
+ final long durationMs = Long.parseLong(parser.getAttributeValue("",
+ ATTR_DURATION));
currentMediaItem = new MediaImageItem(this, mediaItemId, filename,
durationMs, renderingMode);
} else if (MediaVideoItem.class.getSimpleName().equals(type)) {
- final long beginMs = Long.parseLong(
- parser.getAttributeValue("", ATTR_BEGIN_TIME));
- final long endMs = Long.parseLong(
- parser.getAttributeValue("", ATTR_END_TIME));
- final int volume = Integer.parseInt(
- parser.getAttributeValue("", ATTR_VOLUME));
- final boolean muted = Boolean.parseBoolean(
- parser.getAttributeValue("", ATTR_MUTED));
- final String audioWaveformFilename =
- parser.getAttributeValue("", ATTR_AUDIO_WAVEFORM_FILENAME);
+ final long beginMs = Long.parseLong(parser.getAttributeValue("",
+ ATTR_BEGIN_TIME));
+ final long endMs = Long.parseLong(parser.getAttributeValue("",
+ ATTR_END_TIME));
+ final int volume = Integer.parseInt(parser.getAttributeValue("",
+ ATTR_VOLUME));
+ final boolean muted = Boolean.parseBoolean(parser.getAttributeValue("",
+ ATTR_MUTED));
+ final String audioWaveformFilename = parser.getAttributeValue("",
+ ATTR_AUDIO_WAVEFORM_FILENAME);
currentMediaItem = new MediaVideoItem(this, mediaItemId, filename,
renderingMode, beginMs, endMs, volume, muted,
audioWaveformFilename);
- final long beginTimeMs = Long.parseLong(
- parser.getAttributeValue("", ATTR_BEGIN_TIME));
- final long endTimeMs = Long.parseLong(
- parser.getAttributeValue("", ATTR_END_TIME));
- ((MediaVideoItem)currentMediaItem).setExtractBoundaries(
- beginTimeMs, endTimeMs);
+ final long beginTimeMs = Long.parseLong(parser.getAttributeValue("",
+ ATTR_BEGIN_TIME));
+ final long endTimeMs = Long.parseLong(parser.getAttributeValue("",
+ ATTR_END_TIME));
+ ((MediaVideoItem)currentMediaItem).setExtractBoundaries(beginTimeMs,
+ endTimeMs);
- final int volumePercent = Integer.parseInt(
- parser.getAttributeValue("", ATTR_VOLUME));
+ final int volumePercent = Integer.parseInt(parser.getAttributeValue("",
+ ATTR_VOLUME));
((MediaVideoItem)currentMediaItem).setVolume(volumePercent);
} else {
Log.e(TAG, "Unknown media item type: " + type);
@@ -836,6 +1051,18 @@ public class VideoEditorImpl implements VideoEditor {
if (effect != null) {
currentMediaItem.addEffect(effect);
}
+ if (effect instanceof EffectKenBurns) {
+ String filename = parser.getAttributeValue("", ATTR_GENERATED_IMAGE_CLIP);
+
+                                if (filename != null && new File(filename).exists()) {
+                                    ((MediaImageItem)currentMediaItem).setGeneratedImageClip(filename);
+                                    ((MediaImageItem)currentMediaItem).setRegenerateClip(false);
+                                } else {
+                                    ((MediaImageItem)currentMediaItem).setGeneratedImageClip(null);
+                                    ((MediaImageItem)currentMediaItem).setRegenerateClip(true);
+                                }
+ }
}
} else if (TAG_AUDIO_TRACK.equals(name)) {
final AudioTrack audioTrack = parseAudioTrack(parser);
@@ -879,10 +1106,15 @@ public class VideoEditorImpl implements VideoEditor {
private Transition parseTransition(XmlPullParser parser) {
final String transitionId = parser.getAttributeValue("", ATTR_ID);
final String type = parser.getAttributeValue("", ATTR_TYPE);
- final long durationMs = Long.parseLong(parser.getAttributeValue("", ATTR_DURATION));
- final int behavior = Integer.parseInt(parser.getAttributeValue("", ATTR_BEHAVIOR));
+ final long durationMs = Long.parseLong(parser.getAttributeValue("",
+ ATTR_DURATION));
+ final int behavior = Integer.parseInt(parser.getAttributeValue("",
+ ATTR_BEHAVIOR));
+ final boolean isTransitionGenerated;
- final String beforeMediaItemId = parser.getAttributeValue("", ATTR_BEFORE_MEDIA_ITEM_ID);
+
+ final String beforeMediaItemId = parser.getAttributeValue("",
+ ATTR_BEFORE_MEDIA_ITEM_ID);
final MediaItem beforeMediaItem;
if (beforeMediaItemId != null) {
beforeMediaItem = getMediaItem(beforeMediaItemId);
@@ -890,7 +1122,8 @@ public class VideoEditorImpl implements VideoEditor {
beforeMediaItem = null;
}
- final String afterMediaItemId = parser.getAttributeValue("", ATTR_AFTER_MEDIA_ITEM_ID);
+ final String afterMediaItemId = parser.getAttributeValue("",
+ ATTR_AFTER_MEDIA_ITEM_ID);
final MediaItem afterMediaItem;
if (afterMediaItemId != null) {
afterMediaItem = getMediaItem(afterMediaItemId);
@@ -927,9 +1160,22 @@ public class VideoEditorImpl implements VideoEditor {
afterMediaItem.setEndTransition(transition);
}
+ isTransitionGenerated = Boolean.parseBoolean(parser.getAttributeValue("",
+ ATTR_IS_TRANSITION_GENERATED));
+        if (isTransitionGenerated) {
+            final String transitionFile = parser.getAttributeValue("",
+                    ATTR_GENERATED_TRANSITION_CLIP);
+
+            if (transitionFile != null && new File(transitionFile).exists()) {
+ transition.setFilename(transitionFile);
+ } else {
+ transition.setFilename(null);
+ }
+ }
return transition;
}
+
/**
* Parse the overlay
*
@@ -941,17 +1187,36 @@ public class VideoEditorImpl implements VideoEditor {
private Overlay parseOverlay(XmlPullParser parser, MediaItem mediaItem) {
final String overlayId = parser.getAttributeValue("", ATTR_ID);
final String type = parser.getAttributeValue("", ATTR_TYPE);
- final long durationMs = Long.parseLong(parser.getAttributeValue("", ATTR_DURATION));
- final long startTimeMs = Long.parseLong(parser.getAttributeValue("", ATTR_BEGIN_TIME));
+ final long durationMs = Long.parseLong(parser.getAttributeValue("",
+ ATTR_DURATION));
+ final long startTimeMs = Long.parseLong(parser.getAttributeValue("",
+ ATTR_BEGIN_TIME));
final Overlay overlay;
if (OverlayFrame.class.getSimpleName().equals(type)) {
final String filename = parser.getAttributeValue("", ATTR_FILENAME);
- overlay = new OverlayFrame(mediaItem, overlayId, filename, startTimeMs, durationMs);
+ overlay = new OverlayFrame(mediaItem, overlayId, filename,
+ startTimeMs, durationMs);
} else {
overlay = null;
}
+ final String overlayRgbFileName = parser.getAttributeValue("",
+ ATTR_OVERLAY_RGB_FILENAME);
+ if (overlayRgbFileName != null) {
+ ((OverlayFrame)overlay).setFilename(overlayRgbFileName);
+
+ final int overlayFrameWidth =
+ Integer.parseInt(parser.getAttributeValue("",
+ ATTR_OVERLAY_FRAME_WIDTH));
+ final int overlayFrameHeight =
+ Integer.parseInt(parser.getAttributeValue("",
+ ATTR_OVERLAY_FRAME_HEIGHT));
+
+ ((OverlayFrame)overlay).setOverlayFrameWidth(overlayFrameWidth);
+ ((OverlayFrame)overlay).setOverlayFrameHeight(overlayFrameHeight);
+ }
+
return overlay;
}
@@ -966,35 +1231,47 @@ public class VideoEditorImpl implements VideoEditor {
private Effect parseEffect(XmlPullParser parser, MediaItem mediaItem) {
final String effectId = parser.getAttributeValue("", ATTR_ID);
final String type = parser.getAttributeValue("", ATTR_TYPE);
- final long durationMs = Long.parseLong(parser.getAttributeValue("", ATTR_DURATION));
- final long startTimeMs = Long.parseLong(parser.getAttributeValue("", ATTR_BEGIN_TIME));
+ final long durationMs = Long.parseLong(parser.getAttributeValue("",
+ ATTR_DURATION));
+ final long startTimeMs = Long.parseLong(parser.getAttributeValue("",
+ ATTR_BEGIN_TIME));
final Effect effect;
if (EffectColor.class.getSimpleName().equals(type)) {
final int colorEffectType =
- Integer.parseInt(parser.getAttributeValue("", ATTR_COLOR_EFFECT_TYPE));
+ Integer.parseInt(parser.getAttributeValue("",
+ ATTR_COLOR_EFFECT_TYPE));
final int color;
if (colorEffectType == EffectColor.TYPE_COLOR
|| colorEffectType == EffectColor.TYPE_GRADIENT) {
- color = Integer.parseInt(parser.getAttributeValue("", ATTR_COLOR_EFFECT_VALUE));
+ color = Integer.parseInt(parser.getAttributeValue("",
+ ATTR_COLOR_EFFECT_VALUE));
} else {
color = 0;
}
- effect = new EffectColor(mediaItem, effectId, startTimeMs, durationMs,
- colorEffectType, color);
+ effect = new EffectColor(mediaItem, effectId, startTimeMs,
+ durationMs, colorEffectType, color);
} else if (EffectKenBurns.class.getSimpleName().equals(type)) {
final Rect startRect = new Rect(
- Integer.parseInt(parser.getAttributeValue("", ATTR_START_RECT_L)),
- Integer.parseInt(parser.getAttributeValue("", ATTR_START_RECT_T)),
- Integer.parseInt(parser.getAttributeValue("", ATTR_START_RECT_R)),
- Integer.parseInt(parser.getAttributeValue("", ATTR_START_RECT_B)));
+ Integer.parseInt(parser.getAttributeValue("",
+ ATTR_START_RECT_LEFT)),
+ Integer.parseInt(parser.getAttributeValue("",
+ ATTR_START_RECT_TOP)),
+ Integer.parseInt(parser.getAttributeValue("",
+ ATTR_START_RECT_RIGHT)),
+ Integer.parseInt(parser.getAttributeValue("",
+ ATTR_START_RECT_BOTTOM)));
final Rect endRect = new Rect(
- Integer.parseInt(parser.getAttributeValue("", ATTR_END_RECT_L)),
- Integer.parseInt(parser.getAttributeValue("", ATTR_END_RECT_T)),
- Integer.parseInt(parser.getAttributeValue("", ATTR_END_RECT_R)),
- Integer.parseInt(parser.getAttributeValue("", ATTR_END_RECT_B)));
- effect = new EffectKenBurns(mediaItem, effectId, startRect, endRect, startTimeMs,
- durationMs);
+ Integer.parseInt(parser.getAttributeValue("",
+ ATTR_END_RECT_LEFT)),
+ Integer.parseInt(parser.getAttributeValue("",
+ ATTR_END_RECT_TOP)),
+ Integer.parseInt(parser.getAttributeValue("",
+ ATTR_END_RECT_RIGHT)),
+ Integer.parseInt(parser.getAttributeValue("",
+ ATTR_END_RECT_BOTTOM)));
+ effect = new EffectKenBurns(mediaItem, effectId, startRect, endRect,
+ startTimeMs, durationMs);
} else {
effect = null;
}
@@ -1012,19 +1289,38 @@ public class VideoEditorImpl implements VideoEditor {
private AudioTrack parseAudioTrack(XmlPullParser parser) {
final String audioTrackId = parser.getAttributeValue("", ATTR_ID);
final String filename = parser.getAttributeValue("", ATTR_FILENAME);
- final long startTimeMs = Long.parseLong(parser.getAttributeValue("", ATTR_START_TIME));
- final long beginMs = Long.parseLong(parser.getAttributeValue("", ATTR_BEGIN_TIME));
- final long endMs = Long.parseLong(parser.getAttributeValue("", ATTR_END_TIME));
- final int volume = Integer.parseInt(parser.getAttributeValue("", ATTR_VOLUME));
- final boolean muted = Boolean.parseBoolean(parser.getAttributeValue("", ATTR_MUTED));
- final boolean loop = Boolean.parseBoolean(parser.getAttributeValue("", ATTR_LOOP));
- final boolean duckingEnabled = Boolean.parseBoolean(parser.getAttributeValue("", ATTR_DUCK_ENABLED));
- final int duckThreshold = Integer.parseInt(parser.getAttributeValue("", ATTR_DUCK_THRESHOLD));
- final int duckedTrackVolume = Integer.parseInt(parser.getAttributeValue("", ATTR_DUCKED_TRACK_VOLUME));
- final String waveformFilename = parser.getAttributeValue("", ATTR_AUDIO_WAVEFORM_FILENAME);
+ final long startTimeMs = Long.parseLong(parser.getAttributeValue("",
+ ATTR_START_TIME));
+ final long beginMs = Long.parseLong(parser.getAttributeValue("",
+ ATTR_BEGIN_TIME));
+ final long endMs = Long.parseLong(parser.getAttributeValue("",
+ ATTR_END_TIME));
+ final int volume = Integer.parseInt(parser.getAttributeValue("",
+ ATTR_VOLUME));
+ final boolean muted = Boolean.parseBoolean(parser.getAttributeValue("",
+ ATTR_MUTED));
+ final boolean loop = Boolean.parseBoolean(parser.getAttributeValue("",
+ ATTR_LOOP));
+ final boolean duckingEnabled =
+ Boolean.parseBoolean(parser.getAttributeValue("",
+ ATTR_DUCK_ENABLED));
+ final int duckThreshold = Integer.parseInt(parser.getAttributeValue("",
+ ATTR_DUCK_THRESHOLD));
+ final int duckedTrackVolume =
+ Integer.parseInt(parser.getAttributeValue("",
+ ATTR_DUCKED_TRACK_VOLUME));
+
+ final String waveformFilename = parser.getAttributeValue("",
+ ATTR_AUDIO_WAVEFORM_FILENAME);
try {
- final AudioTrack audioTrack = new AudioTrack(this, audioTrackId, filename, startTimeMs,
- beginMs, endMs, loop, volume, muted, duckingEnabled, duckThreshold, duckedTrackVolume, waveformFilename);
+ final AudioTrack audioTrack = new AudioTrack(this, audioTrackId,
+ filename, startTimeMs,
+ beginMs, endMs, loop,
+ volume, muted,
+ duckingEnabled,
+ duckThreshold,
+ duckedTrackVolume,
+ waveformFilename);
return audioTrack;
} catch (IOException ex) {
@@ -1032,64 +1328,244 @@ public class VideoEditorImpl implements VideoEditor {
}
}
- /*
- * {@inheritDoc}
- */
- public void cancelExport(String filename) {
- }
/*
* {@inheritDoc}
*/
- public void export(String filename, int height, int bitrate, ExportProgressListener listener)
- throws IOException {
- }
+ public void save() throws IOException {
+ final XmlSerializer serializer = Xml.newSerializer();
+ final StringWriter writer = new StringWriter();
+ serializer.setOutput(writer);
+ serializer.startDocument("UTF-8", true);
+ serializer.startTag("", TAG_PROJECT);
+ serializer.attribute("",
+ ATTR_ASPECT_RATIO, Integer.toString(mAspectRatio));
+ serializer.attribute("", ATTR_PREVIEW_PREPARE,
+ Boolean.toString(mMANativeHelper.getGeneratePreview()));
- /*
- * {@inheritDoc}
- */
- public void export(String filename, int height, int bitrate, int audioCodec, int videoCodec,
- ExportProgressListener listener) throws IOException {
- }
+ serializer.attribute("", ATTR_REGENERATE_PCM,
+ Boolean.toString(mMANativeHelper.getAudioflag()));
- /*
- * {@inheritDoc}
- */
- public void generatePreview(MediaProcessingProgressListener listener) {
- // Generate all the needed transitions
- for (Transition transition : mTransitions) {
- if (!transition.isGenerated()) {
- transition.generate();
+ serializer.startTag("", TAG_MEDIA_ITEMS);
+ for (MediaItem mediaItem : mMediaItems) {
+ serializer.startTag("", TAG_MEDIA_ITEM);
+ serializer.attribute("", ATTR_ID, mediaItem.getId());
+ serializer.attribute("", ATTR_TYPE,
+ mediaItem.getClass().getSimpleName());
+ serializer.attribute("", ATTR_FILENAME, mediaItem.getFilename());
+ serializer.attribute("", ATTR_RENDERING_MODE, Integer.toString(
+ mediaItem.getRenderingMode()));
+ if (mediaItem instanceof MediaVideoItem) {
+ final MediaVideoItem mvi = (MediaVideoItem)mediaItem;
+ serializer
+ .attribute("", ATTR_BEGIN_TIME,
+ Long.toString(mvi.getBoundaryBeginTime()));
+ serializer.attribute("", ATTR_END_TIME,
+ Long.toString(mvi.getBoundaryEndTime()));
+ serializer.attribute("", ATTR_VOLUME,
+ Integer.toString(mvi.getVolume()));
+ serializer.attribute("", ATTR_MUTED,
+ Boolean.toString(mvi.isMuted()));
+ if (mvi.getAudioWaveformFilename() != null) {
+ serializer.attribute("", ATTR_AUDIO_WAVEFORM_FILENAME,
+ mvi.getAudioWaveformFilename());
+ }
+ } else if (mediaItem instanceof MediaImageItem) {
+ serializer.attribute("", ATTR_DURATION,
+ Long.toString(mediaItem.getTimelineDuration()));
}
+
+ final List<Overlay> overlays = mediaItem.getAllOverlays();
+ if (overlays.size() > 0) {
+ serializer.startTag("", TAG_OVERLAYS);
+ for (Overlay overlay : overlays) {
+ serializer.startTag("", TAG_OVERLAY);
+ serializer.attribute("", ATTR_ID, overlay.getId());
+ serializer.attribute("",
+ ATTR_TYPE, overlay.getClass().getSimpleName());
+ serializer.attribute("", ATTR_BEGIN_TIME,
+ Long.toString(overlay.getStartTime()));
+ serializer.attribute("", ATTR_DURATION,
+ Long.toString(overlay.getDuration()));
+ if (overlay instanceof OverlayFrame) {
+ final OverlayFrame overlayFrame = (OverlayFrame)overlay;
+ overlayFrame.save(getPath());
+ if (overlayFrame.getBitmapImageFileName() != null) {
+ serializer.attribute("", ATTR_FILENAME,
+ overlayFrame.getBitmapImageFileName());
+ }
+
+ if (overlayFrame.getFilename() != null) {
+ serializer.attribute("",
+ ATTR_OVERLAY_RGB_FILENAME,
+ overlayFrame.getFilename());
+ serializer.attribute("", ATTR_OVERLAY_FRAME_WIDTH,
+ Integer.toString(overlayFrame.getOverlayFrameWidth()));
+ serializer.attribute("", ATTR_OVERLAY_FRAME_HEIGHT,
+ Integer.toString(overlayFrame.getOverlayFrameHeight()));
+ }
+
+ }
+
+ /**
+ * Save the user attributes
+ */
+ serializer.startTag("", TAG_OVERLAY_USER_ATTRIBUTES);
+ final Map<String, String> userAttributes = overlay.getUserAttributes();
+ for (String name : userAttributes.keySet()) {
+ final String value = userAttributes.get(name);
+ if (value != null) {
+ serializer.attribute("", name, value);
+ }
+ }
+ serializer.endTag("", TAG_OVERLAY_USER_ATTRIBUTES);
+
+ serializer.endTag("", TAG_OVERLAY);
+ }
+ serializer.endTag("", TAG_OVERLAYS);
+ }
+
+ final List<Effect> effects = mediaItem.getAllEffects();
+ if (effects.size() > 0) {
+ serializer.startTag("", TAG_EFFECTS);
+ for (Effect effect : effects) {
+ serializer.startTag("", TAG_EFFECT);
+ serializer.attribute("", ATTR_ID, effect.getId());
+ serializer.attribute("",
+ ATTR_TYPE, effect.getClass().getSimpleName());
+ serializer.attribute("", ATTR_BEGIN_TIME,
+ Long.toString(effect.getStartTime()));
+ serializer.attribute("", ATTR_DURATION,
+ Long.toString(effect.getDuration()));
+ if (effect instanceof EffectColor) {
+ final EffectColor colorEffect = (EffectColor)effect;
+ serializer.attribute("", ATTR_COLOR_EFFECT_TYPE,
+ Integer.toString(colorEffect.getType()));
+ if (colorEffect.getType() == EffectColor.TYPE_COLOR ||
+ colorEffect.getType() == EffectColor.TYPE_GRADIENT) {
+ serializer.attribute("", ATTR_COLOR_EFFECT_VALUE,
+ Integer.toString(colorEffect.getColor()));
+ }
+ } else if (effect instanceof EffectKenBurns) {
+ final Rect startRect = ((EffectKenBurns)effect).getStartRect();
+ serializer.attribute("", ATTR_START_RECT_LEFT,
+ Integer.toString(startRect.left));
+ serializer.attribute("", ATTR_START_RECT_TOP,
+ Integer.toString(startRect.top));
+ serializer.attribute("", ATTR_START_RECT_RIGHT,
+ Integer.toString(startRect.right));
+ serializer.attribute("", ATTR_START_RECT_BOTTOM,
+ Integer.toString(startRect.bottom));
+
+ final Rect endRect = ((EffectKenBurns)effect).getEndRect();
+ serializer.attribute("", ATTR_END_RECT_LEFT,
+ Integer.toString(endRect.left));
+ serializer.attribute("", ATTR_END_RECT_TOP,
+ Integer.toString(endRect.top));
+ serializer.attribute("", ATTR_END_RECT_RIGHT,
+ Integer.toString(endRect.right));
+ serializer.attribute("", ATTR_END_RECT_BOTTOM,
+ Integer.toString(endRect.bottom));
+ final MediaItem mItem = effect.getMediaItem();
+ serializer.attribute("", ATTR_GENERATED_IMAGE_CLIP,
+ ((MediaImageItem)mItem).getGeneratedImageClip());
+ }
+
+ serializer.endTag("", TAG_EFFECT);
+ }
+ serializer.endTag("", TAG_EFFECTS);
+ }
+
+ serializer.endTag("", TAG_MEDIA_ITEM);
}
+ serializer.endTag("", TAG_MEDIA_ITEMS);
- // This is necessary because the user may had called setDuration on
- // MediaImageItems
- computeTimelineDuration();
- }
+ serializer.startTag("", TAG_TRANSITIONS);
- /*
- * {@inheritDoc}
- */
- public void release() {
- stopPreview();
- }
+ for (Transition transition : mTransitions) {
+ serializer.startTag("", TAG_TRANSITION);
+ serializer.attribute("", ATTR_ID, transition.getId());
+ serializer.attribute("", ATTR_TYPE,
+ transition.getClass().getSimpleName());
+ serializer.attribute("", ATTR_DURATION,
+ Long.toString(transition.getDuration()));
+ serializer.attribute("", ATTR_BEHAVIOR,
+ Integer.toString(transition.getBehavior()));
+ serializer.attribute("", ATTR_IS_TRANSITION_GENERATED,
+ Boolean.toString(transition.isGenerated()));
+            if (transition.isGenerated()) {
+ serializer.attribute("", ATTR_GENERATED_TRANSITION_CLIP,
+ transition.mFilename);
+ }
+ final MediaItem afterMediaItem = transition.getAfterMediaItem();
+ if (afterMediaItem != null) {
+ serializer.attribute("", ATTR_AFTER_MEDIA_ITEM_ID,
+ afterMediaItem.getId());
+ }
- /*
- * {@inheritDoc}
- */
- public long getDuration() {
- // Since MediaImageItem can change duration we need to compute the
- // duration here
- computeTimelineDuration();
- return mDurationMs;
- }
+ final MediaItem beforeMediaItem = transition.getBeforeMediaItem();
+ if (beforeMediaItem != null) {
+ serializer.attribute("", ATTR_BEFORE_MEDIA_ITEM_ID,
+ beforeMediaItem.getId());
+ }
- /*
- * {@inheritDoc}
- */
- public int getAspectRatio() {
- return mAspectRatio;
+ if (transition instanceof TransitionSliding) {
+ serializer.attribute("", ATTR_DIRECTION,
+ Integer.toString(((TransitionSliding)transition).getDirection()));
+ } else if (transition instanceof TransitionAlpha) {
+ TransitionAlpha ta = (TransitionAlpha)transition;
+ serializer.attribute("", ATTR_BLENDING,
+ Integer.toString(ta.getBlendingPercent()));
+ serializer.attribute("", ATTR_INVERT,
+ Boolean.toString(ta.isInvert()));
+ if (ta.getMaskFilename() != null) {
+ serializer.attribute("", ATTR_MASK, ta.getMaskFilename());
+ }
+ }
+ serializer.endTag("", TAG_TRANSITION);
+ }
+ serializer.endTag("", TAG_TRANSITIONS);
+ serializer.startTag("", TAG_AUDIO_TRACKS);
+ for (AudioTrack at : mAudioTracks) {
+ serializer.startTag("", TAG_AUDIO_TRACK);
+ serializer.attribute("", ATTR_ID, at.getId());
+ serializer.attribute("", ATTR_FILENAME, at.getFilename());
+ serializer.attribute("", ATTR_START_TIME,
+ Long.toString(at.getStartTime()));
+ serializer.attribute("", ATTR_BEGIN_TIME,
+ Long.toString(at.getBoundaryBeginTime()));
+ serializer.attribute("", ATTR_END_TIME,
+ Long.toString(at.getBoundaryEndTime()));
+ serializer.attribute("", ATTR_VOLUME,
+ Integer.toString(at.getVolume()));
+ serializer.attribute("", ATTR_DUCK_ENABLED,
+ Boolean.toString(at.isDuckingEnabled()));
+ serializer.attribute("", ATTR_DUCKED_TRACK_VOLUME,
+ Integer.toString(at.getDuckedTrackVolume()));
+ serializer.attribute("", ATTR_DUCK_THRESHOLD,
+ Integer.toString(at.getDuckingThreshhold()));
+ serializer.attribute("", ATTR_MUTED, Boolean.toString(at.isMuted()));
+ serializer.attribute("", ATTR_LOOP, Boolean.toString(at.isLooping()));
+ if (at.getAudioWaveformFilename() != null) {
+ serializer.attribute("", ATTR_AUDIO_WAVEFORM_FILENAME,
+ at.getAudioWaveformFilename());
+ }
+
+ serializer.endTag("", TAG_AUDIO_TRACK);
+ }
+ serializer.endTag("", TAG_AUDIO_TRACKS);
+
+ serializer.endTag("", TAG_PROJECT);
+ serializer.endDocument();
+
+ /**
+ * Save the metadata XML file
+ */
+ final FileOutputStream out = new FileOutputStream(new File(getPath(),
+ PROJECT_FILENAME));
+ out.write(writer.toString().getBytes());
+ out.flush();
+ out.close();
}
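The save() method above walks the whole timeline (media items with their overlays and effects, then transitions, then audio tracks) and serializes it with XmlSerializer into a single XML document, which is finally written to PROJECT_FILENAME under getPath(). Below is a minimal, self-contained sketch of the same XmlSerializer nesting pattern; the literal tag and attribute strings are placeholders, not the values of the TAG_*/ATTR_* constants used in this patch.

    // Sketch of the startTag/attribute/endTag pattern used by save().
    // "project", "item", "id" and "aspect_ratio" are placeholder names.
    import java.io.StringWriter;
    import org.xmlpull.v1.XmlSerializer;
    import android.util.Xml;

    public class ProjectXmlSketch {
        public static String writeSample() throws Exception {
            final XmlSerializer serializer = Xml.newSerializer();
            final StringWriter writer = new StringWriter();
            serializer.setOutput(writer);
            serializer.startDocument("UTF-8", true);
            serializer.startTag("", "project");              // like TAG_PROJECT
            serializer.attribute("", "aspect_ratio", "1");   // like ATTR_ASPECT_RATIO
            serializer.startTag("", "item");                 // like TAG_MEDIA_ITEM
            serializer.attribute("", "id", "m1");            // like ATTR_ID
            serializer.endTag("", "item");
            serializer.endTag("", "project");
            serializer.endDocument();
            return writer.toString();                        // nested, escaped XML
        }
    }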
/*
@@ -1097,7 +1573,9 @@ public class VideoEditorImpl implements VideoEditor {
*/
public void setAspectRatio(int aspectRatio) {
mAspectRatio = aspectRatio;
-
+ /**
+ * Invalidate all transitions
+ */
for (Transition transition : mTransitions) {
transition.invalidate();
}
@@ -1106,53 +1584,83 @@ public class VideoEditorImpl implements VideoEditor {
/*
* {@inheritDoc}
*/
- public long renderPreviewFrame(SurfaceHolder surfaceHolder, long timeMs) {
- if (mPreviewThread != null) {
- throw new IllegalStateException("Previewing is in progress");
- }
- return timeMs;
- }
+ public void startPreview(SurfaceHolder surfaceHolder, long fromMs, long toMs,
+ boolean loop, int callbackAfterFrameCount,
+ PreviewProgressListener listener) {
- /*
- * {@inheritDoc}
- */
- public synchronized void startPreview(SurfaceHolder surfaceHolder, long fromMs, long toMs,
- boolean loop, int callbackAfterFrameCount, PreviewProgressListener listener) {
+ if (surfaceHolder == null) {
+            throw new IllegalArgumentException("surfaceHolder cannot be null");
+ }
+ if (listener == null) {
+            throw new IllegalArgumentException("listener cannot be null");
+ }
if (fromMs >= mDurationMs) {
- return;
+            throw new IllegalArgumentException("Requested start time exceeds the timeline duration");
+ }
+
+ if (fromMs < 0) {
+            throw new IllegalArgumentException("Requested start time cannot be negative");
+ }
+
+ boolean semAcquireDone = false;
+        try {
+            mPreviewSemaphore.acquire();
+            semAcquireDone = true;
+        } catch (InterruptedException ex) {
+            Log.e("VideoEditorImpl", "Semaphore acquire interrupted in startPreview");
+ }
+
+ if (semAcquireDone) {
+ Surface mSurface = surfaceHolder.getSurface();
+
+ if (mSurface == null) {
+                Log.e("VideoEditorImpl",
+                        "Surface could not be retrieved from surface holder");
+                throw new RuntimeException();
+ }
+
+ if (mMediaItems.size() > 0) {
+ try {
+ mMANativeHelper.previewStoryBoard(mMediaItems, mTransitions,
+ mAudioTracks, null);
+ mMANativeHelper.doPreview(mSurface, fromMs, toMs, loop,
+ callbackAfterFrameCount, listener);
+ mPreviewInProgress = true;
+ } catch (IllegalArgumentException ex) {
+ mPreviewSemaphore.release();
+                    Log.e("VideoEditorImpl", "Illegal argument exception in doPreview");
+ throw ex;
+ } catch (IllegalStateException ex) {
+ mPreviewSemaphore.release();
+                    Log.e("VideoEditorImpl", "Illegal state exception in doPreview");
+ throw ex;
+ } catch (RuntimeException ex) {
+ mPreviewSemaphore.release();
+                    Log.e("VideoEditorImpl", "Runtime exception in doPreview");
+ throw ex;
+ }
+ }
+ /**
+             * The preview semaphore is released when the client calls stopPreview()
+ */
}
- mPreviewThread = new PreviewThread(fromMs, toMs, loop, callbackAfterFrameCount, listener);
- mPreviewThread.start();
}
/*
* {@inheritDoc}
*/
- public synchronized long stopPreview() {
- final long stopTimeMs;
- if (mPreviewThread != null) {
- stopTimeMs = mPreviewThread.stopPreview();
- mPreviewThread = null;
- } else {
- stopTimeMs = 0;
+ public long stopPreview() {
+ if (mPreviewInProgress) {
+ long result = mMANativeHelper.stopPreview();
+ mPreviewInProgress = false;
+ /**
+             * Release the semaphore acquired in startPreview()
+ */
+ mPreviewSemaphore.release();
+ return result;
}
- return stopTimeMs;
- }
-
- /**
- * Compute the duration
- */
- private void computeTimelineDuration() {
- mDurationMs = 0;
- final int mediaItemsCount = mMediaItems.size();
- for (int i = 0; i < mediaItemsCount; i++) {
- final MediaItem mediaItem = mMediaItems.get(i);
- mDurationMs += mediaItem.getTimelineDuration();
- if (mediaItem.getEndTransition() != null) {
- if (i < mediaItemsCount - 1) {
- mDurationMs -= mediaItem.getEndTransition().getDuration();
- }
- }
+ else {
+ return 0;
}
}
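startPreview() above acquires mPreviewSemaphore before handing the storyboard to the native layer, and the matching stopPreview() is what releases it, so the two calls must always be paired. A hypothetical caller sketch, not part of this patch: editor, holder and listener are assumed to already exist, and the time values are illustrative only.

    // Every successful startPreview() is balanced by a stopPreview(), which
    // releases the semaphore acquired in startPreview().
    editor.startPreview(holder, 0 /* fromMs */, 5000 /* toMs */,
            false /* loop */, 4 /* callbackAfterFrameCount */, listener);
    // ... preview runs; progress is reported through 'listener' ...
    long lastPositionMs = editor.stopPreview();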
@@ -1209,7 +1717,7 @@ public class VideoEditorImpl implements VideoEditor {
/**
* Remove the transition after this media item
*
- * @param index The media item index
+     * @param index The index of the media item
*/
private void removeTransitionAfter(int index) {
final MediaItem mediaItem = mMediaItems.get(index);
@@ -1220,7 +1728,9 @@ public class VideoEditorImpl implements VideoEditor {
it.remove();
t.invalidate();
mediaItem.setEndTransition(null);
- // Invalidate the reference in the next media item
+ /**
+ * Invalidate the reference in the next media item
+ */
if (index < mMediaItems.size() - 1) {
mMediaItems.get(index + 1).setBeginTransition(null);
}
@@ -1228,4 +1738,82 @@ public class VideoEditorImpl implements VideoEditor {
}
}
}
+
+ /**
+ * Compute the duration
+ */
+ private void computeTimelineDuration() {
+ mDurationMs = 0;
+ final int mediaItemsCount = mMediaItems.size();
+ for (int i = 0; i < mediaItemsCount; i++) {
+ final MediaItem mediaItem = mMediaItems.get(i);
+ mDurationMs += mediaItem.getTimelineDuration();
+ if (mediaItem.getEndTransition() != null) {
+ if (i < mediaItemsCount - 1) {
+ mDurationMs -= mediaItem.getEndTransition().getDuration();
+ }
+ }
+ }
+ }
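The computeTimelineDuration() method sums the timeline duration of every media item and subtracts the duration of each end transition, because a transition overlaps the two clips it joins. A short worked example of that rule, with illustrative values:

    // Two clips of 8000 ms and 6000 ms joined by a 2000 ms end transition
    // overlap for the length of the transition.
    long durationMs = 8000 + 6000 - 2000;   // 12000 ms on the timeline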
+
+ /*
+ * Generate the project thumbnail
+ */
+ private void generateProjectThumbnail() {
+ /*
+ * If a thumbnail already exists, then delete it first
+ */
+        final File thumbnailFile = new File(mProjectPath + "/" + THUMBNAIL_FILENAME);
+        if (thumbnailFile.exists()) {
+            thumbnailFile.delete();
+        }
+ /*
+         * Generate a new thumbnail for the project from the first media item
+ */
+ if (mMediaItems.size() > 0) {
+ MediaItem mI = mMediaItems.get(0);
+ /*
+             * Initialize the width for the default aspect ratio, i.e. 16:9
+ */
+ int height = 480;
+ int width = 854;
+ switch (getAspectRatio()) {
+ case MediaProperties.ASPECT_RATIO_3_2:
+ width = 720;
+ break;
+ case MediaProperties.ASPECT_RATIO_4_3:
+ width = 640;
+ break;
+ case MediaProperties.ASPECT_RATIO_5_3:
+ width = 800;
+ break;
+ case MediaProperties.ASPECT_RATIO_11_9:
+ width = 586;
+ break;
+ case MediaProperties.ASPECT_RATIO_16_9:
+ case MediaProperties.ASPECT_RATIO_UNDEFINED:
+ break;
+ }
+
+ Bitmap projectBitmap = null;
+ try {
+ projectBitmap = mI.getThumbnail(width, height, 500);
+ } catch (IllegalArgumentException e) {
+                throw new IllegalArgumentException("Illegal argument error creating project thumbnail");
+            } catch (IOException e) {
+                throw new IllegalArgumentException("IO error creating project thumbnail");
+ }
+ try {
+ FileOutputStream stream = new FileOutputStream(mProjectPath + "/"
+ + THUMBNAIL_FILENAME);
+ projectBitmap.compress(Bitmap.CompressFormat.JPEG, 100, stream);
+ stream.flush();
+ stream.close();
+            } catch (IOException e) {
+                throw new IllegalArgumentException("Error creating project thumbnail");
+ } finally {
+ projectBitmap.recycle();
+ }
+ }
+ }
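The hard-coded widths in the aspect-ratio switch above correspond to a fixed 480-pixel thumbnail height scaled by each aspect ratio: 480 x 4/3 = 640, 480 x 3/2 = 720, 480 x 5/3 = 800, and 480 x 16/9 = 853.3, which the code rounds to 854. The sketch below states that relationship explicitly; it is an assumption about where the constants come from, not something spelled out in the patch.

    // Assumed derivation of the width table above for a 480-pixel-high thumbnail.
    static int thumbnailWidth(int height, int aspectNum, int aspectDen) {
        return Math.round(height * (float) aspectNum / aspectDen);
    }
    // thumbnailWidth(480, 4, 3)  -> 640
    // thumbnailWidth(480, 3, 2)  -> 720
    // thumbnailWidth(480, 16, 9) -> 853 (the switch above uses 854)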
}
diff --git a/media/java/android/media/videoeditor/WaveformData.java b/media/java/android/media/videoeditor/WaveformData.java
index 1b865cadda92..6c10e3c2d009 100644..100755
--- a/media/java/android/media/videoeditor/WaveformData.java
+++ b/media/java/android/media/videoeditor/WaveformData.java
@@ -14,24 +14,29 @@
* limitations under the License.
*/
+
package android.media.videoeditor;
+import java.io.File;
+import java.io.FileInputStream;
import java.io.IOException;
/**
* Class which describes the waveform data of an audio track. The gain values
- * represent the average gain for an audio frame. For audio codecs which do
- * not operate on a per frame bases (eg. ALAW, ULAW) a reasonable audio frame
+ * represent the average gain for an audio frame. For audio codecs which do not
+ * operate on a per-frame basis (e.g. ALAW, ULAW), a reasonable audio frame
* duration will be assumed (eg. 50ms).
* {@hide}
*/
public class WaveformData {
- // Instance variables
+ /*
+ * Instance variables
+ */
private final int mFrameDurationMs;
private final int mFramesCount;
private final short[] mGains;
- /**
+ /*
* This constructor shall not be used
*/
@SuppressWarnings("unused")
@@ -41,18 +46,74 @@ public class WaveformData {
mGains = null;
}
- /**
+ /*
* Constructor
*
* @param audioWaveformFilename The name of the audio waveform file
+ *
+     * The file format is as follows:
+ * <ul>
+ * <li>first 4 bytes provide the number of samples for each value, as
+ * big-endian signed</li>
+ * <li>4 following bytes is the total number of values in the file, as
+ * big-endian signed</li>
+ * <li>then, all values follow as bytes</li>
+ * </ul>
+ *
+ * @throws IOException on failure of file input stream operations
+ * @throws IllegalArgumentException if audioWaveformFilename is null
*/
WaveformData(String audioWaveformFilename) throws IOException {
- // TODO: Read these values from the file
- mFrameDurationMs = 20;
- mFramesCount = 300000 / mFrameDurationMs;
- mGains = new short[mFramesCount];
- for (int i = 0; i < mFramesCount; i++) {
- mGains[i] = (short)((i * 5) % 256);
+
+ if (audioWaveformFilename == null) {
+ throw new IllegalArgumentException("WaveformData : filename is null");
+ }
+
+ FileInputStream audioGraphFileReadHandle = null;
+
+ try {
+ final File audioGraphFileContext = new File(audioWaveformFilename);
+
+ audioGraphFileReadHandle = new FileInputStream(audioGraphFileContext);
+ /*
+ * Read frame duration
+ */
+            final byte[] tempFrameDuration = new byte[4];
+
+ audioGraphFileReadHandle.read(tempFrameDuration, 0, 4);
+
+ int tempFrameDurationMs = 0;
+ int tempFramesCounter = 0;
+ for (int i = 0; i < 4; i++) {
+ tempFrameDurationMs = (tempFrameDurationMs << 8);
+ tempFrameDurationMs = (tempFrameDurationMs | (tempFrameDuration[i] & 0xff));
+ }
+ mFrameDurationMs = tempFrameDurationMs;
+
+ /*
+ * Read count
+ */
+            final byte[] tempFramesCount = new byte[4];
+
+ audioGraphFileReadHandle.read(tempFramesCount, 0, 4);
+ for (int i = 0; i < 4; i++) {
+ tempFramesCounter = (tempFramesCounter << 8);
+ tempFramesCounter = (tempFramesCounter | (tempFramesCount[i] & 0xff));
+ }
+ mFramesCount = tempFramesCounter;
+
+ /*
+ * Capture the graph values
+ */
+ mGains = new short[mFramesCount];
+
+ for (int i = 0; i < mFramesCount; i++) {
+ mGains[i] = (short)audioGraphFileReadHandle.read();
+ }
+ } finally {
+ if (audioGraphFileReadHandle != null) {
+ audioGraphFileReadHandle.close();
+ }
}
}
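The constructor above parses the waveform file byte by byte: a 4-byte big-endian frame duration in milliseconds, a 4-byte big-endian frame count, then one unsigned byte per frame gain. The sketch below writes a file in that layout; it is inferred from the reads above for illustration and is not an API shipped by this patch.

    // Illustrative writer for the waveform layout parsed by WaveformData;
    // DataOutputStream.writeInt() emits big-endian, matching the reads above.
    import java.io.DataOutputStream;
    import java.io.FileOutputStream;
    import java.io.IOException;

    final class WaveformFileWriterSketch {
        static void write(String path, int frameDurationMs, byte[] gains)
                throws IOException {
            final DataOutputStream out =
                    new DataOutputStream(new FileOutputStream(path));
            try {
                out.writeInt(frameDurationMs);  // 4 bytes, big-endian
                out.writeInt(gains.length);     // 4-byte big-endian frame count
                out.write(gains);               // one gain value per frame (0..255)
            } finally {
                out.close();
            }
        }
    }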
@@ -72,7 +133,7 @@ public class WaveformData {
/**
* @return The array of frame gains. The size of the array is the frames
- * count. The values of the frame gains range from 0 to 256.
+ * count. The values of the frame gains range from 0 to 255.
*/
public short[] getFrameGains() {
return mGains;
diff --git a/media/jni/mediaeditor/Android.mk b/media/jni/mediaeditor/Android.mk
new file mode 100755
index 000000000000..9d6e7d4073ca
--- /dev/null
+++ b/media/jni/mediaeditor/Android.mk
@@ -0,0 +1,92 @@
+#
+# Copyright (C) 2011 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+LOCAL_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ VideoEditorMain.cpp \
+ VideoEditorClasses.cpp \
+ VideoEditorOsal.cpp \
+ VideoEditorJava.cpp \
+ VideoEditorPropertiesMain.cpp \
+ VideoEditorThumbnailMain.cpp \
+ VideoBrowserMain.c
+
+LOCAL_C_INCLUDES += \
+ $(TOP)/frameworks/base/core/jni \
+ $(TOP)/frameworks/base/include \
+ $(TOP)/frameworks/base/include/media \
+ $(TOP)/frameworks/base/media/libmediaplayerservice \
+ $(TOP)/frameworks/base/media/libstagefright \
+ $(TOP)/frameworks/base/media/libstagefright/include \
+ $(TOP)/frameworks/base/media/libstagefright/rtsp \
+ $(JNI_H_INCLUDE) \
+ $(call include-path-for, corecg graphics) \
+ $(TOP)/external/opencore/extern_libs_v2/khronos/openmax/include \
+ $(TOP)/external/opencore/android \
+ $(TOP)/vendor/qcom/proprietary/qdsp6/mm-core/omxcore/inc \
+ $(TOP)/frameworks/base/core/jni/mediaeditor \
+ $(TOP)/frameworks/media/libvideoeditor/vss/inc \
+ $(TOP)/frameworks/media/libvideoeditor/vss/common/inc \
+ $(TOP)/frameworks/media/libvideoeditor/vss/mcs/inc \
+ $(TOP)/frameworks/media/libvideoeditor/vss/stagefrightshells/inc \
+ $(TOP)/frameworks/media/libvideoeditor/lvpp \
+ $(TOP)/frameworks/media/libvideoeditor/osal/inc
+
+LOCAL_SHARED_LIBRARIES := \
+ libcutils \
+ libutils \
+ libandroid_runtime \
+ libnativehelper \
+ libmedia \
+ libbinder \
+ libstagefright \
+ libstagefright_omx \
+ libsurfaceflinger_client \
+ libvideoeditorplayer
+
+
+LOCAL_CFLAGS += \
+ -DUSE_STAGEFRIGHT_CODECS \
+ -DUSE_STAGEFRIGHT_AUDIODEC \
+ -DUSE_STAGEFRIGHT_VIDEODEC \
+ -DUSE_STAGEFRIGHT_AUDIOENC \
+ -DUSE_STAGEFRIGHT_VIDEOENC \
+ -DUSE_STAGEFRIGHT_READERS \
+ -DUSE_STAGEFRIGHT_3GPP_READER
+
+
+LOCAL_LDFLAGS += -fuse-ld=bfd
+
+LOCAL_STATIC_LIBRARIES := \
+ libvideoeditor_core \
+ libstagefright_color_conversion \
+ libvideoeditor_3gpwriter \
+ libvideoeditor_mcs \
+ libvideoeditor_videofilters \
+ libvideoeditor_stagefrightshells \
+ libvideoeditor_osal
+
+LOCAL_MODULE:= libvideoeditor_jni
+
+# Don't prelink this library. For more efficient code, you may want
+# to add this library to the prelink map and set this to true.
+LOCAL_PRELINK_MODULE := false
+
+LOCAL_MODULE_TAGS := eng development
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/media/jni/mediaeditor/VideoBrowserInternal.h b/media/jni/mediaeditor/VideoBrowserInternal.h
new file mode 100755
index 000000000000..ed63129ca3cd
--- /dev/null
+++ b/media/jni/mediaeditor/VideoBrowserInternal.h
@@ -0,0 +1,131 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#ifndef VIDEO_BROWSER_INTERNAL_H
+#define VIDEO_BROWSER_INTERNAL_H
+
+#include "VideoBrowserMain.h"
+
+#include "M4READER_Common.h"
+#include "M4DECODER_Common.h"
+
+
+#define VIDEO_BROWSER_BGR565
+
+
+#define VIDEO_BROWSER_PREDECODE_TIME 2000 /* In milliseconds */
+
+/*---------------------------- MACROS ----------------------------*/
+#define CHECK_PTR(fct, p, err, errValue) \
+{ \
+ if (M4OSA_NULL == p) \
+ { \
+ err = errValue ; \
+ M4OSA_TRACE1_1("" #fct "(L%d): " #p " is NULL, returning " #errValue "", __LINE__) ; \
+ goto fct##_cleanUp; \
+ } \
+}
+
+#define CHECK_ERR(fct, err) \
+{ \
+ if (M4OSA_ERR_IS_ERROR(err)) \
+ { \
+ M4OSA_TRACE1_2("" #fct "(L%d): ERROR 0x%.8x returned", __LINE__,err) ; \
+ goto fct##_cleanUp; \
+ } \
+ else if (M4OSA_ERR_IS_WARNING(err)) \
+ { \
+ M4OSA_TRACE2_2("" #fct "(L%d): WARNING 0x%.8x returned", __LINE__,err) ; \
+ } \
+}
+
+#define CHECK_STATE(fct, state, pC) \
+{ \
+ if (state != pC->m_state) \
+ { \
+ M4OSA_TRACE1_1("" #fct " called in bad state %d", pC->m_state) ; \
+ err = M4ERR_STATE ; \
+ goto fct##_cleanUp; \
+ } \
+}
+
+#define SAFE_FREE(p) \
+{ \
+ if (M4OSA_NULL != p) \
+ { \
+ M4OSA_free((M4OSA_MemAddr32)p) ; \
+ p = M4OSA_NULL ; \
+ } \
+}
+
+/*--- Video Browser state ---*/
+typedef enum
+{
+ VideoBrowser_kVBCreating,
+ VideoBrowser_kVBOpened,
+ VideoBrowser_kVBBrowsing
+} VideoBrowser_videoBrowerState;
+
+
+/*--- Video Browser execution context. ---*/
+typedef struct
+{
+ VideoBrowser_videoBrowerState m_state ;
+ VideoBrowser_videoBrowerDrawMode m_drawmode;
+
+ M4OSA_Context g_hbmp2;
+ M4OSA_Context dc;
+ M4OSA_Int16* g_bmPixels2;
+
+ /*--- Reader parameters ---*/
+ M4OSA_FileReadPointer m_fileReadPtr;
+ M4READER_GlobalInterface* m_3gpReader ;
+ M4READER_DataInterface* m_3gpData ;
+ M4READER_MediaType m_mediaType ;
+ M4OSA_Context m_pReaderCtx ;
+
+ M4_StreamHandler* m_pStreamHandler ;
+ M4_AccessUnit m_accessUnit ;
+
+ /*--- Decoder parameters ---*/
+ M4DECODER_VideoInterface* m_pDecoder ;
+ M4OSA_Context m_pDecoderCtx ;
+
+ /*--- Common display parameters ---*/
+ M4OSA_UInt32 m_x ;
+ M4OSA_UInt32 m_y ;
+ M4VIFI_ImagePlane m_outputPlane[3] ;
+
+ /*--- Current browsing time ---*/
+ M4OSA_UInt32 m_currentCTS ;
+
+ /*--- Platform dependent display parameters ---*/
+ M4OSA_Context m_pCoreContext ;
+
+ /*--- Callback function settings ---*/
+ videoBrowser_Callback m_pfCallback;
+ M4OSA_Void* m_pCallbackUserData;
+
+ /*--- Codec Loader core context ---*/
+ M4OSA_Context m_pCodecLoaderContext;
+
+ /*--- Required color type ---*/
+ VideoBrowser_VideoColorType m_frameColorType;
+
+} VideoBrowserContext;
+
+#endif /* VIDEO_BROWSER_INTERNAL_H */
diff --git a/media/jni/mediaeditor/VideoBrowserMain.c b/media/jni/mediaeditor/VideoBrowserMain.c
new file mode 100755
index 000000000000..0d40f5638ec6
--- /dev/null
+++ b/media/jni/mediaeditor/VideoBrowserMain.c
@@ -0,0 +1,593 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "VideoEditorVideoDecoder.h"
+#include "VideoEditor3gpReader.h"
+
+#include <utils/Log.h>
+#include "VideoBrowserInternal.h"
+#include "LVOSA_FileReader_optim.h"
+
+//#define M4OSA_TRACE_LEVEL 1
+#if (M4OSA_TRACE_LEVEL >= 1)
+#undef M4OSA_TRACE1_0
+#undef M4OSA_TRACE1_1
+#undef M4OSA_TRACE1_2
+#undef M4OSA_TRACE1_3
+
+#define M4OSA_TRACE1_0(a) __android_log_print(ANDROID_LOG_INFO, "Thumbnail", a);
+#define M4OSA_TRACE1_1(a,b) __android_log_print(ANDROID_LOG_INFO, "Thumbnail", a,b);
+#define M4OSA_TRACE1_2(a,b,c) __android_log_print(ANDROID_LOG_INFO, "Thumbnail", a,b,c);
+#define M4OSA_TRACE1_3(a,b,c,d) __android_log_print(ANDROID_LOG_INFO, "Thumbnail", a,b,c,d);
+#endif
+
+/******************************************************************************
+ * M4OSA_ERR videoBrowserSetWindow(
+ * M4OSA_Context pContext, M4OSA_UInt32 x,
+ * M4OSA_UInt32 y, M4OSA_UInt32 dx, M4OSA_UInt32 dy);
+ * @brief This function sets the size and the position of the display.
+ * @param pContext (IN) : Video Browser context
+ * @param pPixelArray (IN) : Array to hold the video frame.
+ * @param x (IN) : Horizontal position of the top left
+ * corner
+ * @param y (IN) : Vertical position of the top left corner
+ * @param dx (IN) : Width of the display window
+ * @param dy (IN) : Height of the video window
+ * @return M4NO_ERROR / M4ERR_PARAMETER / M4ERR_STATE / M4ERR_ALLOC
+ ******************************************************************************/
+M4OSA_ERR videoBrowserSetWindow(
+ M4OSA_Context pContext,
+ M4OSA_Int32 *pPixelArray,
+ M4OSA_UInt32 x, M4OSA_UInt32 y,
+ M4OSA_UInt32 dx, M4OSA_UInt32 dy)
+{
+ VideoBrowserContext* pC = (VideoBrowserContext*)pContext;
+ M4OSA_ERR err = M4NO_ERROR;
+
+ M4OSA_TRACE2_5("videoBrowserSetWindow: entering with 0x%x %d %d %d %d ",
+ pContext, x, y, dx, dy);
+
+ /*--- Sanity checks ---*/
+ CHECK_PTR(videoBrowserSetWindow, pContext, err, M4ERR_PARAMETER);
+ CHECK_PTR(videoBrowserSetWindow, pPixelArray, err, M4ERR_PARAMETER);
+ CHECK_STATE(videoBrowserSetWindow, VideoBrowser_kVBOpened, pC);
+
+ pC->m_outputPlane[0].u_topleft = 0;
+
+ pC->m_outputPlane[0].u_height = dy;
+ pC->m_outputPlane[0].u_width = dx;
+ pC->m_x = x;
+ pC->m_y = y;
+
+ if (pC->m_frameColorType == VideoBrowser_kGB565) {
+ pC->m_outputPlane[0].u_stride = pC->m_outputPlane[0].u_width << 1;
+ pC->m_outputPlane[0].pac_data = (M4OSA_UInt8*)M4OSA_malloc(
+ pC->m_outputPlane[0].u_stride * pC->m_outputPlane[0].u_height,
+ VIDEOBROWSER, (M4OSA_Char *)"output plane");
+
+ CHECK_PTR(videoBrowserSetWindow,
+ pC->m_outputPlane[0].pac_data, err, M4ERR_ALLOC);
+ }
+ else if (pC->m_frameColorType == VideoBrowser_kYUV420) {
+ pC->m_outputPlane[0].u_stride = pC->m_outputPlane[0].u_width;
+ pC->m_outputPlane[1].u_height = pC->m_outputPlane[0].u_height >> 1;
+ pC->m_outputPlane[1].u_width = pC->m_outputPlane[0].u_width >> 1;
+ pC->m_outputPlane[1].u_topleft = 0;
+ pC->m_outputPlane[1].u_stride = pC->m_outputPlane[1].u_width;
+
+ pC->m_outputPlane[2].u_height = pC->m_outputPlane[0].u_height >> 1;
+ pC->m_outputPlane[2].u_width = pC->m_outputPlane[0].u_width >> 1;
+ pC->m_outputPlane[2].u_topleft = 0;
+ pC->m_outputPlane[2].u_stride = pC->m_outputPlane[2].u_width;
+
+ pC->m_outputPlane[0].pac_data = (M4OSA_UInt8*)pPixelArray;
+
+ CHECK_PTR(videoBrowserSetWindow,
+ pC->m_outputPlane[0].pac_data, err, M4ERR_ALLOC);
+
+ pC->m_outputPlane[1].pac_data =
+ pC->m_outputPlane[0].pac_data +
+ (pC->m_outputPlane[0].u_stride * pC->m_outputPlane[0].u_height);
+
+ pC->m_outputPlane[2].pac_data =
+ pC->m_outputPlane[1].pac_data +
+ (pC->m_outputPlane[1].u_stride * pC->m_outputPlane[1].u_height);
+ }
+
+
+ M4OSA_TRACE2_0("videoBrowserSetWindow returned NO ERROR");
+ return M4NO_ERROR;
+
+videoBrowserSetWindow_cleanUp:
+
+ M4OSA_TRACE2_1("videoBrowserSetWindow returned 0x%x", err);
+ return err;
+}
+
+/******************************************************************************
+* @brief This function allocates the resources needed for browsing a video file
+* @param ppContext (OUT): Pointer on a context filled by this function.
+* @param pURL (IN) : Path of the file to browse
+* @param DrawMode (IN) : Indicates which method is used to draw (direct draw, etc.)
+* @param ptrF (IN) : OSAL file reader function pointers used to access the file
+* @param pfCallback (IN) : Callback function to be called when a frame must be displayed
+* @param pCallbackData (IN) : User defined data that will be passed as parameter of the callback
+* @param clrType (IN) : Required color type.
+* @return M4NO_ERROR / M4ERR_PARAMETER / M4ERR_STATE / M4ERR_ALLOC
+******************************************************************************/
+M4OSA_ERR videoBrowserCreate(
+ M4OSA_Context* ppContext,
+ M4OSA_Char* pURL,
+ M4OSA_UInt32 DrawMode,
+ M4OSA_FileReadPointer* ptrF,
+ videoBrowser_Callback pfCallback,
+ M4OSA_Void* pCallbackData,
+ VideoBrowser_VideoColorType clrType)
+{
+ VideoBrowserContext* pContext = M4OSA_NULL;
+ M4READER_MediaFamily mediaFamily = M4READER_kMediaFamilyUnknown;
+ M4_StreamHandler* pStreamHandler = M4OSA_NULL;
+ M4_VideoStreamHandler* pVideoStreamHandler = M4OSA_NULL;
+ M4DECODER_VideoType decoderType;
+ M4DECODER_OutputFilter FilterOption;
+
+ M4OSA_Bool deb = M4OSA_TRUE;
+ M4OSA_ERR err = M4NO_ERROR;
+
+ M4OSA_TRACE1_2(
+ "videoBrowserCreate: entering with 0x%x 0x%x", ppContext, pURL);
+
+ /*--- Sanity checks ---*/
+ CHECK_PTR(videoBrowserCreate, ppContext, err, M4ERR_PARAMETER);
+ *ppContext = M4OSA_NULL ;
+ CHECK_PTR(videoBrowserCreate, pURL, err, M4ERR_PARAMETER);
+
+ /*--- Create context ---*/
+ pContext = (VideoBrowserContext*)M4OSA_malloc(
+ sizeof(VideoBrowserContext),
+ VIDEOBROWSER, (M4OSA_Char*)"Video browser context");
+
+ CHECK_PTR(videoBrowserCreate, pContext,err, M4ERR_ALLOC);
+ M4OSA_memset((M4OSA_MemAddr8)pContext, sizeof(VideoBrowserContext), 0);
+
+ /*--- Initialize the context parameters ---*/
+ pContext->m_state = VideoBrowser_kVBCreating ;
+ pContext->m_frameColorType = clrType;
+
+ /*--- Copy the file reader functions ---*/
+ M4OSA_memcpy((M4OSA_MemAddr8)&pContext->m_fileReadPtr,
+ (M4OSA_MemAddr8)ptrF,
+ sizeof(M4OSA_FileReadPointer)) ;
+
+ /* PR#SP00013 DGR bug 13 : first frame is not visible */
+ pContext->m_drawmode = DrawMode;
+
+
+ /* Retrieve the 3gp reader interface */
+ VideoEditor3gpReader_getInterface(&pContext->m_mediaType,
+ &pContext->m_3gpReader, &pContext->m_3gpData);
+
+ CHECK_PTR(videoBrowserCreate, pContext->m_3gpReader, err, M4ERR_ALLOC);
+ CHECK_PTR(videoBrowserCreate, pContext->m_3gpData, err, M4ERR_ALLOC);
+
+ /*--- Create the file reader ---*/
+ err = pContext->m_3gpReader->m_pFctCreate(&pContext->m_pReaderCtx);
+ CHECK_ERR(videoBrowserCreate, err);
+ CHECK_PTR(videoBrowserCreate, pContext->m_pReaderCtx, err, M4ERR_ALLOC);
+ pContext->m_3gpData->m_readerContext = pContext->m_pReaderCtx;
+
+ /*--- Set the OSAL file reader functions ---*/
+ err = pContext->m_3gpReader->m_pFctSetOption(
+ pContext->m_pReaderCtx,
+ M4READER_kOptionID_SetOsaFileReaderFctsPtr,
+ (M4OSA_DataOption)(&pContext->m_fileReadPtr));
+
+ CHECK_ERR(videoBrowserCreate, err) ;
+
+ /*--- Open the file ---*/
+ err = pContext->m_3gpReader->m_pFctOpen(pContext->m_pReaderCtx, pURL);
+ CHECK_ERR(videoBrowserCreate, err) ;
+
+ /*--- Try to find a video stream ---*/
+ while (err == M4NO_ERROR)
+ {
+ err = pContext->m_3gpReader->m_pFctGetNextStream(
+ pContext->m_pReaderCtx, &mediaFamily, &pStreamHandler);
+
+        /* In case we found a BIFS stream or another unsupported stream type */
+ if ((err == M4ERR_READER_UNKNOWN_STREAM_TYPE) ||
+ (err == M4WAR_TOO_MUCH_STREAMS))
+ {
+ err = M4NO_ERROR;
+ continue;
+ }
+
+ if (err != M4WAR_NO_MORE_STREAM)
+ {
+ if (M4READER_kMediaFamilyVideo != mediaFamily)
+ {
+ err = M4NO_ERROR;
+ continue;
+ }
+
+ pContext->m_pStreamHandler = pStreamHandler;
+
+ err = pContext->m_3gpReader->m_pFctReset(
+ pContext->m_pReaderCtx, pContext->m_pStreamHandler);
+
+ CHECK_ERR(videoBrowserCreate, err);
+
+ err = pContext->m_3gpReader->m_pFctFillAuStruct(
+ pContext->m_pReaderCtx,
+ pContext->m_pStreamHandler,
+ &pContext->m_accessUnit);
+
+ CHECK_ERR(videoBrowserCreate, err);
+
+ pVideoStreamHandler =
+ (M4_VideoStreamHandler*)pContext->m_pStreamHandler;
+
+ switch (pContext->m_pStreamHandler->m_streamType)
+ {
+ case M4DA_StreamTypeVideoMpeg4:
+ case M4DA_StreamTypeVideoH263:
+ {
+ pContext->m_pCodecLoaderContext = M4OSA_NULL;
+ decoderType = M4DECODER_kVideoTypeMPEG4;
+
+ err = VideoEditorVideoDecoder_getInterface_MPEG4(
+ &decoderType, &pContext->m_pDecoder);
+
+ CHECK_ERR(videoBrowserCreate, err) ;
+
+ err = pContext->m_pDecoder->m_pFctCreate(
+ &pContext->m_pDecoderCtx,
+ pContext->m_pStreamHandler,
+ pContext->m_3gpData,
+ &pContext->m_accessUnit,
+ pContext->m_pCodecLoaderContext) ;
+
+ CHECK_ERR(videoBrowserCreate, err) ;
+ }
+ break;
+
+ case M4DA_StreamTypeVideoMpeg4Avc:
+ {
+ pContext->m_pCodecLoaderContext = M4OSA_NULL;
+
+ decoderType = M4DECODER_kVideoTypeAVC;
+ err = VideoEditorVideoDecoder_getInterface_H264(
+ &decoderType, &pContext->m_pDecoder);
+ CHECK_ERR(videoBrowserCreate, err) ;
+
+ err = pContext->m_pDecoder->m_pFctCreate(
+ &pContext->m_pDecoderCtx,
+ pContext->m_pStreamHandler,
+ pContext->m_3gpData,
+ &pContext->m_accessUnit,
+ pContext->m_pCodecLoaderContext) ;
+
+ CHECK_ERR(videoBrowserCreate, err) ;
+ }
+ break;
+
+ default:
+ err = M4ERR_VB_MEDIATYPE_NOT_SUPPORTED;
+ goto videoBrowserCreate_cleanUp;
+ }
+ }
+ }
+
+ if (err == M4WAR_NO_MORE_STREAM)
+ {
+ err = M4NO_ERROR ;
+ }
+
+ if (M4OSA_NULL == pContext->m_pStreamHandler)
+ {
+ err = M4ERR_VB_NO_VIDEO ;
+ goto videoBrowserCreate_cleanUp ;
+ }
+
+ err = pContext->m_pDecoder->m_pFctSetOption(
+ pContext->m_pDecoderCtx,
+ M4DECODER_kOptionID_DeblockingFilter,
+ (M4OSA_DataOption)&deb);
+
+ if (err == M4WAR_DEBLOCKING_FILTER_NOT_IMPLEMENTED)
+ {
+ err = M4NO_ERROR;
+ }
+ CHECK_ERR(videoBrowserCreate, err);
+
+ FilterOption.m_pFilterUserData = M4OSA_NULL;
+
+
+ if (pContext->m_frameColorType == VideoBrowser_kGB565) {
+ FilterOption.m_pFilterFunction =
+ (M4OSA_Void*)M4VIFI_ResizeBilinearYUV420toBGR565;
+ }
+ else if (pContext->m_frameColorType == VideoBrowser_kYUV420) {
+ FilterOption.m_pFilterFunction =
+ (M4OSA_Void*)M4VIFI_ResizeBilinearYUV420toYUV420;
+ }
+ else {
+ err = M4ERR_PARAMETER;
+ goto videoBrowserCreate_cleanUp;
+ }
+
+ err = pContext->m_pDecoder->m_pFctSetOption(
+ pContext->m_pDecoderCtx,
+ M4DECODER_kOptionID_OutputFilter,
+ (M4OSA_DataOption)&FilterOption);
+
+ CHECK_ERR(videoBrowserCreate, err);
+
+ /* store the callback details */
+ pContext->m_pfCallback = pfCallback;
+ pContext->m_pCallbackUserData = pCallbackData;
+ /* store the callback details */
+
+ pContext->m_state = VideoBrowser_kVBOpened;
+ *ppContext = pContext;
+
+ M4OSA_TRACE1_0("videoBrowserCreate returned NO ERROR");
+ return M4NO_ERROR;
+
+videoBrowserCreate_cleanUp:
+
+ if (M4OSA_NULL != pContext)
+ {
+ if (M4OSA_NULL != pContext->m_pDecoderCtx)
+ {
+ pContext->m_pDecoder->m_pFctDestroy(pContext->m_pDecoderCtx);
+ pContext->m_pDecoderCtx = M4OSA_NULL;
+ }
+
+ if (M4OSA_NULL != pContext->m_pReaderCtx)
+ {
+ pContext->m_3gpReader->m_pFctClose(pContext->m_pReaderCtx);
+ pContext->m_3gpReader->m_pFctDestroy(pContext->m_pReaderCtx);
+ pContext->m_pReaderCtx = M4OSA_NULL;
+ }
+ SAFE_FREE(pContext->m_pDecoder);
+ SAFE_FREE(pContext->m_3gpReader);
+ SAFE_FREE(pContext->m_3gpData);
+ SAFE_FREE(pContext);
+ }
+
+ M4OSA_TRACE2_1("videoBrowserCreate returned 0x%x", err);
+ return err;
+}
+
+/******************************************************************************
+* M4OSA_ERR videoBrowserCleanUp(M4OSA_Context pContext);
+* @brief This function frees the resources needed for browsing a
+* video file.
+* @param pContext (IN) : Video browser context
+* @return M4NO_ERROR / M4ERR_PARAMETER / M4ERR_STATE
+******************************************************************************/
+M4OSA_ERR videoBrowserCleanUp(M4OSA_Context pContext)
+{
+ VideoBrowserContext* pC = (VideoBrowserContext*)pContext;
+ M4OSA_ERR err = M4NO_ERROR;
+
+ M4OSA_TRACE2_1("videoBrowserCleanUp: entering with 0x%x", pContext);
+
+ /*--- Sanity checks ---*/
+ CHECK_PTR(videoBrowserCleanUp, pContext, err, M4ERR_PARAMETER);
+
+ if (M4OSA_NULL != pC->m_pDecoderCtx)
+ {
+ pC->m_pDecoder->m_pFctDestroy(pC->m_pDecoderCtx);
+ pC->m_pDecoderCtx = M4OSA_NULL ;
+ }
+
+ if (M4OSA_NULL != pC->m_pReaderCtx)
+ {
+ pC->m_3gpReader->m_pFctClose(pC->m_pReaderCtx) ;
+ pC->m_3gpReader->m_pFctDestroy(pC->m_pReaderCtx);
+ pC->m_pReaderCtx = M4OSA_NULL;
+ }
+
+ SAFE_FREE(pC->m_pDecoder);
+ SAFE_FREE(pC->m_3gpReader);
+ SAFE_FREE(pC->m_3gpData);
+
+ if (pC->m_frameColorType != VideoBrowser_kYUV420) {
+ SAFE_FREE(pC->m_outputPlane[0].pac_data);
+ }
+ SAFE_FREE(pC);
+
+ M4OSA_TRACE2_0("videoBrowserCleanUp returned NO ERROR");
+ return M4NO_ERROR;
+
+videoBrowserCleanUp_cleanUp:
+
+ M4OSA_TRACE2_1("videoBrowserCleanUp returned 0x%x", err);
+ return err;
+}
+/******************************************************************************
+* M4OSA_ERR videoBrowserPrepareFrame(
+* M4OSA_Context pContext, M4OSA_UInt32* pTime);
+* @brief This function prepares the frame.
+* @param pContext (IN) : Video browser context
+* @param pTime (IN/OUT) : Pointer on the time to reach. Updated
+* by this function with the reached time
+* @return M4NO_ERROR / M4ERR_PARAMETER / M4ERR_STATE / M4ERR_ALLOC
+******************************************************************************/
+M4OSA_ERR videoBrowserPrepareFrame(M4OSA_Context pContext, M4OSA_UInt32* pTime)
+{
+ VideoBrowserContext* pC = (VideoBrowserContext*)pContext;
+ M4OSA_ERR err = M4NO_ERROR;
+ M4OSA_UInt32 targetTime = 0;
+ M4OSA_UInt32 jumpTime = 0;
+ M4_MediaTime timeMS = 0;
+ M4OSA_Int32 rapTime = 0;
+ M4OSA_Bool isBackward = M4OSA_FALSE;
+ M4OSA_Bool bJumpNeeded = M4OSA_FALSE;
+
+
+ /*--- Sanity checks ---*/
+ CHECK_PTR(videoBrowserPrepareFrame, pContext, err, M4ERR_PARAMETER);
+ CHECK_PTR(videoBrowserPrepareFrame, pTime, err, M4ERR_PARAMETER);
+
+ targetTime = *pTime ;
+
+ /*--- Check the state, if this is the first call to this function
+ we move to the state "browsing" ---*/
+ if (VideoBrowser_kVBOpened == pC->m_state)
+ {
+ pC->m_state = VideoBrowser_kVBBrowsing;
+ }
+ else if (VideoBrowser_kVBBrowsing != pC->m_state)
+ {
+ err = M4ERR_STATE ;
+ goto videoBrowserPrepareFrame_cleanUp;
+ }
+
+ /*--- Check the duration ---*/
+ /*--- If we jump backward, we need to jump ---*/
+ if (targetTime < pC->m_currentCTS)
+ {
+ isBackward = M4OSA_TRUE;
+ bJumpNeeded = M4OSA_TRUE;
+ }
+    /*--- If we jump to a time greater than "currentTime" + "predecodeTime"
+ we need to jump ---*/
+ else if (targetTime > (pC->m_currentCTS + VIDEO_BROWSER_PREDECODE_TIME))
+ {
+ bJumpNeeded = M4OSA_TRUE;
+ }
+
+ if (M4OSA_TRUE == bJumpNeeded)
+ {
+ rapTime = targetTime;
+ /*--- Retrieve the previous RAP time ---*/
+ err = pC->m_3gpReader->m_pFctGetPrevRapTime(
+ pC->m_pReaderCtx, pC->m_pStreamHandler, &rapTime);
+
+ CHECK_ERR(videoBrowserPrepareFrame, err);
+
+ jumpTime = rapTime;
+
+ err = pC->m_3gpReader->m_pFctJump(pC->m_pReaderCtx,
+ pC->m_pStreamHandler,
+ (M4OSA_Int32*)&jumpTime);
+ CHECK_ERR(videoBrowserPrepareFrame, err);
+ }
+
+ timeMS = (M4_MediaTime)targetTime;
+ err = pC->m_pDecoder->m_pFctDecode(
+ pC->m_pDecoderCtx, &timeMS, bJumpNeeded);
+
+ if ((err != M4NO_ERROR) && (err != M4WAR_NO_MORE_AU))
+ {
+ return err;
+ }
+
+ // FIXME:
+ // Not sure that I understand why we need a second jump logic here
+ if ((timeMS >= pC->m_currentCTS) && (M4OSA_TRUE == isBackward))
+ {
+ jumpTime = rapTime;
+ err = pC->m_3gpReader->m_pFctJump(
+ pC->m_pReaderCtx, pC->m_pStreamHandler, (M4OSA_Int32*)&jumpTime);
+
+ CHECK_ERR(videoBrowserPrepareFrame, err);
+
+ timeMS = (M4_MediaTime)rapTime;
+ err = pC->m_pDecoder->m_pFctDecode(
+ pC->m_pDecoderCtx, &timeMS, M4OSA_TRUE);
+
+ if ((err != M4NO_ERROR) && (err != M4WAR_NO_MORE_AU))
+ {
+ return err;
+ }
+ }
+
+ err = pC->m_pDecoder->m_pFctRender(
+ pC->m_pDecoderCtx, &timeMS, pC->m_outputPlane, M4OSA_TRUE);
+
+ if (M4WAR_VIDEORENDERER_NO_NEW_FRAME == err)
+ {
+ err = M4NO_ERROR;
+ }
+ CHECK_ERR(videoBrowserPrepareFrame, err) ;
+
+ pC->m_currentCTS = (M4OSA_UInt32)timeMS;
+
+ *pTime = pC->m_currentCTS;
+
+ return M4NO_ERROR;
+
+videoBrowserPrepareFrame_cleanUp:
+
+ if ((M4WAR_INVALID_TIME == err) || (M4WAR_NO_MORE_AU == err))
+ {
+ err = M4NO_ERROR;
+ }
+ else if (M4OSA_NULL != pC)
+ {
+ pC->m_currentCTS = 0;
+ }
+
+ M4OSA_TRACE2_1("videoBrowserPrepareFrame returned 0x%x", err);
+ return err;
+}
+
+/******************************************************************************
+* M4OSA_ERR videoBrowserDisplayCurrentFrame(M4OSA_Context pContext);
+* @brief This function displays the current frame.
+* @param pContext (IN) : Video browser context
+* @return M4NO_ERROR / M4ERR_PARAMETER / M4ERR_STATE / M4ERR_ALLOC
+******************************************************************************/
+M4OSA_ERR videoBrowserDisplayCurrentFrame(M4OSA_Context pContext)
+{
+ VideoBrowserContext* pC = (VideoBrowserContext*)pContext ;
+ M4OSA_ERR err = M4NO_ERROR ;
+
+ /*--- Sanity checks ---*/
+ CHECK_PTR(videoBrowserDisplayCurrentFrame, pContext, err, M4ERR_PARAMETER);
+
+ // Request display of the frame
+ pC->m_pfCallback((M4OSA_Context) pC, // VB context
+ VIDEOBROWSER_DISPLAY_FRAME, // action requested
+ M4NO_ERROR, // error code
+ (M4OSA_Void*) &(pC->m_outputPlane[0]), // image to be displayed
+ (M4OSA_Void*) pC->m_pCallbackUserData); // user-provided data
+
+#ifdef DUMPTOFILE
+ {
+ M4OSA_Context fileContext;
+ M4OSA_Char* fileName = "/sdcard/textBuffer_RGB565.rgb";
+ M4OSA_fileWriteOpen(&fileContext, (M4OSA_Void*) fileName,
+ M4OSA_kFileWrite | M4OSA_kFileCreate);
+
+ M4OSA_fileWriteData(fileContext,
+ (M4OSA_MemAddr8) pC->m_outputPlane[0].pac_data,
+ pC->m_outputPlane[0].u_height*pC->m_outputPlane[0].u_width*2);
+
+ M4OSA_fileWriteClose(fileContext);
+ }
+#endif
+
+ M4OSA_TRACE2_0("videoBrowserDisplayCurrentFrame returned NO ERROR") ;
+ return M4NO_ERROR;
+
+videoBrowserDisplayCurrentFrame_cleanUp:
+
+ M4OSA_TRACE2_1("videoBrowserDisplayCurrentFrame returned 0x%x", err) ;
+ return err;
+}
diff --git a/media/jni/mediaeditor/VideoBrowserMain.h b/media/jni/mediaeditor/VideoBrowserMain.h
new file mode 100755
index 000000000000..5156ebb2ff50
--- /dev/null
+++ b/media/jni/mediaeditor/VideoBrowserMain.h
@@ -0,0 +1,163 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#ifndef VIDEO_BROWSER_MAIN_H
+#define VIDEO_BROWSER_MAIN_H
+
+/**
+ ************************************************************************
+ * @file VideoBrowserMain.h
+ * @brief Video browser Interface functions
+ ************************************************************************
+*/
+
+#define VIDEOBROWSER 0x423
+
+#include "M4OSA_Memory.h"
+#include "M4OSA_CharStar.h"
+#include "M4OSA_OptionID.h"
+#include "M4OSA_Debug.h"
+#include "M4VIFI_FiltersAPI.h"
+#include "M4OSA_FileReader.h"
+
+
+/**
+ ************************************************************************
+ * @brief Error codes definition.
+ * @note These values are the Browser engine specific error codes.
+ ************************************************************************
+*/
+#define M4ERR_VB_MEDIATYPE_NOT_SUPPORTED M4OSA_ERR_CREATE(M4_ERR, VIDEOBROWSER, 0x01)
+#define M4ERR_VB_NO_VIDEO M4OSA_ERR_CREATE(M4_ERR, VIDEOBROWSER, 0x02)
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*
+ * Video Browser draw mode; an extension for angle-based blitting can be added
+ */
+typedef enum
+{
+ VideoBrowser_kVBNormalBliting
+} VideoBrowser_videoBrowerDrawMode;
+
+
+/*--- Video Browser output frame color type ---*/
+typedef enum
+{
+ VideoBrowser_kYUV420,
+ VideoBrowser_kGB565
+} VideoBrowser_VideoColorType;
+
+/**
+ ************************************************************************
+ * enumeration VideoBrowser_Notification
+ * @brief Video Browser notification type.
+ * @note This callback mechanism must be used to wait for the completion of an asynchronous
+ * operation, before calling another API function.
+ ************************************************************************
+*/
+typedef enum
+{
+ /**
+ * A frame is ready to be displayed, it should be displayed in the callback function
+ * pCbData type = M4VIFI_ImagePlane*
+ */
+ VIDEOBROWSER_DISPLAY_FRAME = 0x00000001,
+ VIDEOBROWSER_NOTIFICATION_NONE = 0xffffffff
+} VideoBrowser_Notification;
+
+
+/**
+ ************************************************************************
+ * @brief videoBrowser_Callback type definition
+ * @param pInstance (IN) Video Browser context.
+ * @param notificationID (IN) Id of the callback which generated the error
+ * @param errCode (IN) Error code from the core
+ * @param pCbData (IN) pointer to data associated with the callback.
+ * @param pCallbackUserData (IN) pointer to application user data passed in init.
+ * @note This callback mechanism is used to request display of an image
+ ************************************************************************
+*/
+typedef M4OSA_Void (*videoBrowser_Callback) (M4OSA_Context pInstance,
+ VideoBrowser_Notification notificationID,
+ M4OSA_ERR errCode,
+ M4OSA_Void* pCbData,
+ M4OSA_Void* pCallbackUserData);
+
+
+/******************************************************************************
+* @brief This function allocates the resources needed for browsing a video file.
+* @param ppContext (OUT): Pointer on a context filled by this function.
+* @param pURL (IN) : Path of the file to browse
+* @param DrawMode (IN) : Indicates which method is used to draw (direct draw, etc.)
+* @param ptrF (IN) : OSAL file reader function pointers used to access the file
+* @param pfCallback (IN) : Callback function to be called when a frame must be displayed
+* @param pCallbackData (IN) : User defined data that will be passed as parameter of the callback
+* @param clrType (IN) : Required color type.
+* @return M4NO_ERROR / M4ERR_PARAMETER / M4ERR_STATE / M4ERR_ALLOC
+******************************************************************************/
+M4OSA_ERR videoBrowserCreate(M4OSA_Context* ppContext, M4OSA_Char* pURL,
+ M4OSA_UInt32 DrawMode,
+ M4OSA_FileReadPointer* ptrF,
+ videoBrowser_Callback pfCallback,
+ M4OSA_Void* pCallbackData,
+ VideoBrowser_VideoColorType clrType);
+
+/******************************************************************************
+* @brief This function frees the resources needed for browsing a video file.
+* @param pContext (IN) : Video browser context
+* @return M4NO_ERROR / M4ERR_PARAMETER / M4ERR_STATE
+******************************************************************************/
+M4OSA_ERR videoBrowserCleanUp(M4OSA_Context pContext) ;
+
+
+/******************************************************************************
+* @brief This function prepares the frame.
+* @param pContext (IN) : Video browser context
+* @param pTime (IN/OUT) : Pointer on the time to reach. Updated by
+* this function with the reached time
+* @return M4NO_ERROR / M4ERR_PARAMETER / M4ERR_STATE / M4ERR_ALLOC
+******************************************************************************/
+M4OSA_ERR videoBrowserPrepareFrame(M4OSA_Context pContext, M4OSA_UInt32* pTime);
+
+/******************************************************************************
+* @brief This function sets the size and the position of the display.
+* @param pContext (IN) : Video Browser context
+* @param pixelArray (IN) : Array to hold the video frame.
+* @param x (IN) : Horizontal position of the top left corner
+* @param y (IN) : Vertical position of the top left corner
+* @param dx (IN) : Width of the display window
+* @param dy (IN) : Height of the video window
+* @return M4NO_ERROR / M4ERR_PARAMETER / M4ERR_STATE / M4ERR_ALLOC
+******************************************************************************/
+M4OSA_ERR videoBrowserSetWindow(M4OSA_Context pContext, M4OSA_Int32* pixelArray,
+ M4OSA_UInt32 x, M4OSA_UInt32 y,
+ M4OSA_UInt32 dx, M4OSA_UInt32 dy);
+
+/******************************************************************************
+* @brief This function displays the current frame.
+* @param pContext (IN) : Video browser context
+* @return M4NO_ERROR / M4ERR_PARAMETER / M4ERR_STATE / M4ERR_ALLOC
+******************************************************************************/
+M4OSA_ERR videoBrowserDisplayCurrentFrame(M4OSA_Context pContext);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* VIDEO_BROWSER_MAIN_H */
diff --git a/media/jni/mediaeditor/VideoEditorClasses.cpp b/media/jni/mediaeditor/VideoEditorClasses.cpp
new file mode 100755
index 000000000000..52e032a2a882
--- /dev/null
+++ b/media/jni/mediaeditor/VideoEditorClasses.cpp
@@ -0,0 +1,3174 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#include <VideoEditorClasses.h>
+#include <VideoEditorJava.h>
+#include <VideoEditorLogging.h>
+#include <VideoEditorOsal.h>
+
+extern "C" {
+#include <M4OSA_Clock.h>
+#include <M4OSA_CharStar.h>
+#include <M4OSA_FileCommon.h>
+#include <M4OSA_FileReader.h>
+#include <M4OSA_FileWriter.h>
+#include <M4OSA_Memory.h>
+#include <M4OSA_Debug.h>
+#include <M4OSA_String.h>
+#include <M4OSA_Thread.h>
+#include <M4VSS3GPP_API.h>
+#include <M4xVSS_API.h>
+#include <M4VSS3GPP_ErrorCodes.h>
+#include <M4MCS_ErrorCodes.h>
+#include <M4READER_Common.h>
+#include <M4WRITER_common.h>
+#include <M4DECODER_Common.h>
+};
+
+#define VIDEOEDIT_PROP_JAVA_RESULT_STRING_MAX (128)
+
+#define VIDEOEDIT_JAVA__RESULT_STRING_MAX (128)
+
+VIDEOEDIT_JAVA_DEFINE_CONSTANTS(AudioEffect)
+{
+ VIDEOEDIT_JAVA_CONSTANT_INIT("NONE", M4VSS3GPP_kAudioEffectType_None),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("FADE_IN", M4VSS3GPP_kAudioEffectType_FadeIn),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("FADE_OUT", M4VSS3GPP_kAudioEffectType_FadeOut)
+};
+
+VIDEOEDIT_JAVA_DEFINE_CONSTANT_CLASS(AudioEffect, AUDIO_EFFECT_CLASS_NAME, M4OSA_NULL, M4OSA_NULL)
+
+
+VIDEOEDIT_JAVA_DEFINE_CONSTANTS(AudioFormat)
+{
+ VIDEOEDIT_JAVA_CONSTANT_INIT("NO_AUDIO", M4VIDEOEDITING_kNoneAudio),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("AMR_NB", M4VIDEOEDITING_kAMR_NB),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("AAC", M4VIDEOEDITING_kAAC),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("AAC_PLUS", M4VIDEOEDITING_kAACplus),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ENHANCED_AAC_PLUS", M4VIDEOEDITING_keAACplus),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("MP3", M4VIDEOEDITING_kMP3),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("EVRC", M4VIDEOEDITING_kEVRC),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("PCM", M4VIDEOEDITING_kPCM),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("NULL_AUDIO", M4VIDEOEDITING_kNullAudio),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("UNSUPPORTED_AUDIO", M4VIDEOEDITING_kUnsupportedAudio)
+};
+
+VIDEOEDIT_JAVA_DEFINE_CONSTANT_CLASS(AudioFormat, AUDIO_FORMAT_CLASS_NAME, M4OSA_NULL, M4OSA_NULL)
+
+
+VIDEOEDIT_JAVA_DEFINE_CONSTANTS(AudioSamplingFrequency)
+{
+ VIDEOEDIT_JAVA_CONSTANT_INIT("FREQ_DEFAULT", M4VIDEOEDITING_kDefault_ASF),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("FREQ_8000", M4VIDEOEDITING_k8000_ASF),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("FREQ_16000", M4VIDEOEDITING_k16000_ASF),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("FREQ_22050", M4VIDEOEDITING_k22050_ASF),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("FREQ_24000", M4VIDEOEDITING_k24000_ASF),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("FREQ_32000", M4VIDEOEDITING_k32000_ASF),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("FREQ_44100", M4VIDEOEDITING_k44100_ASF),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("FREQ_48000", M4VIDEOEDITING_k48000_ASF)
+};
+
+VIDEOEDIT_JAVA_DEFINE_CONSTANT_CLASS(AudioSamplingFrequency,AUDIO_SAMPLING_FREQUENCY_CLASS_NAME,
+ M4OSA_NULL, M4OSA_NULL)
+
+
+VIDEOEDIT_JAVA_DEFINE_CONSTANTS(AudioTransition)
+{
+ VIDEOEDIT_JAVA_CONSTANT_INIT("NONE", M4VSS3GPP_kAudioTransitionType_None),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("CROSS_FADE", M4VSS3GPP_kAudioTransitionType_CrossFade)
+};
+
+VIDEOEDIT_JAVA_DEFINE_CONSTANT_CLASS(AudioTransition, AUDIO_TRANSITION_CLASS_NAME, M4OSA_NULL,
+ M4OSA_NULL)
+
+
+static const char*
+videoEditClasses_getUnknownBitrateString(int bitrate)
+{
+ static char string[VIDEOEDIT_JAVA__RESULT_STRING_MAX] = "";
+
+ M4OSA_chrSPrintf((M4OSA_Char *)string, sizeof(string) - 1, (M4OSA_Char*)"%d", bitrate);
+
+ // Return the bitrate string.
+ return(string);
+}
+
+VIDEOEDIT_JAVA_DEFINE_CONSTANTS(Bitrate)
+{
+ VIDEOEDIT_JAVA_CONSTANT_INIT("VARIABLE", M4VIDEOEDITING_kVARIABLE_KBPS),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("UNDEFINED", M4VIDEOEDITING_kUndefinedBitrate),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("BR_9_2_KBPS", M4VIDEOEDITING_k9_2_KBPS),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("BR_12_2_KBPS", M4VIDEOEDITING_k12_2_KBPS),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("BR_16_KBPS", M4VIDEOEDITING_k16_KBPS),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("BR_24_KBPS", M4VIDEOEDITING_k24_KBPS),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("BR_32_KBPS", M4VIDEOEDITING_k32_KBPS),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("BR_48_KBPS", M4VIDEOEDITING_k48_KBPS),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("BR_64_KBPS", M4VIDEOEDITING_k64_KBPS),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("BR_96_KBPS", M4VIDEOEDITING_k96_KBPS),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("BR_128_KBPS", M4VIDEOEDITING_k128_KBPS),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("BR_192_KBPS", M4VIDEOEDITING_k192_KBPS),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("BR_256_KBPS", M4VIDEOEDITING_k256_KBPS),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("BR_288_KBPS", M4VIDEOEDITING_k288_KBPS),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("BR_384_KBPS", M4VIDEOEDITING_k384_KBPS),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("BR_512_KBPS", M4VIDEOEDITING_k512_KBPS),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("BR_800_KBPS", M4VIDEOEDITING_k800_KBPS),
+/*+ New Encoder bitrates */
+ VIDEOEDIT_JAVA_CONSTANT_INIT("BR_2_MBPS", M4VIDEOEDITING_k2_MBPS),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("BR_5_MBPS", M4VIDEOEDITING_k5_MBPS),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("BR_8_MBPS", M4VIDEOEDITING_k8_MBPS)
+/*- New Encoder bitrates */
+};
+
+VIDEOEDIT_JAVA_DEFINE_CONSTANT_CLASS(Bitrate, BITRATE_CLASS_NAME,
+ videoEditClasses_getUnknownBitrateString, videoEditClasses_getUnknownBitrateString)
+
+
+VIDEOEDIT_JAVA_DEFINE_CONSTANTS(ClipType)
+{
+ VIDEOEDIT_JAVA_CONSTANT_INIT("THREE_GPP", M4VIDEOEDITING_kFileType_3GPP),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("MP4", M4VIDEOEDITING_kFileType_MP4),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("AMR", M4VIDEOEDITING_kFileType_AMR),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("MP3", M4VIDEOEDITING_kFileType_MP3),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("PCM", M4VIDEOEDITING_kFileType_PCM),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("JPG", M4VIDEOEDITING_kFileType_JPG),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("UNSUPPORTED", M4VIDEOEDITING_kFileType_Unsupported)
+};
+
+VIDEOEDIT_JAVA_DEFINE_CONSTANT_CLASS(ClipType, FILE_TYPE_CLASS_NAME, M4OSA_NULL, M4OSA_NULL)
+
+
+VIDEOEDIT_JAVA_DEFINE_CONSTANTS(Engine)
+{
+ VIDEOEDIT_JAVA_CONSTANT_INIT("TASK_LOADING_SETTINGS", TASK_LOADING_SETTINGS),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("TASK_ENCODING", TASK_ENCODING)
+};
+
+VIDEOEDIT_JAVA_DEFINE_CONSTANT_CLASS(Engine, MANUAL_EDIT_ENGINE_CLASS_NAME, M4OSA_NULL,
+ M4OSA_NULL)
+
+
+static const char*
+videoEditClasses_getUnknownErrorName(int error)
+{
+ static char string[VIDEOEDIT_JAVA__RESULT_STRING_MAX] = "ERR_INTERNAL";
+
+ // Format the unknown error string.
+ M4OSA_chrSPrintf((M4OSA_Char *)string, sizeof(string) - 1, (M4OSA_Char*)"ERR_INTERNAL(%s)",
+ videoEditOsal_getResultString(error));
+
+ // Return the error string.
+ return(string);
+}
+
+static const char*
+videoEditClasses_getUnknownErrorString(int error)
+{
+ // Return the result string.
+ return(videoEditOsal_getResultString(error));
+}
+
+VIDEOEDIT_JAVA_DEFINE_CONSTANTS(Error)
+{
+ // M4OSA_Clock.h
+ VIDEOEDIT_JAVA_CONSTANT_INIT("WAR_TIMESCALE_TOO_BIG", \
+ M4WAR_TIMESCALE_TOO_BIG ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_CLOCK_BAD_REF_YEAR", \
+ M4ERR_CLOCK_BAD_REF_YEAR ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_FILE_NOT_FOUND", \
+ M4ERR_FILE_NOT_FOUND ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("WAR_TRANSCODING_NECESSARY", \
+ M4VSS3GPP_WAR_TRANSCODING_NECESSARY ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("WAR_MAX_OUTPUT_SIZE_EXCEEDED", \
+ M4VSS3GPP_WAR_OUTPUTFILESIZE_EXCEED ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_BUFFER_OUT_TOO_SMALL", \
+ M4xVSSWAR_BUFFER_OUT_TOO_SMALL ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_NOMORE_SPACE_FOR_FILE", \
+ M4xVSSERR_NO_MORE_SPACE ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_INVALID_FILE_TYPE", \
+ M4VSS3GPP_ERR_INVALID_FILE_TYPE ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_INVALID_EFFECT_KIND", \
+ M4VSS3GPP_ERR_INVALID_EFFECT_KIND ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_INVALID_VIDEO_EFFECT_TYPE", \
+ M4VSS3GPP_ERR_INVALID_VIDEO_EFFECT_TYPE ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_INVALID_AUDIO_EFFECT_TYPE", \
+ M4VSS3GPP_ERR_INVALID_AUDIO_EFFECT_TYPE ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_INVALID_VIDEO_TRANSITION_TYPE", \
+ M4VSS3GPP_ERR_INVALID_VIDEO_TRANSITION_TYPE ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_INVALID_AUDIO_TRANSITION_TYPE", \
+ M4VSS3GPP_ERR_INVALID_AUDIO_TRANSITION_TYPE ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_INVALID_VIDEO_ENCODING_FRAME_RATE", \
+ M4VSS3GPP_ERR_INVALID_VIDEO_ENCODING_FRAME_RATE ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_EXTERNAL_EFFECT_NULL", \
+ M4VSS3GPP_ERR_EXTERNAL_EFFECT_NULL ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_EXTERNAL_TRANSITION_NULL", \
+ M4VSS3GPP_ERR_EXTERNAL_TRANSITION_NULL ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_BEGIN_CUT_LARGER_THAN_DURATION", \
+ M4VSS3GPP_ERR_BEGIN_CUT_LARGER_THAN_DURATION ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_BEGIN_CUT_LARGER_THAN_END_CUT", \
+ M4VSS3GPP_ERR_BEGIN_CUT_LARGER_THAN_END_CUT ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_OVERLAPPING_TRANSITIONS", \
+ M4VSS3GPP_ERR_OVERLAPPING_TRANSITIONS ),
+#ifdef M4VSS3GPP_ERR_ANALYSIS_DATA_SIZE_TOO_SMALL
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_ANALYSIS_DATA_SIZE_TOO_SMALL", \
+ M4VSS3GPP_ERR_ANALYSIS_DATA_SIZE_TOO_SMALL ),
+#endif
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_INVALID_3GPP_FILE", \
+ M4VSS3GPP_ERR_INVALID_3GPP_FILE ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_UNSUPPORTED_INPUT_VIDEO_FORMAT", \
+ M4VSS3GPP_ERR_UNSUPPORTED_INPUT_VIDEO_FORMAT ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_UNSUPPORTED_INPUT_AUDIO_FORMAT", \
+ M4VSS3GPP_ERR_UNSUPPORTED_INPUT_AUDIO_FORMAT ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_AMR_EDITING_UNSUPPORTED", \
+ M4VSS3GPP_ERR_AMR_EDITING_UNSUPPORTED ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_INPUT_VIDEO_AU_TOO_LARGE", \
+ M4VSS3GPP_ERR_INPUT_VIDEO_AU_TOO_LARGE ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_INPUT_AUDIO_AU_TOO_LARGE", \
+ M4VSS3GPP_ERR_INPUT_AUDIO_AU_TOO_LARGE ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_INPUT_AUDIO_CORRUPTED_AU", \
+ M4VSS3GPP_ERR_INPUT_AUDIO_CORRUPTED_AU ),
+#ifdef M4VSS3GPP_ERR_INPUT_AUDIO_CORRUPTED_AMR_AU
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_INPUT_AUDIO_CORRUPTED_AU", \
+ M4VSS3GPP_ERR_INPUT_AUDIO_CORRUPTED_AMR_AU ),
+#endif
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_ENCODER_ACCES_UNIT_ERROR", \
+ M4VSS3GPP_ERR_ENCODER_ACCES_UNIT_ERROR ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_EDITING_UNSUPPORTED_VIDEO_FORMAT", \
+ M4VSS3GPP_ERR_EDITING_UNSUPPORTED_VIDEO_FORMAT ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_EDITING_UNSUPPORTED_H263_PROFILE", \
+ M4VSS3GPP_ERR_EDITING_UNSUPPORTED_H263_PROFILE ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_EDITING_UNSUPPORTED_MPEG4_PROFILE", \
+ M4VSS3GPP_ERR_EDITING_UNSUPPORTED_MPEG4_PROFILE ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_EDITING_UNSUPPORTED_MPEG4_RVLC", \
+ M4VSS3GPP_ERR_EDITING_UNSUPPORTED_MPEG4_RVLC ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_EDITING_UNSUPPORTED_AUDIO_FORMAT", \
+ M4VSS3GPP_ERR_EDITING_UNSUPPORTED_AUDIO_FORMAT ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_EDITING_NO_SUPPORTED_STREAM_IN_FILE", \
+ M4VSS3GPP_ERR_EDITING_NO_SUPPORTED_STREAM_IN_FILE ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_EDITING_NO_SUPPORTED_VIDEO_STREAM_IN_FILE",\
+ M4VSS3GPP_ERR_EDITING_NO_SUPPORTED_VIDEO_STREAM_IN_FILE),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_INVALID_CLIP_ANALYSIS_VERSION", \
+ M4VSS3GPP_ERR_INVALID_CLIP_ANALYSIS_VERSION ),
+#ifdef M4VSS3GPP_ERR_INVALID_CLIP_ANALYSIS_PLATFORM
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_INVALID_CLIP_ANALYSIS_PLATFORM", \
+ M4VSS3GPP_ERR_INVALID_CLIP_ANALYSIS_PLATFORM ),
+#endif
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_INCOMPATIBLE_VIDEO_FORMAT", \
+ M4VSS3GPP_ERR_INCOMPATIBLE_VIDEO_FORMAT ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_INCOMPATIBLE_VIDEO_FRAME_SIZE", \
+ M4VSS3GPP_ERR_INCOMPATIBLE_VIDEO_FRAME_SIZE ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_INCOMPATIBLE_VIDEO_TIME_SCALE", \
+ M4VSS3GPP_ERR_INCOMPATIBLE_VIDEO_TIME_SCALE ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_INCOMPATIBLE_VIDEO_DATA_PARTITIONING", \
+ M4VSS3GPP_ERR_INCOMPATIBLE_VIDEO_DATA_PARTITIONING ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_UNSUPPORTED_MP3_ASSEMBLY", \
+ M4VSS3GPP_ERR_UNSUPPORTED_MP3_ASSEMBLY ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_NO_SUPPORTED_STREAM_IN_FILE", \
+ M4VSS3GPP_ERR_NO_SUPPORTED_STREAM_IN_FILE ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_ADDVOLUME_EQUALS_ZERO", \
+ M4VSS3GPP_ERR_ADDVOLUME_EQUALS_ZERO ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_ADDCTS_HIGHER_THAN_VIDEO_DURATION", \
+ M4VSS3GPP_ERR_ADDCTS_HIGHER_THAN_VIDEO_DURATION ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_UNDEFINED_AUDIO_TRACK_FILE_FORMAT", \
+ M4VSS3GPP_ERR_UNDEFINED_AUDIO_TRACK_FILE_FORMAT ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_UNSUPPORTED_ADDED_AUDIO_STREAM", \
+ M4VSS3GPP_ERR_UNSUPPORTED_ADDED_AUDIO_STREAM ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_AUDIO_MIXING_UNSUPPORTED", \
+ M4VSS3GPP_ERR_AUDIO_MIXING_UNSUPPORTED ),
+#ifdef M4VSS3GPP_ERR_AUDIO_MIXING_MP3_UNSUPPORTED
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_AUDIO_MIXING_MP3_UNSUPPORTED", \
+ M4VSS3GPP_ERR_AUDIO_MIXING_MP3_UNSUPPORTED ),
+#endif
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_FEATURE_UNSUPPORTED_WITH_AUDIO_TRACK", \
+ M4VSS3GPP_ERR_FEATURE_UNSUPPORTED_WITH_AUDIO_TRACK ),
+#ifdef M4VSS3GPP_ERR_FEATURE_UNSUPPORTED_WITH_AAC
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_FEATURE_UNSUPPORTED_WITH_AAC", \
+ M4VSS3GPP_ERR_FEATURE_UNSUPPORTED_WITH_AAC ),
+#endif
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_AUDIO_CANNOT_BE_MIXED", \
+ M4VSS3GPP_ERR_AUDIO_CANNOT_BE_MIXED ),
+#ifdef M4VSS3GPP_ERR_ONLY_AMRNB_INPUT_CAN_BE_MIXED
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_ONLY_AMRNB_INPUT_CAN_BE_MIXED", \
+ M4VSS3GPP_ERR_ONLY_AMRNB_INPUT_CAN_BE_MIXED ),
+#endif
+#ifdef M4VSS3GPP_ERR_FEATURE_UNSUPPORTED_WITH_EVRC
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_FEATURE_UNSUPPORTED_WITH_EVRC", \
+ M4VSS3GPP_ERR_FEATURE_UNSUPPORTED_WITH_EVRC ),
+#endif
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_H263_PROFILE_NOT_SUPPORTED", \
+ M4VSS3GPP_ERR_H263_PROFILE_NOT_SUPPORTED ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_NO_SUPPORTED_VIDEO_STREAM_IN_FILE", \
+ M4VSS3GPP_ERR_NO_SUPPORTED_VIDEO_STREAM_IN_FILE ),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ERR_INTERNAL", \
+ M4NO_ERROR ),
+};
+
+VIDEOEDIT_JAVA_DEFINE_CONSTANT_CLASS(Error, ERROR_CLASS_NAME,
+ videoEditClasses_getUnknownErrorName, videoEditClasses_getUnknownErrorString)
+
+
+VIDEOEDIT_JAVA_DEFINE_CONSTANTS(FileType)
+{
+ VIDEOEDIT_JAVA_CONSTANT_INIT("THREE_GPP", VideoEditClasses_kFileType_3GPP),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("MP4", VideoEditClasses_kFileType_MP4),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("AMR", VideoEditClasses_kFileType_AMR),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("MP3", VideoEditClasses_kFileType_MP3),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("PCM", VideoEditClasses_kFileType_PCM),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("JPG", VideoEditClasses_kFileType_JPG),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("GIF", VideoEditClasses_kFileType_GIF),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("PNG", VideoEditClasses_kFileType_PNG),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("UNSUPPORTED", VideoEditClasses_kFileType_Unsupported)
+};
+
+VIDEOEDIT_JAVA_DEFINE_CONSTANT_CLASS(FileType, FILE_TYPE_CLASS_NAME, M4OSA_NULL, M4OSA_NULL)
+
+
+VIDEOEDIT_JAVA_DEFINE_CONSTANTS(MediaRendering)
+{
+ VIDEOEDIT_JAVA_CONSTANT_INIT("RESIZING", M4xVSS_kResizing),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("CROPPING", M4xVSS_kCropping),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("BLACK_BORDERS", M4xVSS_kBlackBorders)
+};
+
+VIDEOEDIT_JAVA_DEFINE_CONSTANT_CLASS(MediaRendering, MEDIA_RENDERING_CLASS_NAME,
+ M4OSA_NULL, M4OSA_NULL)
+
+
+VIDEOEDIT_JAVA_DEFINE_CONSTANTS(SlideDirection)
+{
+ VIDEOEDIT_JAVA_CONSTANT_INIT("RIGHT_OUT_LEFT_IN", M4xVSS_SlideTransition_RightOutLeftIn),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("LEFT_OUT_RIGTH_IN", M4xVSS_SlideTransition_LeftOutRightIn),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("TOP_OUT_BOTTOM_IN", M4xVSS_SlideTransition_TopOutBottomIn),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("BOTTOM_OUT_TOP_IN", M4xVSS_SlideTransition_BottomOutTopIn)
+};
+
+VIDEOEDIT_JAVA_DEFINE_CONSTANT_CLASS(SlideDirection, SLIDE_DIRECTION_CLASS_NAME,
+ M4OSA_NULL, M4OSA_NULL)
+
+
+VIDEOEDIT_JAVA_DEFINE_CONSTANTS(TransitionBehaviour)
+{
+ VIDEOEDIT_JAVA_CONSTANT_INIT("SPEED_UP", M4VSS3GPP_TransitionBehaviour_SpeedUp),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("LINEAR", M4VSS3GPP_TransitionBehaviour_Linear),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("SPEED_DOWN", M4VSS3GPP_TransitionBehaviour_SpeedDown),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("SLOW_MIDDLE", M4VSS3GPP_TransitionBehaviour_SlowMiddle),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("FAST_MIDDLE", M4VSS3GPP_TransitionBehaviour_FastMiddle)
+};
+
+VIDEOEDIT_JAVA_DEFINE_CONSTANT_CLASS(TransitionBehaviour, TRANSITION_BEHAVIOUR_CLASS_NAME,
+ M4OSA_NULL, M4OSA_NULL)
+
+
+VIDEOEDIT_JAVA_DEFINE_CONSTANTS(VideoEffect)
+{
+ VIDEOEDIT_JAVA_CONSTANT_INIT("NONE", M4VSS3GPP_kVideoEffectType_None),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("FADE_FROM_BLACK", M4VSS3GPP_kVideoEffectType_FadeFromBlack),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("CURTAIN_OPENING", M4VSS3GPP_kVideoEffectType_CurtainOpening),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("FADE_TO_BLACK", M4VSS3GPP_kVideoEffectType_FadeToBlack),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("CURTAIN_CLOSING", M4VSS3GPP_kVideoEffectType_CurtainClosing),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("EXTERNAL", M4VSS3GPP_kVideoEffectType_External),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("BLACK_AND_WHITE", M4xVSS_kVideoEffectType_BlackAndWhite),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("PINK", M4xVSS_kVideoEffectType_Pink),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("GREEN", M4xVSS_kVideoEffectType_Green),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("SEPIA", M4xVSS_kVideoEffectType_Sepia),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("NEGATIVE", M4xVSS_kVideoEffectType_Negative),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("FRAMING", M4xVSS_kVideoEffectType_Framing),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("TEXT", M4xVSS_kVideoEffectType_Text),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ZOOM_IN", M4xVSS_kVideoEffectType_ZoomIn),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ZOOM_OUT", M4xVSS_kVideoEffectType_ZoomOut),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("FIFTIES", M4xVSS_kVideoEffectType_Fifties),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("COLORRGB16", M4xVSS_kVideoEffectType_ColorRGB16),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("GRADIENT", M4xVSS_kVideoEffectType_Gradient),
+};
+
+VIDEOEDIT_JAVA_DEFINE_CONSTANT_CLASS(VideoEffect, VIDEO_EFFECT_CLASS_NAME, M4OSA_NULL, M4OSA_NULL)
+
+
+VIDEOEDIT_JAVA_DEFINE_CONSTANTS(VideoFormat)
+{
+ VIDEOEDIT_JAVA_CONSTANT_INIT("NO_VIDEO", M4VIDEOEDITING_kNoneVideo),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("H263", M4VIDEOEDITING_kH263),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("MPEG4", M4VIDEOEDITING_kMPEG4),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("MPEG4_EMP", M4VIDEOEDITING_kMPEG4_EMP),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("H264", M4VIDEOEDITING_kH264),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("NULL_VIDEO", M4VIDEOEDITING_kNullVideo),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("UNSUPPORTED", M4VIDEOEDITING_kUnsupportedVideo),
+};
+
+VIDEOEDIT_JAVA_DEFINE_CONSTANT_CLASS(VideoFormat, VIDEO_FORMAT_CLASS_NAME, M4OSA_NULL, M4OSA_NULL)
+
+
+VIDEOEDIT_JAVA_DEFINE_CONSTANTS(VideoFrameRate)
+{
+ VIDEOEDIT_JAVA_CONSTANT_INIT("FR_5_FPS", M4VIDEOEDITING_k5_FPS),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("FR_7_5_FPS", M4VIDEOEDITING_k7_5_FPS),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("FR_10_FPS", M4VIDEOEDITING_k10_FPS),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("FR_12_5_FPS", M4VIDEOEDITING_k12_5_FPS),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("FR_15_FPS", M4VIDEOEDITING_k15_FPS),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("FR_20_FPS", M4VIDEOEDITING_k20_FPS),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("FR_25_FPS", M4VIDEOEDITING_k25_FPS),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("FR_30_FPS", M4VIDEOEDITING_k30_FPS)
+};
+
+VIDEOEDIT_JAVA_DEFINE_CONSTANT_CLASS(VideoFrameRate, VIDEO_FRAME_RATE_CLASS_NAME,
+ M4OSA_NULL, M4OSA_NULL)
+
+
+VIDEOEDIT_JAVA_DEFINE_CONSTANTS(VideoFrameSize)
+{
+ VIDEOEDIT_JAVA_CONSTANT_INIT("SQCIF", M4VIDEOEDITING_kSQCIF),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("QQVGA", M4VIDEOEDITING_kQQVGA),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("QCIF", M4VIDEOEDITING_kQCIF),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("QVGA", M4VIDEOEDITING_kQVGA),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("CIF", M4VIDEOEDITING_kCIF),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("VGA", M4VIDEOEDITING_kVGA),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("WVGA", M4VIDEOEDITING_kWVGA),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("NTSC", M4VIDEOEDITING_kNTSC),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("nHD", M4VIDEOEDITING_k640_360),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("WVGA16x9", M4VIDEOEDITING_k854_480),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("V720p", M4VIDEOEDITING_kHD1280),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("W720p", M4VIDEOEDITING_kHD1080),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("S720p", M4VIDEOEDITING_kHD960)
+};
+
+VIDEOEDIT_JAVA_DEFINE_CONSTANT_CLASS(VideoFrameSize, VIDEO_FRAME_SIZE_CLASS_NAME,
+ M4OSA_NULL, M4OSA_NULL)
+
+
+VIDEOEDIT_JAVA_DEFINE_CONSTANTS(VideoProfile)
+{
+ VIDEOEDIT_JAVA_CONSTANT_INIT("MPEG4_SP_LEVEL_0", \
+ M4VIDEOEDITING_kMPEG4_SP_Level_0),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("MPEG4_SP_LEVEL_0B", \
+ M4VIDEOEDITING_kMPEG4_SP_Level_0b),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("MPEG4_SP_LEVEL_1", \
+ M4VIDEOEDITING_kMPEG4_SP_Level_1),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("MPEG4_SP_LEVEL_2", \
+ M4VIDEOEDITING_kMPEG4_SP_Level_2),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("MPEG4_SP_LEVEL_3", \
+ M4VIDEOEDITING_kMPEG4_SP_Level_3),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("MPEG4_SP_LEVEL_4A", \
+ M4VIDEOEDITING_kMPEG4_SP_Level_4a),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("MPEG4_SP_LEVEL_5", \
+ M4VIDEOEDITING_kMPEG4_SP_Level_5),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("H263_PROFILE_0_LEVEL_10",\
+ M4VIDEOEDITING_kH263_Profile_0_Level_10),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("H263_PROFILE_0_LEVEL_20",\
+ M4VIDEOEDITING_kH263_Profile_0_Level_20),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("H263_PROFILE_0_LEVEL_30",\
+ M4VIDEOEDITING_kH263_Profile_0_Level_30),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("H263_PROFILE_0_LEVEL_40",\
+ M4VIDEOEDITING_kH263_Profile_0_Level_40),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("H263_PROFILE_0_LEVEL_45",\
+ M4VIDEOEDITING_kH263_Profile_0_Level_45),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("H264_PROFILE_0_LEVEL_1", \
+ M4VIDEOEDITING_kH264_Profile_0_Level_1),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("H264_PROFILE_0_LEVEL_1b",\
+ M4VIDEOEDITING_kH264_Profile_0_Level_1b),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("H264_PROFILE_0_LEVEL_1_1",\
+ M4VIDEOEDITING_kH264_Profile_0_Level_1_1),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("H264_PROFILE_0_LEVEL_1_2",\
+ M4VIDEOEDITING_kH264_Profile_0_Level_1_2),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("H264_PROFILE_0_LEVEL_1_3",\
+ M4VIDEOEDITING_kH264_Profile_0_Level_1_3),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("H264_PROFILE_0_LEVEL_2", \
+ M4VIDEOEDITING_kH264_Profile_0_Level_2),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("H264_PROFILE_0_LEVEL_2_1",\
+ M4VIDEOEDITING_kH264_Profile_0_Level_2_1),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("H264_PROFILE_0_LEVEL_2_2",\
+ M4VIDEOEDITING_kH264_Profile_0_Level_2_2),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("H264_PROFILE_0_LEVEL_3", \
+ M4VIDEOEDITING_kH264_Profile_0_Level_3),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("H264_PROFILE_0_LEVEL_3_1",\
+ M4VIDEOEDITING_kH264_Profile_0_Level_3_1),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("H264_PROFILE_0_LEVEL_3_2",\
+ M4VIDEOEDITING_kH264_Profile_0_Level_3_2),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("H264_PROFILE_0_LEVEL_4", \
+ M4VIDEOEDITING_kH264_Profile_0_Level_4),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("H264_PROFILE_0_LEVEL_4_1",\
+ M4VIDEOEDITING_kH264_Profile_0_Level_4_1),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("H264_PROFILE_0_LEVEL_4_2",\
+ M4VIDEOEDITING_kH264_Profile_0_Level_4_2),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("H264_PROFILE_0_LEVEL_5", \
+ M4VIDEOEDITING_kH264_Profile_0_Level_5),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("H264_PROFILE_0_LEVEL_5_1",\
+ M4VIDEOEDITING_kH264_Profile_0_Level_5_1),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("OUT_OF_RANGE", \
+ M4VIDEOEDITING_kProfile_and_Level_Out_Of_Range)
+};
+
+VIDEOEDIT_JAVA_DEFINE_CONSTANT_CLASS(VideoProfile, VIDEO_PROFILE_CLASS_NAME, M4OSA_NULL,
+ M4OSA_NULL)
+
+
+VIDEOEDIT_JAVA_DEFINE_CONSTANTS(VideoTransition)
+{
+ VIDEOEDIT_JAVA_CONSTANT_INIT("NONE", M4VSS3GPP_kVideoTransitionType_None),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("CROSS_FADE", M4VSS3GPP_kVideoTransitionType_CrossFade),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("EXTERNAL", M4VSS3GPP_kVideoTransitionType_External),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("ALPHA_MAGIC", M4xVSS_kVideoTransitionType_AlphaMagic),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("SLIDE_TRANSITION", M4xVSS_kVideoTransitionType_SlideTransition),
+ VIDEOEDIT_JAVA_CONSTANT_INIT("FADE_BLACK", M4xVSS_kVideoTransitionType_FadeBlack)
+};
+
+VIDEOEDIT_JAVA_DEFINE_CONSTANT_CLASS(VideoTransition, VIDEO_TRANSITION_CLASS_NAME,
+ M4OSA_NULL, M4OSA_NULL)
+
+
+VIDEOEDIT_JAVA_DEFINE_FIELDS(AlphaMagic)
+{
+ VIDEOEDIT_JAVA_FIELD_INIT("file", "Ljava/lang/String;"),
+ VIDEOEDIT_JAVA_FIELD_INIT("blendingPercent", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("invertRotation", "Z" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("rgbWidth", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("rgbHeight", "I" )
+};
+
+VIDEOEDIT_JAVA_DEFINE_FIELD_CLASS(AlphaMagic, ALPHA_MAGIC_SETTINGS_CLASS_NAME)
+
+VIDEOEDIT_JAVA_DEFINE_FIELDS(Properties)
+{
+ VIDEOEDIT_JAVA_FIELD_INIT("duration", "I"),
+ VIDEOEDIT_JAVA_FIELD_INIT("fileType", "I"),
+ VIDEOEDIT_JAVA_FIELD_INIT("videoFormat", "I"),
+ VIDEOEDIT_JAVA_FIELD_INIT("videoDuration", "I"),
+ VIDEOEDIT_JAVA_FIELD_INIT("videoBitrate", "I"),
+ VIDEOEDIT_JAVA_FIELD_INIT("width", "I"),
+ VIDEOEDIT_JAVA_FIELD_INIT("height", "I"),
+ VIDEOEDIT_JAVA_FIELD_INIT("averageFrameRate", "F"),
+ VIDEOEDIT_JAVA_FIELD_INIT("profileAndLevel", "I"),
+ VIDEOEDIT_JAVA_FIELD_INIT("audioFormat", "I"),
+ VIDEOEDIT_JAVA_FIELD_INIT("audioDuration", "I"),
+ VIDEOEDIT_JAVA_FIELD_INIT("audioBitrate", "I"),
+ VIDEOEDIT_JAVA_FIELD_INIT("audioChannels", "I"),
+ VIDEOEDIT_JAVA_FIELD_INIT("audioSamplingFrequency", "I")
+};
+
+VIDEOEDIT_JAVA_DEFINE_FIELD_CLASS(Properties, PROPERTIES_CLASS_NAME)
+
+VIDEOEDIT_JAVA_DEFINE_FIELDS(BackgroundMusic)
+{
+ VIDEOEDIT_JAVA_FIELD_INIT("file", "Ljava/lang/String;"),
+ VIDEOEDIT_JAVA_FIELD_INIT("fileType", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("insertionTime", "J" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("volumePercent", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("beginLoop", "J" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("endLoop", "J" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("enableDucking", "Z" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("duckingThreshold","I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("lowVolume", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("isLooping", "Z" )
+};
+
+VIDEOEDIT_JAVA_DEFINE_FIELD_CLASS(BackgroundMusic, BACKGROUND_MUSIC_SETTINGS_CLASS_NAME)
+
+/*
+VIDEOEDIT_JAVA_DEFINE_FIELDS(BestEditSettings)
+{
+ VIDEOEDIT_JAVA_FIELD_INIT("videoFormat", "I"),
+ VIDEOEDIT_JAVA_FIELD_INIT("videoFrameSize", "I"),
+ VIDEOEDIT_JAVA_FIELD_INIT("audioFormat", "I"),
+ VIDEOEDIT_JAVA_FIELD_INIT("audioChannels", "I")
+};
+
+VIDEOEDIT_JAVA_DEFINE_FIELD_CLASS(BestEditSettings, BEST_EDIT_SETTINGS_CLASS_NAME)
+*/
+
+VIDEOEDIT_JAVA_DEFINE_FIELDS(ClipSettings)
+{
+ VIDEOEDIT_JAVA_FIELD_INIT("clipPath", "Ljava/lang/String;"),
+ VIDEOEDIT_JAVA_FIELD_INIT("fileType", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("beginCutTime", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("endCutTime", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("beginCutPercent", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("endCutPercent", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("panZoomEnabled", "Z" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("panZoomPercentStart", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("panZoomTopLeftXStart", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("panZoomTopLeftYStart", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("panZoomPercentEnd", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("panZoomTopLeftXEnd", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("panZoomTopLeftYEnd", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("mediaRendering", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("rgbWidth", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("rgbHeight", "I" )
+};
+
+VIDEOEDIT_JAVA_DEFINE_FIELD_CLASS(ClipSettings, CLIP_SETTINGS_CLASS_NAME)
+
+
+VIDEOEDIT_JAVA_DEFINE_FIELDS(EditSettings)
+{
+ VIDEOEDIT_JAVA_FIELD_INIT("clipSettingsArray", "[L"CLIP_SETTINGS_CLASS_NAME";" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("transitionSettingsArray", "[L"TRANSITION_SETTINGS_CLASS_NAME";" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("effectSettingsArray", "[L"EFFECT_SETTINGS_CLASS_NAME";" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("videoFrameRate", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("outputFile", "Ljava/lang/String;" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("videoFrameSize", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("videoFormat", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("audioFormat", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("audioSamplingFreq", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("maxFileSize", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("audioChannels", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("videoBitrate", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("audioBitrate", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("backgroundMusicSettings",\
+ "L"BACKGROUND_MUSIC_SETTINGS_CLASS_NAME";"),
+ VIDEOEDIT_JAVA_FIELD_INIT("primaryTrackVolume", "I" )
+};
+
+VIDEOEDIT_JAVA_DEFINE_FIELD_CLASS(EditSettings, EDIT_SETTINGS_CLASS_NAME)
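+
+/* Each VIDEOEDIT_JAVA_FIELD_INIT() entry pairs a Java field name with its JNI
+ * type descriptor: "I" = int, "J" = long, "Z" = boolean, "F" = float,
+ * "Ljava/lang/String;" = String, and "[L<class>;" = an array of objects.
+ * The compiler concatenates adjacent string literals, so a sketch such as
+ * (the macro value below is only illustrative, not the real class name):
+ *
+ *     #define CLIP_SETTINGS_CLASS_NAME "some/package/ClipSettings"
+ *     "[L"CLIP_SETTINGS_CLASS_NAME";"    // becomes "[Lsome/package/ClipSettings;"
+ *
+ * which is the descriptor GetFieldID() expects for a ClipSettings[] field.
+ */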
+
+
+VIDEOEDIT_JAVA_DEFINE_FIELDS(EffectSettings)
+{
+ VIDEOEDIT_JAVA_FIELD_INIT("startTime", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("duration", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("videoEffectType", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("audioEffectType", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("startPercent", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("durationPercent", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("framingFile", "Ljava/lang/String;"),
+ VIDEOEDIT_JAVA_FIELD_INIT("framingBuffer", "[I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("bitmapType", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("width", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("height", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("topLeftX", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("topLeftY", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("framingResize", "Z" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("framingScaledSize", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("text", "Ljava/lang/String;"),
+ VIDEOEDIT_JAVA_FIELD_INIT("textRenderingData", "Ljava/lang/String;"),
+ VIDEOEDIT_JAVA_FIELD_INIT("textBufferWidth", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("textBufferHeight", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("fiftiesFrameRate", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("rgb16InputColor", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("alphaBlendingStartPercent", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("alphaBlendingMiddlePercent", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("alphaBlendingEndPercent", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("alphaBlendingFadeInTimePercent", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("alphaBlendingFadeOutTimePercent", "I" )
+};
+
+VIDEOEDIT_JAVA_DEFINE_FIELD_CLASS(EffectSettings, EFFECT_SETTINGS_CLASS_NAME)
+
+
+VIDEOEDIT_JAVA_DEFINE_FIELDS(Engine)
+{
+ VIDEOEDIT_JAVA_FIELD_INIT("mManualEditContext", "I")
+};
+
+VIDEOEDIT_JAVA_DEFINE_FIELD_CLASS(Engine, MANUAL_EDIT_ENGINE_CLASS_NAME)
+
+
+VIDEOEDIT_JAVA_DEFINE_FIELDS(SlideTransitionSettings)
+{
+ VIDEOEDIT_JAVA_FIELD_INIT("direction", "I")
+};
+
+VIDEOEDIT_JAVA_DEFINE_FIELD_CLASS(SlideTransitionSettings, SLIDE_TRANSITION_SETTINGS_CLASS_NAME)
+
+
+VIDEOEDIT_JAVA_DEFINE_FIELDS(TransitionSettings)
+{
+ VIDEOEDIT_JAVA_FIELD_INIT("duration", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("videoTransitionType", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("audioTransitionType", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("transitionBehaviour", "I" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("alphaSettings", "L"ALPHA_MAGIC_SETTINGS_CLASS_NAME";" ),
+ VIDEOEDIT_JAVA_FIELD_INIT("slideSettings", "L"SLIDE_TRANSITION_SETTINGS_CLASS_NAME";")
+};
+
+VIDEOEDIT_JAVA_DEFINE_FIELD_CLASS(TransitionSettings, TRANSITION_SETTINGS_CLASS_NAME)
+
+
+VIDEOEDIT_JAVA_DEFINE_FIELDS(Version)
+{
+ VIDEOEDIT_JAVA_FIELD_INIT("major", "I"),
+ VIDEOEDIT_JAVA_FIELD_INIT("minor", "I"),
+ VIDEOEDIT_JAVA_FIELD_INIT("revision", "I")
+};
+
+VIDEOEDIT_JAVA_DEFINE_FIELD_CLASS(Version, VERSION_CLASS_NAME)
+
+
+VIDEOEDIT_JAVA_DEFINE_METHODS(Engine)
+{
+ VIDEOEDIT_JAVA_METHOD_INIT("onProgressUpdate", "(II)V")
+};
+
+VIDEOEDIT_JAVA_DEFINE_METHOD_CLASS(Engine, MANUAL_EDIT_ENGINE_CLASS_NAME)
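+
+/* The JNI method signature "(II)V" above denotes a method that takes two int
+ * arguments and returns void, so the Java engine class named by
+ * MANUAL_EDIT_ENGINE_CLASS_NAME is expected to declare something like
+ * (parameter names are only illustrative):
+ *
+ *     void onProgressUpdate(int taskId, int progress);
+ */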
+
+
+static const char*
+videoEditClasses_getBrandString(M4OSA_UInt32 brand)
+{
+ static char brandString[11] = "0x00000000";
+ const char* pBrandString = M4OSA_NULL;
+ M4OSA_UInt8* pBrand = (M4OSA_UInt8*)&brand;
+ M4OSA_UInt32 brandHost = 0;
+
+ // Convert the brand from big endian to host.
+ brandHost = pBrand[0];
+ brandHost = brandHost << 8;
+ brandHost += pBrand[1];
+ brandHost = brandHost << 8;
+ brandHost += pBrand[2];
+ brandHost = brandHost << 8;
+ brandHost += pBrand[3];
+
+ switch (brandHost)
+ {
+ case M4VIDEOEDITING_BRAND_0000:
+ pBrandString = "0000";
+ break;
+ case M4VIDEOEDITING_BRAND_3G2A:
+ pBrandString = "3G2A";
+ break;
+ case M4VIDEOEDITING_BRAND_3GP4:
+ pBrandString = "3GP4";
+ break;
+ case M4VIDEOEDITING_BRAND_3GP5:
+ pBrandString = "3GP5";
+ break;
+ case M4VIDEOEDITING_BRAND_3GP6:
+ pBrandString = "3GP6";
+ break;
+ case M4VIDEOEDITING_BRAND_AVC1:
+ pBrandString = "AVC1";
+ break;
+ case M4VIDEOEDITING_BRAND_EMP:
+ pBrandString = "EMP";
+ break;
+ case M4VIDEOEDITING_BRAND_ISOM:
+ pBrandString = "ISOM";
+ break;
+ case M4VIDEOEDITING_BRAND_MP41:
+ pBrandString = "MP41";
+ break;
+ case M4VIDEOEDITING_BRAND_MP42:
+ pBrandString = "MP42";
+ break;
+ case M4VIDEOEDITING_BRAND_VFJ1:
+ pBrandString = "VFJ1";
+ break;
+ default:
+ M4OSA_chrSPrintf((M4OSA_Char *)brandString,
+ sizeof(brandString) - 1,
+ (M4OSA_Char*)"0x%08X", brandHost);
+ pBrandString = brandString;
+ break;
+ }
+
+ // Return the brand string.
+ return(pBrandString);
+}
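+
+/* Worked example of the big-endian conversion above: if the four brand bytes
+ * are the ASCII characters '3' 'g' 'p' '4' (0x33, 0x67, 0x70, 0x34 in stream
+ * order), then
+ *
+ *     brandHost = (0x33 << 24) | (0x67 << 16) | (0x70 << 8) | 0x34
+ *               = 0x33677034
+ *
+ * which should match M4VIDEOEDITING_BRAND_3GP4 and print as "3GP4"; any value
+ * without a case label is printed through the "0x%08X" fallback instead.
+ */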
+
+#ifdef VIDEOEDIT_LOGGING_ENABLED
+static void
+videoEditClasses_logFtypBox(
+ M4VIDEOEDITING_FtypBox* pBox,
+ int indentation)
+{
+ // Check if memory was allocated for the FtypBox.
+ if (M4OSA_NULL != pBox)
+ {
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c major_brand: %s", indentation, ' ',
+ videoEditClasses_getBrandString(pBox->major_brand));
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c minor_version: %08X", indentation, ' ',
+ (unsigned int)pBox->minor_version);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c nbCompatibleBrands: %u", indentation, ' ',
+ (unsigned int)pBox->nbCompatibleBrands);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c compatible_brands:", indentation, ' ');
+ indentation += VIDEOEDIT_LOG_INDENTATION;
+ for (int i = 0; (i < (int)pBox->nbCompatibleBrands) &&\
+ (i < M4VIDEOEDITING_MAX_COMPATIBLE_BRANDS); i++)
+ {
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c compatible_brand[%d]: %s", indentation, ' ',
+ i, videoEditClasses_getBrandString(pBox->compatible_brands[i]));
+ }
+ indentation -= VIDEOEDIT_LOG_INDENTATION;
+ }
+ else
+ {
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES", "%*c <null>",
+ indentation, ' ');
+ }
+}
+#endif
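+
+/* The "%*c" idiom used by the logging helpers in this file takes the field
+ * width from the argument list: with the arguments (indentation, ' ') it
+ * prints a single space right-aligned in a field of 'indentation' columns,
+ * which indents the rest of the line. A minimal sketch:
+ *
+ *     printf("%*c value: %d\n", 4, ' ', 42);   // prints "    " and then " value: 42"
+ */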
+
+
+void
+videoEditClasses_init(
+ bool* pResult,
+ JNIEnv* pEnv)
+{
+ // Check if the previous action succeeded.
+ if (*pResult)
+ {
+ // Log the function call.
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",\
+ "videoEditClasses_init()");
+
+ // Initialize the constants.
+ videoEditJava_initAudioEffectConstants(pResult, pEnv);
+ videoEditJava_initAudioFormatConstants(pResult, pEnv);
+ videoEditJava_initAudioSamplingFrequencyConstants(pResult, pEnv);
+ videoEditJava_initAudioTransitionConstants(pResult, pEnv);
+ videoEditJava_initBitrateConstants(pResult, pEnv);
+ videoEditJava_initClipTypeConstants(pResult, pEnv);
+ videoEditJava_initEngineConstants(pResult, pEnv);
+ videoEditJava_initErrorConstants(pResult, pEnv);
+ videoEditJava_initFileTypeConstants(pResult, pEnv);
+ videoEditJava_initMediaRenderingConstants(pResult, pEnv);
+ videoEditJava_initSlideDirectionConstants(pResult, pEnv);
+ videoEditJava_initTransitionBehaviourConstants(pResult, pEnv);
+ videoEditJava_initVideoEffectConstants(pResult, pEnv);
+ videoEditJava_initVideoFormatConstants(pResult, pEnv);
+ videoEditJava_initVideoFrameRateConstants(pResult, pEnv);
+ videoEditJava_initVideoFrameSizeConstants(pResult, pEnv);
+ videoEditJava_initVideoProfileConstants(pResult, pEnv);
+ videoEditJava_initVideoTransitionConstants(pResult, pEnv);
+
+ // Initialize the fields.
+ videoEditJava_initAlphaMagicFields(pResult, pEnv);
+ videoEditJava_initBackgroundMusicFields(pResult, pEnv);
+ videoEditJava_initClipSettingsFields(pResult, pEnv);
+ videoEditJava_initEditSettingsFields(pResult, pEnv);
+ videoEditJava_initEffectSettingsFields(pResult, pEnv);
+ videoEditJava_initEngineFields(pResult, pEnv);
+ videoEditJava_initSlideTransitionSettingsFields(pResult, pEnv);
+ videoEditJava_initTransitionSettingsFields(pResult, pEnv);
+ videoEditJava_initVersionFields(pResult, pEnv);
+ // Initialize the methods.
+ videoEditJava_initEngineMethods(pResult, pEnv);
+ }
+}
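+
+/* The videoEditJava_init*() helpers above appear to share the same convention
+ * as this function: each one checks *pResult before doing any work and clears
+ * it on failure, so the whole chain stops at the first error without explicit
+ * checks in between. A minimal sketch of the pattern (helper names
+ * hypothetical):
+ *
+ *     bool ok = true;
+ *     initStepA(&ok, pEnv);   // may set ok to false
+ *     initStepB(&ok, pEnv);   // becomes a no-op once ok is false
+ *     // ok now reflects whether every step succeeded
+ */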
+
+void
+videoEditPropClass_init(
+ bool* pResult,
+ JNIEnv* pEnv)
+{
+ // Check if the previous action succeeded.
+ if (*pResult)
+ {
+ // Log the function call.
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_PROP_CLASSES",\
+ "videoEditPropClass_init()");
+
+ // Initialize the constants.
+ videoEditJava_initAudioFormatConstants(pResult, pEnv);
+ videoEditJava_initErrorConstants(pResult, pEnv);
+ videoEditJava_initFileTypeConstants(pResult, pEnv);
+ videoEditJava_initVideoFormatConstants(pResult, pEnv);
+ videoEditJava_initVideoProfileConstants(pResult, pEnv);
+
+ // Initialize the fields.
+ videoEditJava_initPropertiesFields(pResult, pEnv);
+ }
+}
+
+void
+videoEditClasses_getAlphaMagicSettings(
+ bool* pResult,
+ JNIEnv* pEnv,
+ jobject object,
+ M4xVSS_AlphaMagicSettings** ppSettings)
+{
+ VideoEditJava_AlphaMagicFieldIds fieldIds = {NULL, NULL, NULL, NULL, NULL};
+ M4xVSS_AlphaMagicSettings* pSettings = M4OSA_NULL;
+
+ // Check if the previous action succeeded.
+ if (*pResult)
+ {
+ // Log the function call.
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "videoEditClasses_getAlphaMagicSettings()");
+
+ // Retrieve the field ids.
+ videoEditJava_getAlphaMagicFieldIds(pResult, pEnv, &fieldIds);
+ }
+
+ // Only validate the AlphaMagicSettings if the fields could be located.
+ if (*pResult)
+ {
+ // Check if the clip is set.
+ videoEditJava_checkAndThrowIllegalArgumentException(pResult, pEnv,
+ (NULL == object),
+ "alphaSettings is null");
+ }
+
+ // Only retrieve the AlphaMagicSettings if the fields could be located and validated.
+ if (*pResult)
+ {
+ // Allocate memory for the AlphaMagicSettings.
+ pSettings = (M4xVSS_AlphaMagicSettings*)videoEditOsal_alloc(pResult, pEnv,
+ sizeof(M4xVSS_AlphaMagicSettings), "AlphaMagicSettings");
+
+ // Check if memory could be allocated for the AlphaMagicSettings.
+ if (*pResult)
+ {
+ // Set the alpha magic file path (JPG file).
+ pSettings->pAlphaFilePath = (M4OSA_Char*)videoEditJava_getString(pResult, pEnv, object,
+ fieldIds.file, M4OSA_NULL);
+
+ // Check if the alpha magic file path is valid.
+ videoEditJava_checkAndThrowIllegalArgumentException(pResult, pEnv,
+ (M4OSA_NULL == pSettings->pAlphaFilePath), "alphaSettings.file is null");
+ }
+
+ // Check if the alpha file path could be retrieved.
+ if (*pResult)
+ {
+ // Set the blending percentage between 0 and 100.
+ pSettings->blendingPercent = (M4OSA_UInt8)pEnv->GetIntField(object,
+ fieldIds.blendingPercent);
+
+            // Set whether the effect runs in the direct or the reverse direction.
+ pSettings->isreverse = (M4OSA_Bool)pEnv->GetBooleanField(object,
+ fieldIds.invertRotation);
+
+            // Get the RGB frame width and height.
+            pSettings->width = (M4OSA_UInt32)pEnv->GetIntField(object, fieldIds.rgbWidth);
+
+            pSettings->height = (M4OSA_UInt32)pEnv->GetIntField(object, fieldIds.rgbHeight);
+
+            VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+                "getAlphaMagicSettings: pAlphaFilePath %s", pSettings->pAlphaFilePath);
+
+            VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+                "getAlphaMagicSettings: width %d",
+                pEnv->GetIntField(object, fieldIds.rgbWidth));
+            VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+                "getAlphaMagicSettings: height %d",
+                pEnv->GetIntField(object, fieldIds.rgbHeight));
+ }
+
+ // Check if settings could be set.
+ if (*pResult)
+ {
+ // Return the settings.
+ (*ppSettings) = pSettings;
+ }
+ else
+ {
+ // Free the settings.
+ videoEditClasses_freeAlphaMagicSettings(&pSettings);
+ }
+ }
+}
+
+void
+videoEditClasses_freeAlphaMagicSettings(
+ M4xVSS_AlphaMagicSettings** ppSettings)
+{
+ // Check if memory was allocated for the AlphaMagicSettings.
+ if (M4OSA_NULL != (*ppSettings))
+ {
+ // Log the function call.
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "videoEditClasses_freeAlphaMagicSettings()");
+
+ // Free the alpha file path.
+ videoEditOsal_free((*ppSettings)->pAlphaFilePath);
+ (*ppSettings)->pAlphaFilePath = M4OSA_NULL;
+
+ // Free the settings structure.
+ videoEditOsal_free((*ppSettings));
+ (*ppSettings) = M4OSA_NULL;
+ }
+}
+
+#ifdef VIDEOEDIT_LOGGING_ENABLED
+void
+videoEditClasses_logAlphaMagicSettings(
+ M4xVSS_AlphaMagicSettings* pSettings,
+ int indentation)
+{
+ // Check if memory was allocated for the AlphaMagicSettings.
+ if (M4OSA_NULL != pSettings)
+ {
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c pAlphaFilePath: %s", indentation, ' ',
+ (M4OSA_NULL != pSettings->pAlphaFilePath) ? \
+ (char *)pSettings->pAlphaFilePath : "<null>");
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c blendingPercent: %u %%", indentation, ' ',
+ (unsigned int)pSettings->blendingPercent);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c isreverse: %s", indentation, ' ',
+ pSettings->isreverse ? "true" : "false");
+ }
+ else
+ {
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c <null>", indentation, ' ');
+ }
+}
+#endif
+
+
+void
+videoEditClasses_getBackgroundMusicSettings(
+ bool* pResult,
+ JNIEnv* pEnv,
+ jobject object,
+ M4xVSS_BGMSettings** ppSettings)
+{
+ VideoEditJava_BackgroundMusicFieldIds fieldIds = {NULL, NULL, NULL, NULL,
+                                                      NULL, NULL, NULL, NULL, NULL, NULL};
+ M4xVSS_BGMSettings* pSettings = M4OSA_NULL;
+ bool converted = true;
+
+ // Check if the previous action succeeded.
+ if (*pResult)
+ {
+ // Log the function call.
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "videoEditClasses_getBackgroundMusicSettings()");
+
+ // Retrieve the field ids.
+ videoEditJava_getBackgroundMusicFieldIds(pResult, pEnv, &fieldIds);
+ }
+
+ // Only retrieve the BackgroundMusicSettings if the fields could be located.
+ if (*pResult)
+ {
+ // Check if the object is valid.
+ if (NULL != object)
+ {
+ // Allocate memory for the BackgroundMusicSettings.
+ pSettings = (M4xVSS_BGMSettings*)videoEditOsal_alloc(pResult, pEnv,
+ sizeof(M4xVSS_BGMSettings), "BackgroundMusicSettings");
+
+ // Check if memory could be allocated for the BackgroundMusicSettings.
+ if (*pResult)
+ {
+ // Set the input file path.
+ pSettings->pFile = (M4OSA_Char*)videoEditJava_getString(pResult, pEnv, object,
+ fieldIds.file, M4OSA_NULL);
+
+ // Check if the input file path is valid.
+ videoEditJava_checkAndThrowIllegalArgumentException(pResult, pEnv,
+ (M4OSA_NULL == pSettings->pFile), "backgroundMusicSettings.file is null");
+ }
+
+ // Check if the input file path could be retrieved.
+ if (*pResult)
+ {
+                // Set the file type; it is forced to PCM here (the conversion
+                // from the Java fileType field is left commented out).
+ pSettings->FileType = M4VIDEOEDITING_kFileType_PCM;
+ /*(M4VIDEOEDITING_FileType)videoEditJava_getClipTypeJavaToC(
+ &converted, pEnv->GetIntField(object, fieldIds.fileType));*/
+
+ // Check if the file type is valid.
+ videoEditJava_checkAndThrowIllegalArgumentException(pResult, pEnv,
+ !converted, "backgroundMusicSettings.fileType is invalid");
+ }
+
+ // Check if the file type could be retrieved.
+ if (*pResult)
+ {
+ // Set the time, in milliseconds, at which the added audio track is inserted.
+ pSettings->uiAddCts = (M4OSA_UInt32)pEnv->GetLongField(object,
+ fieldIds.insertionTime);
+
+ // Set the volume, in percentage (0..100), of the added audio track.
+ pSettings->uiAddVolume = (M4OSA_UInt32)pEnv->GetIntField(object,
+ fieldIds.volumePercent);
+
+                // Set the start time of the loop, in milliseconds.
+ pSettings->uiBeginLoop = (M4OSA_UInt32)pEnv->GetLongField(object,
+ fieldIds.beginLoop);
+
+                // Set the end time of the loop, in milliseconds.
+ pSettings->uiEndLoop = (M4OSA_UInt32)pEnv->GetLongField(object,
+ fieldIds.endLoop);
+                // Set whether audio ducking is enabled.
+                pSettings->b_DuckingNeedeed =
+                    (M4OSA_Bool)pEnv->GetBooleanField(object, fieldIds.enableDucking);
+
+                // Set the ducking threshold.
+                pSettings->InDucking_threshold =
+                    (M4OSA_Int32)pEnv->GetIntField(object, fieldIds.duckingThreshold);
+
+                // Set the lowered volume level applied while ducking.
+                pSettings->lowVolume =
+                    (M4OSA_Float)(((M4OSA_Float)pEnv->GetIntField(object, fieldIds.lowVolume)));
+
+                // Set whether the background music track loops.
+                pSettings->bLoop = (M4OSA_Bool)pEnv->GetBooleanField(object, fieldIds.isLooping);
+
+ // Set sampling freq and channels
+ pSettings->uiSamplingFrequency = M4VIDEOEDITING_k32000_ASF;
+ pSettings->uiNumChannels = 2;
+ }
+
+ // Check if settings could be set.
+ if (*pResult)
+ {
+ // Return the settings.
+ (*ppSettings) = pSettings;
+ }
+ else
+ {
+ // Free the settings.
+ videoEditClasses_freeBackgroundMusicSettings(&pSettings);
+ }
+ }
+ }
+}
+
+void
+videoEditClasses_freeBackgroundMusicSettings(
+ M4xVSS_BGMSettings** ppSettings)
+{
+ // Check if memory was allocated for the BackgroundMusicSettings.
+ if (M4OSA_NULL != (*ppSettings))
+ {
+ // Log the function call.
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "videoEditClasses_freeBackgroundMusicSettings()");
+
+ // Free the input file path.
+ videoEditOsal_free((*ppSettings)->pFile);
+ (*ppSettings)->pFile = M4OSA_NULL;
+
+ // Free the settings structure.
+ videoEditOsal_free((*ppSettings));
+ (*ppSettings) = M4OSA_NULL;
+ }
+}
+
+#ifdef VIDEOEDIT_LOGGING_ENABLED
+void
+videoEditClasses_logBackgroundMusicSettings(
+ M4xVSS_BGMSettings* pSettings,
+ int indentation)
+{
+ // Check if memory was allocated for the BackgroundMusicSettings.
+ if (M4OSA_NULL != pSettings)
+ {
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES", "%*c pFile: %s",
+ indentation, ' ',
+ (M4OSA_NULL != pSettings->pFile) ? (char *)pSettings->pFile : "<null>");
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c FileType: %s", indentation, ' ',
+ videoEditJava_getClipTypeString(pSettings->FileType));
+
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES", "%*c uiAddCts: %u ms",
+ indentation, ' ', (unsigned int)pSettings->uiAddCts);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES", "%*c uiAddVolume: %u %%",
+ indentation, ' ', (unsigned int)pSettings->uiAddVolume);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES", "%*c uiBeginLoop: %u ms",
+ indentation, ' ', (unsigned int)pSettings->uiBeginLoop);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES", "%*c uiEndLoop: %u ms",
+ indentation, ' ', (unsigned int)pSettings->uiEndLoop);
+        VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+            "%*c b_DuckingNeedeed: %s", indentation, ' ',
+            pSettings->b_DuckingNeedeed ? "true" : "false");
+        VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+            "%*c InDucking_threshold: %d", indentation, ' ',
+            (int)pSettings->InDucking_threshold);
+        VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+            "%*c lowVolume: %2.2f", indentation, ' ',
+            (float)pSettings->lowVolume);
+        VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+            "%*c bLoop: %s", indentation, ' ',
+            pSettings->bLoop ? "true" : "false");
+ }
+ else
+ {
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES", "%*c <null>",
+ indentation, ' ');
+ }
+}
+#endif
+
+#ifdef VIDEOEDIT_LOGGING_ENABLED
+void
+videoEditClasses_logClipProperties(
+ M4VIDEOEDITING_ClipProperties* pProperties,
+ int indentation)
+{
+ // Check if memory was allocated for the ClipProperties.
+ if (M4OSA_NULL != pProperties)
+ {
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c bAnalysed: %s", indentation, ' ',
+ pProperties->bAnalysed ? "true" : "false");
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c Version: %d.%d.%d", indentation, ' ',
+ pProperties->Version[0], pProperties->Version[1], pProperties->Version[2]);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uiClipDuration: %u", indentation, ' ',
+ (unsigned int)pProperties->uiClipDuration);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c FileType: %s", indentation, ' ',
+ videoEditJava_getClipTypeString(pProperties->FileType));
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES", "%*c ftyp:",
+ indentation, ' ');
+ videoEditClasses_logFtypBox(&pProperties->ftyp, indentation + VIDEOEDIT_LOG_INDENTATION);
+
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c VideoStreamType: %s", indentation, ' ',
+ videoEditJava_getVideoFormatString(pProperties->VideoStreamType));
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uiClipVideoDuration: %u", indentation, ' ',
+ (unsigned int)pProperties->uiClipVideoDuration);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uiVideoBitrate: %s", indentation, ' ',
+ videoEditJava_getBitrateString(pProperties->uiVideoBitrate));
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uiVideoMaxAuSize: %u", indentation, ' ',
+ (unsigned int)pProperties->uiVideoMaxAuSize);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uiVideoWidth: %u", indentation, ' ',
+ (unsigned int)pProperties->uiVideoWidth);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uiVideoHeight: %u", indentation, ' ',
+            (unsigned int)pProperties->uiVideoHeight);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uiVideoTimeScale: %u", indentation, ' ',
+ (unsigned int)pProperties->uiVideoTimeScale);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c fAverageFrameRate: %.3f", indentation, ' ',
+ pProperties->fAverageFrameRate);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c ProfileAndLevel: %s", indentation, ' ',
+ videoEditJava_getVideoProfileString(pProperties->ProfileAndLevel));
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uiH263level: %d", indentation, ' ',
+ pProperties->uiH263level);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uiVideoProfile: %d", indentation, ' ',
+ pProperties->uiVideoProfile);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c bMPEG4dataPartition: %s", indentation, ' ',
+ pProperties->bMPEG4dataPartition ? "true" : "false");
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c bMPEG4rvlc: %s", indentation, ' ',
+ pProperties->bMPEG4rvlc ? "true" : "false");
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c bMPEG4resynchMarker: %s", indentation, ' ',
+ pProperties->bMPEG4resynchMarker ? "true" : "false");
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c AudioStreamType: %s", indentation, ' ',
+ videoEditJava_getAudioFormatString(pProperties->AudioStreamType));
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uiClipAudioDuration: %u", indentation, ' ',
+ (unsigned int)pProperties->uiClipAudioDuration);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uiAudioBitrate: %s", indentation, ' ',
+ videoEditJava_getBitrateString(pProperties->uiAudioBitrate));
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uiAudioMaxAuSize: %u", indentation, ' ',
+ (unsigned int)pProperties->uiAudioMaxAuSize);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uiNbChannels: %u", indentation, ' ',
+ (unsigned int)pProperties->uiNbChannels);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uiSamplingFrequency: %u", indentation, ' ',
+ (unsigned int)pProperties->uiSamplingFrequency);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uiExtendedSamplingFrequency: %u", indentation, ' ',
+ (unsigned int)pProperties->uiExtendedSamplingFrequency);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uiDecodedPcmSize: %u", indentation, ' ',
+ (unsigned int)pProperties->uiDecodedPcmSize);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c bVideoIsEditable: %s", indentation, ' ',
+ pProperties->bVideoIsEditable ? "true" : "false");
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c bAudioIsEditable: %s", indentation, ' ',
+ pProperties->bAudioIsEditable ? "true" : "false");
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c bVideoIsCompatibleWithMasterClip: %s", indentation, ' ',
+ pProperties->bVideoIsCompatibleWithMasterClip ? "true" : "false");
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c bAudioIsCompatibleWithMasterClip: %s", indentation, ' ',
+ pProperties->bAudioIsCompatibleWithMasterClip ? "true" : "false");
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uiClipAudioVolumePercentage: %d", indentation, ' ',
+ pProperties->uiClipAudioVolumePercentage);
+ }
+ else
+ {
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES", "%*c <null>",
+ indentation, ' ');
+ }
+}
+#endif
+
+void
+videoEditClasses_getClipSettings(
+ bool* pResult,
+ JNIEnv* pEnv,
+ jobject object,
+ M4VSS3GPP_ClipSettings** ppSettings)
+{
+ VideoEditJava_ClipSettingsFieldIds fieldIds = {NULL, NULL, NULL, NULL, NULL, NULL, NULL,
+ NULL, NULL, NULL, NULL, NULL, NULL, NULL};
+ M4VSS3GPP_ClipSettings* pSettings = M4OSA_NULL;
+ M4OSA_ERR result = M4NO_ERROR;
+ bool converted = true;
+
+ // Check if the previous action succeeded.
+ if (*pResult)
+ {
+ // Log the function call.
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "videoEditClasses_getClipSettings()");
+
+ // Retrieve the field ids.
+ videoEditJava_getClipSettingsFieldIds(pResult, pEnv, &fieldIds);
+ }
+
+ // Only validate the ClipSettings if the fields could be located.
+ if (*pResult)
+ {
+ // Check if the clip is set.
+ videoEditJava_checkAndThrowIllegalArgumentException(pResult, pEnv,
+ (NULL == object),
+ "clip is null");
+ }
+
+ // Only retrieve the ClipSettings if the fields could be located and validated.
+ if (*pResult)
+ {
+ // Allocate memory for the ClipSettings.
+ pSettings = (M4VSS3GPP_ClipSettings *)videoEditOsal_alloc(pResult, pEnv,
+ sizeof(M4VSS3GPP_ClipSettings), "ClipSettings");
+
+ // Check if memory could be allocated for the ClipSettings.
+ if (*pResult)
+ {
+ // Log the API call.
+ VIDEOEDIT_LOG_API(ANDROID_LOG_INFO, "VIDEO_EDITOR", "M4xVSS_CreateClipSettings()");
+
+ // Initialize the ClipSettings.
+ result = M4xVSS_CreateClipSettings(pSettings, NULL, 0, 0);
+
+ // Log the result.
+ VIDEOEDIT_LOG_RESULT(ANDROID_LOG_INFO, "VIDEO_EDITOR",
+ videoEditOsal_getResultString(result));
+
+ // Check if the initialization succeeded.
+ videoEditJava_checkAndThrowRuntimeException(pResult, pEnv,
+ (M4NO_ERROR != result), result);
+ }
+
+ // Check if the allocation and initialization succeeded
+        // (required because pSettings is dereferenced).
+ if (*pResult)
+ {
+ // Set the input file path.
+ pSettings->pFile = (M4OSA_Char*)videoEditJava_getString(pResult, pEnv, object,
+ fieldIds.clipPath, &pSettings->filePathSize);
+
+ // Check if the file path is valid.
+ videoEditJava_checkAndThrowIllegalArgumentException(pResult, pEnv,
+ (M4OSA_NULL == pSettings->pFile), "clip.clipPath is null");
+ }
+
+ // Check if the input file could be retrieved.
+ if (*pResult)
+ {
+ // Set the file type .3gp, .amr, .mp3.
+ pSettings->FileType = (M4VIDEOEDITING_FileType)videoEditJava_getClipTypeJavaToC(
+ &converted, pEnv->GetIntField(object, fieldIds.fileType));
+
+ if ( pSettings->FileType == M4VIDEOEDITING_kFileType_JPG)
+ {
+ pSettings->FileType = M4VIDEOEDITING_kFileType_ARGB8888;
+ }
+
+ // Check if the file type is valid.
+ videoEditJava_checkAndThrowIllegalArgumentException(pResult, pEnv,
+ !converted, "clip.fileType is invalid");
+ }
+
+ // Check if the file type could be retrieved.
+ if (*pResult)
+ {
+ // Set the begin cut time, in milliseconds.
+ pSettings->uiBeginCutTime =
+ (M4OSA_UInt32)pEnv->GetIntField(object, fieldIds.beginCutTime);
+
+ // Set the end cut time, in milliseconds.
+ pSettings->uiEndCutTime = (M4OSA_UInt32)pEnv->GetIntField(object, fieldIds.endCutTime);
+
+            // Set the begin cut time, as a percentage of the clip duration (3GPP clips only).
+ pSettings->xVSS.uiBeginCutPercent =
+ (M4OSA_UInt32)pEnv->GetIntField(object, fieldIds.beginCutPercent);
+
+            // Set the end cut time, as a percentage of the clip duration (3GPP clips only).
+ pSettings->xVSS.uiEndCutPercent =
+ (M4OSA_UInt32)pEnv->GetIntField(object, fieldIds.endCutPercent);
+
+            // Set the duration of the clip; if non-zero, it takes priority
+            // over uiEndCutTime and uiEndCutPercent.
+ pSettings->xVSS.uiDuration = 0;
+
+ // Set whether or not the pan and zoom mode is enabled.
+ pSettings->xVSS.isPanZoom =
+ (M4OSA_Bool)pEnv->GetBooleanField(object, fieldIds.panZoomEnabled);
+
+ // Set the pan and zoom start zoom percentage.
+ pSettings->xVSS.PanZoomXa =
+ (M4OSA_UInt16)pEnv->GetIntField(object, fieldIds.panZoomPercentStart);
+
+ // Set the pan and zoom start x.
+ pSettings->xVSS.PanZoomTopleftXa =
+ (M4OSA_UInt16)pEnv->GetIntField(object, fieldIds.panZoomTopLeftXStart);
+
+ // Set the pan and zoom start y.
+ pSettings->xVSS.PanZoomTopleftYa =
+ (M4OSA_UInt16)pEnv->GetIntField(object, fieldIds.panZoomTopLeftYStart);
+
+ // Set the pan and zoom end zoom percentage.
+ pSettings->xVSS.PanZoomXb =
+ (M4OSA_UInt16)pEnv->GetIntField(object, fieldIds.panZoomPercentEnd);
+
+ // Set the pan and zoom end x.
+ pSettings->xVSS.PanZoomTopleftXb =
+ (M4OSA_UInt16)pEnv->GetIntField(object, fieldIds.panZoomTopLeftXEnd);
+
+ // Set the pan and zoom end y.
+ pSettings->xVSS.PanZoomTopleftYb =
+ (M4OSA_UInt16)pEnv->GetIntField(object, fieldIds.panZoomTopLeftYEnd);
+
+ // Set the media rendering mode, only used with JPEG to crop, resize,
+ // or render black borders.
+ pSettings->xVSS.MediaRendering =
+ (M4xVSS_MediaRendering)videoEditJava_getMediaRenderingJavaToC(
+ &converted, pEnv->GetIntField(object,fieldIds.mediaRendering));
+
+ // Check if the media rendering is valid.
+ videoEditJava_checkAndThrowIllegalArgumentException(pResult, pEnv, !converted,
+ "clip.mediaRendering is invalid");
+
+ // Capture the rgb file width and height
+ pSettings->ClipProperties.uiStillPicWidth =
+ (M4OSA_UInt16)pEnv->GetIntField(object, fieldIds.rgbFileWidth);
+ pSettings->ClipProperties.uiStillPicHeight =
+ (M4OSA_UInt16)pEnv->GetIntField(object, fieldIds.rgbFileHeight);
+
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR", \
+ "getClipSettings-- rgbFileWidth %d ",
+ pSettings->ClipProperties.uiStillPicWidth);
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR", \
+ "getClipSettings-- rgbFileHeight %d ",
+ pSettings->ClipProperties.uiStillPicHeight);
+ }
+
+ // Check if settings could be set.
+ if (*pResult)
+ {
+ // Return the settings.
+ (*ppSettings) = pSettings;
+ }
+ else
+ {
+ // Free the settings.
+ videoEditClasses_freeClipSettings(&pSettings);
+ }
+ }
+}
+
+void
+videoEditClasses_createClipSettings(
+ bool* pResult,
+ JNIEnv* pEnv,
+ M4VSS3GPP_ClipSettings* pSettings,
+ jobject* pObject)
+{
+ VideoEditJava_ClipSettingsFieldIds fieldIds = {NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL,
+ NULL, NULL, NULL, NULL, NULL, NULL};
+ jclass clazz = NULL;
+ jobject object = NULL;
+
+ // Check if the previous action succeeded.
+ if (*pResult)
+ {
+ // Log the function call.
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "videoEditClasses_createClipSettings()");
+
+ // Retrieve the class.
+ videoEditJava_getClipSettingsClass(pResult, pEnv, &clazz);
+
+ // Retrieve the field ids.
+ videoEditJava_getClipSettingsFieldIds(pResult, pEnv, &fieldIds);
+ }
+
+ // Only create an object if the class and fields could be located.
+ if (*pResult)
+ {
+ // Allocate a new object.
+ object = pEnv->AllocObject(clazz);
+ if (NULL != object)
+ {
+ // Set the clipPath field.
+ pEnv->SetObjectField(object, fieldIds.clipPath, NULL);
+
+ // Set the fileType field.
+ pEnv->SetIntField(object, fieldIds.fileType, videoEditJava_getClipTypeCToJava(
+ pSettings->FileType));
+
+ // Set the beginCutTime field.
+ pEnv->SetIntField(object, fieldIds.beginCutTime, pSettings->uiBeginCutTime);
+
+ // Set the endCutTime field.
+ pEnv->SetIntField(object, fieldIds.endCutTime, pSettings->uiEndCutTime);
+
+ // Set the beginCutPercent field.
+ pEnv->SetIntField(object, fieldIds.beginCutPercent, pSettings->xVSS.uiBeginCutPercent);
+
+ // Set the endCutPercent field.
+ pEnv->SetIntField(object, fieldIds.endCutPercent, pSettings->xVSS.uiEndCutPercent);
+
+ // Set the panZoomEnabled field.
+ pEnv->SetBooleanField(object, fieldIds.panZoomEnabled, pSettings->xVSS.isPanZoom);
+
+ // Set the panZoomPercentStart field.
+ pEnv->SetIntField(object, fieldIds.panZoomPercentStart,
+ (100 - pSettings->xVSS.PanZoomXa));
+
+ // Set the panZoomTopLeftXStart field.
+ pEnv->SetIntField(object, fieldIds.panZoomTopLeftXStart,
+ pSettings->xVSS.PanZoomTopleftXa);
+
+ // Set the panZoomTopLeftYStart field.
+ pEnv->SetIntField(object, fieldIds.panZoomTopLeftYStart,
+ pSettings->xVSS.PanZoomTopleftYa);
+
+ // Set the panZoomPercentEnd field.
+ pEnv->SetIntField(object, fieldIds.panZoomPercentEnd,
+ (100 - pSettings->xVSS.PanZoomXb));
+
+ // Set the panZoomTopLeftXEnd field.
+ pEnv->SetIntField(object, fieldIds.panZoomTopLeftXEnd,
+ pSettings->xVSS.PanZoomTopleftXb);
+
+ // Set the panZoomTopLeftYEnd field.
+ pEnv->SetIntField(object, fieldIds.panZoomTopLeftYEnd,
+ pSettings->xVSS.PanZoomTopleftYb);
+
+ // Set the mediaRendering field.
+ pEnv->SetIntField(object, fieldIds.mediaRendering,
+ videoEditJava_getMediaRenderingCToJava(pSettings->xVSS.MediaRendering));
+
+ // Set the rgb file width and height
+ pEnv->SetIntField(object, fieldIds.rgbFileWidth,
+ pSettings->ClipProperties.uiStillPicWidth );
+
+ pEnv->SetIntField(object, fieldIds.rgbFileHeight,
+ pSettings->ClipProperties.uiStillPicHeight );
+
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+                "rgbFileWidth %d rgbFileHeight %d ",
+ pSettings->ClipProperties.uiStillPicWidth ,
+ pSettings->ClipProperties.uiStillPicHeight);
+
+ // Return the object.
+ (*pObject) = object;
+ }
+ }
+}
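+
+/* Note: AllocObject() creates the Java instance without running any
+ * constructor, which is why every relevant field is written explicitly with
+ * Set<Type>Field() above. An alternative, assuming the settings class has a
+ * no-argument constructor, would be to construct it normally:
+ *
+ *     jmethodID ctor = pEnv->GetMethodID(clazz, "<init>", "()V");
+ *     jobject settings = (ctor != NULL) ? pEnv->NewObject(clazz, ctor) : NULL;
+ */
+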
+void
+videoEditPropClass_createProperties(
+ bool* pResult,
+ JNIEnv* pEnv,
+ VideoEditPropClass_Properties* pProperties,
+ jobject* pObject)
+{
+ VideoEditJava_PropertiesFieldIds fieldIds = {NULL, NULL, NULL, NULL, NULL, NULL, NULL,
+ NULL, NULL, NULL, NULL, NULL, NULL, NULL};
+ jclass clazz = NULL;
+ jobject object = NULL;
+
+ // Check if the previous action succeeded.
+ if (*pResult)
+ {
+ // Log the function call.
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_PROP_CLASSES",
+ "videoEditPropClass_createProperties()");
+
+ // Retrieve the class.
+ videoEditJava_getPropertiesClass(pResult, pEnv, &clazz);
+
+ // Retrieve the field ids.
+ videoEditJava_getPropertiesFieldIds(pResult, pEnv, &fieldIds);
+ }
+
+ // Only create an object if the class and fields could be located.
+ if (*pResult)
+ {
+ // Allocate a new object.
+ object = pEnv->AllocObject(clazz);
+ if (NULL != object)
+ {
+ // Set the duration field.
+ pEnv->SetIntField(object, fieldIds.duration, pProperties->uiClipDuration);
+
+ // Set the fileType field.
+ pEnv->SetIntField(object, fieldIds.fileType,
+ videoEditJava_getFileTypeCToJava(pProperties->FileType));
+
+ // Set the videoFormat field.
+ pEnv->SetIntField(object, fieldIds.videoFormat,
+ videoEditJava_getVideoFormatCToJava(pProperties->VideoStreamType));
+
+ // Set the videoDuration field.
+ pEnv->SetIntField(object, fieldIds.videoDuration, pProperties->uiClipVideoDuration);
+
+ // Set the videoBitrate field.
+ pEnv->SetIntField(object, fieldIds.videoBitrate, pProperties->uiVideoBitrate);
+
+ // Set the width field.
+ pEnv->SetIntField(object, fieldIds.width, pProperties->uiVideoWidth);
+
+ // Set the height field.
+ pEnv->SetIntField(object, fieldIds.height, pProperties->uiVideoHeight);
+
+ // Set the averageFrameRate field.
+ pEnv->SetFloatField(object, fieldIds.averageFrameRate, pProperties->fAverageFrameRate);
+
+ // Set the profileAndLevel field.
+ pEnv->SetIntField(object, fieldIds.profileAndLevel,
+ videoEditJava_getVideoProfileCToJava(pProperties->ProfileAndLevel));
+
+ // Set the audioFormat field.
+ pEnv->SetIntField(object, fieldIds.audioFormat,
+ videoEditJava_getAudioFormatCToJava(pProperties->AudioStreamType));
+
+ // Set the audioDuration field.
+ pEnv->SetIntField(object, fieldIds.audioDuration, pProperties->uiClipAudioDuration);
+
+ // Set the audioBitrate field.
+ pEnv->SetIntField(object, fieldIds.audioBitrate, pProperties->uiAudioBitrate);
+
+ // Set the audioChannels field.
+ pEnv->SetIntField(object, fieldIds.audioChannels, pProperties->uiNbChannels);
+
+ // Set the audioSamplingFrequency field.
+ pEnv->SetIntField(object, fieldIds.audioSamplingFrequency,
+ pProperties->uiSamplingFrequency);
+
+ // Return the object.
+ (*pObject) = object;
+ }
+ }
+}
+
+void
+videoEditClasses_freeClipSettings(
+ M4VSS3GPP_ClipSettings** ppSettings)
+{
+ // Check if memory was allocated for the ClipSettings.
+ if (M4OSA_NULL != (*ppSettings))
+ {
+ // Log the function call.
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "videoEditClasses_freeClipSettings()");
+
+ // Free the input file path.
+ videoEditOsal_free((*ppSettings)->pFile);
+ (*ppSettings)->pFile = M4OSA_NULL;
+ (*ppSettings)->filePathSize = 0;
+
+ // Free the clip settings.
+ M4xVSS_FreeClipSettings((*ppSettings));
+
+ // Free the settings structure.
+ videoEditOsal_free((*ppSettings));
+ (*ppSettings) = M4OSA_NULL;
+ }
+}
+
+#ifdef VIDEOEDIT_LOGGING_ENABLED
+void
+videoEditClasses_logClipSettings(
+ M4VSS3GPP_ClipSettings* pSettings,
+ int indentation)
+{
+ // Check if memory was allocated for the ClipSettings.
+ if (M4OSA_NULL != pSettings)
+ {
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c pFile: %s", indentation, ' ',
+ (M4OSA_NULL != pSettings->pFile) ? (char*)pSettings->pFile : "<null>");
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c FileType: %s", indentation, ' ',
+ videoEditJava_getClipTypeString(pSettings->FileType));
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c filePathSize: %u", indentation, ' ',
+ (unsigned int)pSettings->filePathSize);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c ClipProperties:", indentation, ' ');
+ videoEditClasses_logClipProperties(&pSettings->ClipProperties,
+ indentation + VIDEOEDIT_LOG_INDENTATION);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uiBeginCutTime: %u ms", indentation, ' ',
+ (unsigned int)pSettings->uiBeginCutTime);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uiEndCutTime: %u ms", indentation, ' ',
+ (unsigned int)pSettings->uiEndCutTime);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uiBeginCutPercent: %u %%", indentation, ' ',
+ (unsigned int)pSettings->xVSS.uiBeginCutPercent);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uiEndCutPercent: %u %%", indentation, ' ',
+ (unsigned int)pSettings->xVSS.uiEndCutPercent);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uiDuration: %u ms", indentation, ' ',
+ (unsigned int)pSettings->xVSS.uiDuration);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c isPanZoom: %s", indentation, ' ',
+ pSettings->xVSS.isPanZoom ? "true" : "false");
+        VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+            "%*c PanZoomXa: %d", indentation, ' ',
+            pSettings->xVSS.PanZoomXa);
+        VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+            "%*c PanZoomTopleftXa: %d", indentation, ' ',
+            pSettings->xVSS.PanZoomTopleftXa);
+        VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+            "%*c PanZoomTopleftYa: %d", indentation, ' ',
+            pSettings->xVSS.PanZoomTopleftYa);
+        VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+            "%*c PanZoomXb: %d", indentation, ' ',
+            pSettings->xVSS.PanZoomXb);
+        VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+            "%*c PanZoomTopleftXb: %d", indentation, ' ',
+            pSettings->xVSS.PanZoomTopleftXb);
+        VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+            "%*c PanZoomTopleftYb: %d", indentation, ' ',
+            pSettings->xVSS.PanZoomTopleftYb);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c MediaRendering: %s", indentation, ' ',
+ videoEditJava_getMediaRenderingString(pSettings->xVSS.MediaRendering));
+ }
+ else
+ {
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c <null>", indentation, ' ');
+ }
+}
+#endif
+
+
+void
+videoEditClasses_getEditSettings(
+ bool* pResult,
+ JNIEnv* pEnv,
+ jobject object,
+ M4VSS3GPP_EditSettings** ppSettings,
+ bool flag)
+{
+    VideoEditJava_EditSettingsFieldIds fieldIds = {NULL, NULL, NULL, NULL, NULL, NULL,
+                                                   NULL, NULL, NULL, NULL, NULL, NULL,
+                                                   NULL, NULL, NULL};
+ jobjectArray clipSettingsArray = NULL;
+ jsize clipSettingsArraySize = 0;
+ jobject clipSettings = NULL;
+ jobjectArray transitionSettingsArray = NULL;
+ jsize transitionSettingsArraySize = 0;
+ jobject transitionSettings = NULL;
+ jobjectArray effectSettingsArray = NULL;
+ jsize effectSettingsArraySize = 0;
+ jobject effectSettings = NULL;
+ jobject backgroundMusicSettings = NULL;
+ int audioChannels = 0;
+ M4VSS3GPP_EditSettings* pSettings = M4OSA_NULL;
+ bool converted = true;
+
+ // Check if the previous action succeeded.
+ if (*pResult)
+ {
+ // Log the function call.
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "videoEditClasses_getEditSettings()");
+
+ // Retrieve the field ids.
+ videoEditJava_getEditSettingsFieldIds(pResult, pEnv, &fieldIds);
+ }
+
+ // Only retrieve the EditSettings if the previous action succeeded.
+ if (*pResult)
+ {
+ // Check if the object is valid.
+ if (NULL != object)
+ {
+ // Retrieve the clipSettingsArray.
+ videoEditJava_getArray(pResult, pEnv, object,
+ fieldIds.clipSettingsArray,
+ &clipSettingsArray,
+ &clipSettingsArraySize);
+
+ // Retrieve the transitionSettingsArray.
+ videoEditJava_getArray(pResult, pEnv, object,
+ fieldIds.transitionSettingsArray,
+ &transitionSettingsArray,
+ &transitionSettingsArraySize);
+
+ // Retrieve the effectSettingsArray.
+ videoEditJava_getArray(pResult, pEnv, object,
+ fieldIds.effectSettingsArray,
+ &effectSettingsArray,
+ &effectSettingsArraySize);
+
+ // Retrieve the backgroundMusicSettings.
+ videoEditJava_getObject(pResult, pEnv, object, fieldIds.backgroundMusicSettings,
+ &backgroundMusicSettings);
+
+ // Check if the arrays and background music settings object could be retrieved.
+ if (*pResult)
+ {
+ // Retrieve the number of channels.
+ audioChannels = pEnv->GetIntField(object, fieldIds.audioChannels);
+ }
+ }
+ }
+
+ // Only validate the EditSettings if the fields could be located.
+ if (*pResult)
+ {
+ // Check if there is at least one clip.
+ //videoEditJava_checkAndThrowIllegalArgumentException(pResult, pEnv,
+ // (clipSettingsArraySize < 1),
+ // "there should be at least one clip");
+        if (clipSettingsArraySize < 1) {
+            return;
+        }
+        if (flag)
+ {
+            // Check if there are clips or transitions.
+ if ((clipSettingsArraySize != 0) || (transitionSettingsArraySize != 0))
+ {
+ // The number of transitions must be equal to the number of clips - 1.
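+                // For example, an edit with three clips requires exactly two transitions.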
+ videoEditJava_checkAndThrowIllegalArgumentException(pResult, pEnv,
+ (clipSettingsArraySize != (transitionSettingsArraySize + 1)),
+ "the number of transitions should be equal to the number of clips - 1");
+ }
+ }
+ }
+
+ // Only retrieve the EditSettings if the fields could be located.
+ if (*pResult)
+ {
+ // Check if the object is valid.
+ if (NULL != object)
+ {
+ // Allocate memory for the EditSettings.
+ pSettings = (M4VSS3GPP_EditSettings*)videoEditOsal_alloc(pResult, pEnv,
+ sizeof(M4VSS3GPP_EditSettings), "EditSettings");
+
+ // Check if memory could be allocated for the EditSettings.
+ if (*pResult)
+ {
+ // Set the number of clips that will be edited.
+ pSettings->uiClipNumber = clipSettingsArraySize;
+
+ // Check if the clip settings array contains items.
+ if (clipSettingsArraySize > 0)
+ {
+ // Allocate memory for the clip settings array.
+ pSettings->pClipList = (M4VSS3GPP_ClipSettings **)videoEditOsal_alloc(pResult,
+ pEnv,
+ clipSettingsArraySize * sizeof(M4VSS3GPP_ClipSettings *),
+ "ClipSettingsArray");
+ if (*pResult)
+ {
+ // Loop over all clip settings objects.
+ for (int i = 0; ((*pResult) && (i < clipSettingsArraySize)); i++)
+ {
+ // Get the clip settings object.
+ clipSettings = pEnv->GetObjectArrayElement(clipSettingsArray, i);
+
+ // Get the clip settings.
+ videoEditClasses_getClipSettings(pResult, pEnv, clipSettings,
+ &pSettings->pClipList[i]);
+ }
+ }
+ }
+
+ // Check if the transition settings array contains items.
+ if (transitionSettingsArraySize > 0)
+ {
+ // Allocate memory for the transition settings array.
+ pSettings->pTransitionList =
+ (M4VSS3GPP_TransitionSettings **)videoEditOsal_alloc(pResult,
+ pEnv, transitionSettingsArraySize * sizeof(M4VSS3GPP_TransitionSettings *),
+ "TransitionSettingsArray");
+ if (*pResult)
+ {
+ // Loop over all transition settings objects.
+ for (int i = 0; ((*pResult) && (i < transitionSettingsArraySize)); i++)
+ {
+ // Get the transition settings object.
+ transitionSettings =
+ pEnv->GetObjectArrayElement(transitionSettingsArray, i);
+
+ // Get the transition settings.
+ videoEditClasses_getTransitionSettings(pResult, pEnv,
+ transitionSettings, &pSettings->pTransitionList[i]);
+ }
+ }
+ }
+
+ // Check if the effect settings array contains items.
+ if (effectSettingsArraySize > 0)
+ {
+ // Allocate memory for the effect settings array.
+ pSettings->Effects = (M4VSS3GPP_EffectSettings*)videoEditOsal_alloc(pResult,
+ pEnv,
+ effectSettingsArraySize * sizeof(M4VSS3GPP_EffectSettings),
+ "EffectSettingsArray");
+ if (*pResult)
+ {
+ // Loop over all effect settings objects.
+ for (int i = 0; ((*pResult) && (i < effectSettingsArraySize)); i++)
+ {
+ // Get the effect settings object.
+ effectSettings = pEnv->GetObjectArrayElement(effectSettingsArray, i);
+
+ // Get the effect settings.
+ videoEditClasses_getEffectSettings(pResult, pEnv, effectSettings,
+ &pSettings->Effects[i]);
+ }
+ }
+ }
+
+ // Check if the clips, transitions and effects could be set.
+ if (*pResult)
+ {
+ // Set the number of effects in the clip.
+ pSettings->nbEffects = (M4OSA_UInt8)effectSettingsArraySize;
+
+ // Set the frame rate of the output video.
+ pSettings->videoFrameRate =
+ (M4VIDEOEDITING_VideoFramerate)videoEditJava_getVideoFrameRateJavaToC(
+ &converted, pEnv->GetIntField(object, fieldIds.videoFrameRate));
+
+ // Check if the frame rate is valid.
+ videoEditJava_checkAndThrowIllegalArgumentException(pResult, pEnv,
+ !converted, "editSettings.videoFrameRate is invalid");
+ }
+
+ // Check if the frame rate could be set.
+ if (*pResult)
+ {
+ // Set the path of the output file.
+ pSettings->pOutputFile = (M4OSA_Char*)videoEditJava_getString(pResult, pEnv,
+ object, fieldIds.outputFile, &pSettings->uiOutputPathSize);
+ }
+
+ // Check if path of the output file could be set.
+ if (*pResult)
+ {
+ // Set the path of the temporary file produced when using
+ // the constant memory 3gp writer.
+ pSettings->pTemporaryFile = M4OSA_NULL;
+
+ // Set the output video size.
+ pSettings->xVSS.outputVideoSize =
+ (M4VIDEOEDITING_VideoFrameSize)videoEditJava_getVideoFrameSizeJavaToC(
+ &converted, pEnv->GetIntField(object, fieldIds.videoFrameSize));
+
+ // Check if the output video size is valid.
+ videoEditJava_checkAndThrowIllegalArgumentException(pResult, pEnv,
+ !converted, "editSettings.videoFrameSize is invalid");
+ }
+
+ // Check if the output video size could be set.
+ if (*pResult)
+ {
+ // Set the output video format.
+ pSettings->xVSS.outputVideoFormat =
+ (M4VIDEOEDITING_VideoFormat)videoEditJava_getVideoFormatJavaToC(
+ &converted, pEnv->GetIntField(object, fieldIds.videoFormat));
+
+ // Check if the output video format is valid.
+ videoEditJava_checkAndThrowIllegalArgumentException(pResult, pEnv,
+ !converted, "editSettings.videoFormat is invalid");
+ }
+
+ // Check if the output video format could be set.
+ if (*pResult)
+ {
+ // Set the output audio format.
+ pSettings->xVSS.outputAudioFormat =
+ (M4VIDEOEDITING_AudioFormat)videoEditJava_getAudioFormatJavaToC(
+ &converted, pEnv->GetIntField(object, fieldIds.audioFormat));
+
+ // Check if the output audio format is valid.
+ videoEditJava_checkAndThrowIllegalArgumentException(pResult, pEnv,
+ !converted, "editSettings.audioFormat is invalid");
+ }
+
+ // Check if the output audio format could be set.
+ if (*pResult)
+ {
+ // Set the output audio sampling frequency when not replacing the audio,
+ // or replacing it with MP3 audio.
+ pSettings->xVSS.outputAudioSamplFreq =
+ (M4VIDEOEDITING_AudioSamplingFrequency)\
+ videoEditJava_getAudioSamplingFrequencyJavaToC(
+ &converted, pEnv->GetIntField(object, fieldIds.audioSamplingFreq));
+
+ // Check if the output audio sampling frequency is valid.
+ videoEditJava_checkAndThrowIllegalArgumentException(pResult, pEnv,
+ !converted, "editSettings.audioSamplingFreq is invalid");
+ }
+
+ // Check if the output audio sampling frequency could be set.
+ if (*pResult)
+ {
+ // Check if the number of audio channels is valid.
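+                        // A value of 0 is only accepted when the output audio format is
+                        // None or Null; otherwise 1 (mono) or 2 (stereo) is required.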
+ videoEditJava_checkAndThrowIllegalArgumentException(pResult, pEnv,
+ ((0 != audioChannels ) ||
+ ((M4VIDEOEDITING_kNoneAudio != pSettings->xVSS.outputAudioFormat) &&
+ (M4VIDEOEDITING_kNullAudio != pSettings->xVSS.outputAudioFormat) ) ) &&
+ (1 != audioChannels ) &&
+ (2 != audioChannels ),
+ "editSettings.audioChannels must be set to 0, 1 or 2");
+ }
+
+ // Check if the number of audio channels is valid.
+ if (*pResult)
+ {
+ // Set the maximum output file size (MMS usecase).
+ pSettings->xVSS.outputFileSize = (M4OSA_UInt32)pEnv->GetIntField(object,
+ fieldIds.maxFileSize);
+
+ // Whether or not the audio is mono, only valid for AAC.
+ pSettings->xVSS.bAudioMono = (M4OSA_Bool)(1 == audioChannels);
+
+ // Set the output video bitrate.
+ pSettings->xVSS.outputVideoBitrate = (M4OSA_UInt32)pEnv->GetIntField(object,
+ fieldIds.videoBitrate);
+
+ // Set the output audio bitrate.
+ pSettings->xVSS.outputAudioBitrate = (M4OSA_UInt32)pEnv->GetIntField(object,
+ fieldIds.audioBitrate);
+
+ // Set the background music settings.
+ videoEditClasses_getBackgroundMusicSettings(pResult, pEnv,
+ backgroundMusicSettings, &pSettings->xVSS.pBGMtrack);
+
+ // Set the text rendering function (will be set elsewhere).
+ pSettings->xVSS.pTextRenderingFct = M4OSA_NULL;
+ pSettings->PTVolLevel =
+ (M4OSA_Float)pEnv->GetIntField(object, fieldIds.primaryTrackVolume);
+ }
+ }
+
+ // Check if settings could be set.
+ if (*pResult)
+ {
+ // Return the settings.
+ (*ppSettings) = pSettings;
+ }
+ else
+ {
+ // Free the settings.
+ videoEditClasses_freeEditSettings(&pSettings);
+ }
+ }
+ }
+}
+
+void
+videoEditClasses_freeEditSettings(
+ M4VSS3GPP_EditSettings** ppSettings)
+{
+ // Check if memory was allocated for the EditSettings.
+ if (M4OSA_NULL != (*ppSettings))
+ {
+ // Log the function call.
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "videoEditClasses_freeEditSettings()");
+
+ // Free the background music settings.
+ videoEditClasses_freeBackgroundMusicSettings(&(*ppSettings)->xVSS.pBGMtrack);
+
+ // Free the path of the output file.
+ videoEditOsal_free((*ppSettings)->pOutputFile);
+ (*ppSettings)->pOutputFile = M4OSA_NULL;
+ (*ppSettings)->uiOutputPathSize = 0;
+
+ // Check if the EffectSettings should be freed.
+ if (M4OSA_NULL != (*ppSettings)->Effects)
+ {
+ // Loop over all effect settings.
+ for (int i = 0; i < (*ppSettings)->nbEffects; i++)
+ {
+ // Free the effect settings.
+ videoEditClasses_freeEffectSettings(&(*ppSettings)->Effects[i]);
+ }
+
+ // Free the memory for the effect settings array.
+ videoEditOsal_free((*ppSettings)->Effects);
+ (*ppSettings)->Effects = M4OSA_NULL;
+ }
+
+ // Reset the number of effects in the clip.
+ (*ppSettings)->nbEffects = 0;
+
+ // Check if there are clips.
+ if (0 < (*ppSettings)->uiClipNumber)
+ {
+ // Check if the TransitionSettings should be freed.
+ if (M4OSA_NULL != (*ppSettings)->pTransitionList)
+ {
+ // Loop over all transition settings.
+ for (int i = 0; i < ((*ppSettings)->uiClipNumber - 1); i++)
+ {
+ // Free the transition settings.
+ videoEditClasses_freeTransitionSettings(&(*ppSettings)->pTransitionList[i]);
+ }
+
+ // Free the memory for the transition settings array.
+ videoEditOsal_free((*ppSettings)->pTransitionList);
+ (*ppSettings)->pTransitionList = M4OSA_NULL;
+ }
+
+ // Check if the ClipSettings should be freed.
+ if (M4OSA_NULL != (*ppSettings)->pClipList)
+ {
+ // Loop over all clip settings.
+ for (int i = 0; i < (*ppSettings)->uiClipNumber; i++)
+ {
+ // Free the clip settings.
+ videoEditClasses_freeClipSettings(&(*ppSettings)->pClipList[i]);
+ }
+
+ // Free the memory for the clip settings array.
+ videoEditOsal_free((*ppSettings)->pClipList);
+ (*ppSettings)->pClipList = M4OSA_NULL;
+ }
+ }
+
+ // Reset the number of clips.
+ (*ppSettings)->uiClipNumber = 0;
+
+ // Free the settings structure.
+ videoEditOsal_free((*ppSettings));
+ (*ppSettings) = M4OSA_NULL;
+ }
+}
+
+#ifdef VIDEOEDIT_LOGGING_ENABLED
+void
+videoEditClasses_logEditSettings(
+ M4VSS3GPP_EditSettings* pSettings,
+ int indentation)
+{
+ // Check if memory was allocated for the EditSettings.
+ if (M4OSA_NULL != pSettings)
+ {
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uiClipNumber: %d", indentation, ' ',
+ pSettings->uiClipNumber);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uiMasterClip: %d", indentation, ' ',
+ pSettings->uiMasterClip);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c pClipList: %s", indentation, ' ',
+ (M4OSA_NULL != pSettings->pClipList) ? " " : "<null>");
+ if (M4OSA_NULL != pSettings->pClipList)
+ {
+ indentation += VIDEOEDIT_LOG_INDENTATION;
+ for (int i = 0; i < pSettings->uiClipNumber; i++)
+ {
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c pClipList[%d]:", indentation, ' ',
+ i);
+ videoEditClasses_logClipSettings(pSettings->pClipList[i],
+ indentation + VIDEOEDIT_LOG_INDENTATION);
+ }
+ indentation -= VIDEOEDIT_LOG_INDENTATION;
+ }
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c pTransitionList: %s", indentation, ' ',
+ (M4OSA_NULL != pSettings->pTransitionList) ? " " : "<null>");
+ if (M4OSA_NULL != pSettings->pTransitionList)
+ {
+ indentation += VIDEOEDIT_LOG_INDENTATION;
+ for (int i = 0; i < (pSettings->uiClipNumber - 1); i++)
+ {
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c pTransitionList[%d]:", indentation, ' ', i);
+ videoEditClasses_logTransitionSettings(pSettings->pTransitionList[i],
+ indentation + VIDEOEDIT_LOG_INDENTATION);
+ }
+ indentation -= VIDEOEDIT_LOG_INDENTATION;
+ }
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c Effects: %s", indentation, ' ',
+ (M4OSA_NULL != pSettings->Effects) ? " " : "<null>");
+ if (M4OSA_NULL != pSettings->Effects)
+ {
+ indentation += VIDEOEDIT_LOG_INDENTATION;
+ for (int i = 0; i < pSettings->nbEffects; i++)
+ {
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c Effects[%d]:", indentation, ' ', i);
+ videoEditClasses_logEffectSettings(&pSettings->Effects[i],
+ indentation + VIDEOEDIT_LOG_INDENTATION);
+ }
+ indentation -= VIDEOEDIT_LOG_INDENTATION;
+ }
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c nbEffects: %d", indentation, ' ',
+ pSettings->nbEffects);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c videoFrameRate: %s", indentation, ' ',
+ videoEditJava_getVideoFrameRateString(pSettings->videoFrameRate));
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c pOutputFile: %s", indentation, ' ',
+ (M4OSA_NULL != pSettings->pOutputFile) ? (char*)pSettings->pOutputFile : "<null>");
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uiOutputPathSize: %u", indentation, ' ',
+ (unsigned int)pSettings->uiOutputPathSize);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c pTemporaryFile: %s", indentation, ' ',
+ (M4OSA_NULL != pSettings->pTemporaryFile) ?\
+ (char*)pSettings->pTemporaryFile : "<null>");
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c outputVideoSize: %s", indentation, ' ',
+ videoEditJava_getVideoFrameSizeString(pSettings->xVSS.outputVideoSize));
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c outputVideoFormat: %s", indentation, ' ',
+ videoEditJava_getVideoFormatString(pSettings->xVSS.outputVideoFormat));
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c outputAudioFormat: %s", indentation, ' ',
+ videoEditJava_getAudioFormatString(pSettings->xVSS.outputAudioFormat));
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c outputAudioSamplFreq: %s", indentation, ' ',
+ videoEditJava_getAudioSamplingFrequencyString(pSettings->xVSS.outputAudioSamplFreq));
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c outputFileSize: %u", indentation, ' ',
+ (unsigned int)pSettings->xVSS.outputFileSize);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c bAudioMono: %s", indentation, ' ',
+ pSettings->xVSS.bAudioMono ? "true" : "false");
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c outputVideoBitrate: %s", indentation, ' ',
+ videoEditJava_getBitrateString(pSettings->xVSS.outputVideoBitrate));
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c outputAudioBitrate: %s", indentation, ' ',
+ videoEditJava_getBitrateString(pSettings->xVSS.outputAudioBitrate));
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c pBGMtrack:", indentation, ' ');
+ videoEditClasses_logBackgroundMusicSettings(pSettings->xVSS.pBGMtrack,
+ indentation + VIDEOEDIT_LOG_INDENTATION);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c pTextRenderingFct: %s", indentation, ' ',
+ (M4OSA_NULL != pSettings->xVSS.pTextRenderingFct) ? "set" : "<null>");
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c PTVolLevel: %u", indentation, ' ',
+ (unsigned int)pSettings->PTVolLevel);
+ }
+ else
+ {
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c <null>", indentation, ' ');
+ }
+}
+#endif
+
+
+void
+videoEditClasses_getEffectSettings(
+ bool* pResult,
+ JNIEnv* pEnv,
+ jobject object,
+ M4VSS3GPP_EffectSettings* pSettings)
+{
+ VideoEditJava_EffectSettingsFieldIds fieldIds = {NULL, NULL, NULL, NULL, NULL, NULL, NULL,
+ NULL, NULL, NULL, NULL, NULL, NULL, NULL,
+ NULL, NULL, NULL, NULL, NULL, NULL, NULL};
+ bool converted = true;
+
+ // Check if the previous action succeeded.
+ if (*pResult)
+ {
+ // Log the function call.
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "videoEditClasses_getEffectSettings()");
+
+ // Retrieve the field ids.
+ videoEditJava_getEffectSettingsFieldIds(pResult, pEnv, &fieldIds);
+ }
+
+ // Only validate the EffectSettings if the fields could be located.
+ if (*pResult)
+ {
+ // Check if the effect is set.
+ videoEditJava_checkAndThrowIllegalArgumentException(pResult, pEnv,
+ (NULL == object),
+ "effect is null");
+ }
+
+ // Only retrieve the EffectSettings if the fields could be located and validated.
+ if (*pResult)
+ {
+ // Set the start time in milliseconds.
+ pSettings->uiStartTime = (M4OSA_UInt32)pEnv->GetIntField(object, fieldIds.startTime);
+
+ // Set the duration in milliseconds.
+ pSettings->uiDuration = (M4OSA_UInt32)pEnv->GetIntField(object, fieldIds.duration);
+
+ // Set the video effect type, None, FadeIn, FadeOut, etc.
+ pSettings->VideoEffectType =
+ (M4VSS3GPP_VideoEffectType)videoEditJava_getVideoEffectJavaToC(
+ &converted, pEnv->GetIntField(object, fieldIds.videoEffectType));
+
+ // Check if the video effect type is valid.
+ videoEditJava_checkAndThrowIllegalArgumentException(pResult, pEnv,
+ !converted, "effect.videoEffectType is invalid");
+ }
+
+ // Check if the video effect type could be set.
+ if (*pResult)
+ {
+ // Set the external effect function.
+ pSettings->ExtVideoEffectFct = M4OSA_NULL;
+
+ // Set the context given to the external effect function.
+ pSettings->pExtVideoEffectFctCtxt = M4OSA_NULL;
+
+ // Set the audio effect type, None, FadeIn, FadeOut.
+ pSettings->AudioEffectType =
+ (M4VSS3GPP_AudioEffectType)videoEditJava_getAudioEffectJavaToC(
+ &converted, pEnv->GetIntField(object, fieldIds.audioEffectType));
+
+ // Check if the audio effect type is valid.
+ videoEditJava_checkAndThrowIllegalArgumentException(pResult, pEnv,
+ !converted, "effect.audioEffectType is invalid");
+ }
+
+ // Check if the audio effect type could be set.
+ if (*pResult)
+ {
+ // Set the start in percentage of the cut clip duration.
+ pSettings->xVSS.uiStartPercent = (M4OSA_UInt32)pEnv->GetIntField(object,
+ fieldIds.startPercent);
+
+ // Set the duration in percentage of the ((clip duration) - (effect starttime)).
+ pSettings->xVSS.uiDurationPercent = (M4OSA_UInt32)pEnv->GetIntField(object,
+ fieldIds.durationPercent);
+
+ // Set the framing file path (GIF/PNG file).
+ pSettings->xVSS.pFramingFilePath = (M4OSA_Char*)videoEditJava_getString(pResult, pEnv,
+ object, fieldIds.framingFile, M4OSA_NULL);
+
+ // Check if this is a framing effect.
+ if (M4xVSS_kVideoEffectType_Framing == (M4xVSS_VideoEffectType)pSettings->VideoEffectType)
+ {
+ // Check if the framing file path is valid.
+ videoEditJava_checkAndThrowIllegalArgumentException(pResult, pEnv,
+ (M4OSA_NULL == pSettings->xVSS.pFramingFilePath), "effect.framingFile is null");
+ }
+ }
+
+ // Check if the framing file path could be retrieved.
+ if (*pResult)
+ {
+ // Set the Framing RGB565 buffer.
+ pSettings->xVSS.pFramingBuffer = M4OSA_NULL;
+
+ // Set the top-left X coordinate in the output picture
+ // where the added frame will be displayed.
+ pSettings->xVSS.topleft_x = (M4OSA_UInt32)pEnv->GetIntField(object, fieldIds.topLeftX);
+
+ // Set the top-left Y coordinate in the output picture
+ // where the added frame will be displayed.
+ pSettings->xVSS.topleft_y = (M4OSA_UInt32)pEnv->GetIntField(object, fieldIds.topLeftY);
+
+ // Set whether or not the framing image is resized to output video size.
+ pSettings->xVSS.bResize =
+ (M4OSA_Bool)pEnv->GetBooleanField(object, fieldIds.framingResize);
+
+        // Set the new size to which the framing buffer needs to be resized.
+ pSettings->xVSS.framingScaledSize =
+ (M4VIDEOEDITING_VideoFrameSize)pEnv->GetIntField(object, fieldIds.framingScaledSize);
+
+ // Set the text buffer.
+ pSettings->xVSS.pTextBuffer = (M4OSA_Char*)videoEditJava_getString(pResult, pEnv, object,
+ fieldIds.text, &pSettings->xVSS.textBufferSize);
+ }
+
+ // Check if the text buffer could be retrieved.
+ if (*pResult)
+ {
+ // Set the data used by the font engine (size, color...).
+ pSettings->xVSS.pRenderingData = (M4OSA_Char*)videoEditJava_getString(pResult, pEnv,
+ object, fieldIds.textRenderingData, M4OSA_NULL);
+ }
+
+ // Check if the text rendering data could be retrieved.
+ if (*pResult)
+ {
+ // Set the text plane width.
+ pSettings->xVSS.uiTextBufferWidth = (M4OSA_UInt32)pEnv->GetIntField(object,
+ fieldIds.textBufferWidth);
+
+ // Set the text plane height.
+ pSettings->xVSS.uiTextBufferHeight = (M4OSA_UInt32)pEnv->GetIntField(object,
+ fieldIds.textBufferHeight);
+
+ // Set the processing rate of the effect added when using the Fifties effect.
+ pSettings->xVSS.uiFiftiesOutFrameRate = (M4OSA_UInt32)pEnv->GetIntField(object,
+ fieldIds.fiftiesFrameRate);
+
+ // Set the RGB16 input color of the effect added when using the rgb16 color effect.
+ pSettings->xVSS.uiRgb16InputColor = (M4OSA_UInt16)pEnv->GetIntField(object,
+ fieldIds.rgb16InputColor);
+
+ // Set the start percentage of Alpha blending.
+ pSettings->xVSS.uialphaBlendingStart = (M4OSA_UInt8)pEnv->GetIntField(object,
+ fieldIds.alphaBlendingStartPercent);
+
+ // Set the middle percentage of Alpha blending.
+ pSettings->xVSS.uialphaBlendingMiddle = (M4OSA_UInt8)pEnv->GetIntField(object,
+ fieldIds.alphaBlendingMiddlePercent);
+
+ // Set the end percentage of Alpha blending.
+ pSettings->xVSS.uialphaBlendingEnd = (M4OSA_UInt8)pEnv->GetIntField(object,
+ fieldIds.alphaBlendingEndPercent);
+
+ // Set the duration, in percentage of effect duration, of the FadeIn phase.
+ pSettings->xVSS.uialphaBlendingFadeInTime = (M4OSA_UInt8)pEnv->GetIntField(object,
+ fieldIds.alphaBlendingFadeInTimePercent);
+
+ // Set the duration, in percentage of effect duration, of the FadeOut phase.
+ pSettings->xVSS.uialphaBlendingFadeOutTime = (M4OSA_UInt8)pEnv->GetIntField(object,
+ fieldIds.alphaBlendingFadeOutTimePercent);
+
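+        // Note: only the M4VIFI_ImagePlane descriptor is allocated here; its width
+        // and height are filled in below, while the actual pixel data is expected
+        // to be attached later by the framing-effect handling code.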
+ if (pSettings->xVSS.pFramingFilePath != M4OSA_NULL)
+ {
+ pSettings->xVSS.pFramingBuffer =
+ (M4VIFI_ImagePlane *)M4OSA_malloc(sizeof(M4VIFI_ImagePlane),
+ 0x00,(M4OSA_Char *)"framing buffer");
+ }
+
+ if (pSettings->xVSS.pFramingBuffer != M4OSA_NULL)
+ {
+            // Framing buffer width and height.
+ pSettings->xVSS.pFramingBuffer->u_width = pEnv->GetIntField(object,
+ fieldIds.width);
+
+ pSettings->xVSS.pFramingBuffer->u_height = pEnv->GetIntField(object,
+ fieldIds.height);
+
+ pSettings->xVSS.width = pSettings->xVSS.pFramingBuffer->u_width;
+ pSettings->xVSS.height = pSettings->xVSS.pFramingBuffer->u_height;
+ pSettings->xVSS.rgbType = M4VSS3GPP_kRGB888;
+
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "pFramingBuffer u_width %d ", pSettings->xVSS.pFramingBuffer->u_width);
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "pFramingBuffer u_height %d", pSettings->xVSS.pFramingBuffer->u_height);
+
+ }
+
+ // Check if settings could be set.
+ if (!(*pResult))
+ {
+ // Free the settings.
+ videoEditClasses_freeEffectSettings(pSettings);
+ }
+ }
+}
+
+void
+videoEditClasses_freeEffectSettings(
+ M4VSS3GPP_EffectSettings* pSettings)
+{
+ // Check if memory was allocated for the EffectSettings.
+ if (M4OSA_NULL != pSettings)
+ {
+ // Log the function call.
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "videoEditClasses_freeEffectSettings()");
+
+ // Free the data used by the font engine (size, color...).
+ videoEditOsal_free(pSettings->xVSS.pRenderingData);
+ pSettings->xVSS.pRenderingData = M4OSA_NULL;
+
+ // Free the text buffer.
+ videoEditOsal_free(pSettings->xVSS.pTextBuffer);
+ pSettings->xVSS.pTextBuffer = M4OSA_NULL;
+ pSettings->xVSS.textBufferSize = 0;
+
+ // Free the framing file path.
+ videoEditOsal_free(pSettings->xVSS.pFramingFilePath);
+ pSettings->xVSS.pFramingFilePath = M4OSA_NULL;
+ }
+}
+
+#ifdef VIDEOEDIT_LOGGING_ENABLED
+void
+videoEditClasses_logEffectSettings(
+ M4VSS3GPP_EffectSettings* pSettings,
+ int indentation)
+{
+ // Check if memory was allocated for the EffectSettings.
+ if (M4OSA_NULL != pSettings)
+ {
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uiStartTime: %u ms", indentation, ' ',
+ (unsigned int)pSettings->uiStartTime);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uiDuration: %u ms", indentation, ' ',
+ (unsigned int)pSettings->uiDuration);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c VideoEffectType: %s", indentation, ' ',
+ videoEditJava_getVideoEffectString(pSettings->VideoEffectType));
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c ExtVideoEffectFct: %s", indentation, ' ',
+ (M4OSA_NULL != pSettings->ExtVideoEffectFct) ? "set" : "<null>");
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c pExtVideoEffectFctCtxt: %s", indentation, ' ',
+ (M4OSA_NULL != pSettings->pExtVideoEffectFctCtxt) ? "set" : "<null>");
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c AudioEffectType: %s", indentation, ' ',
+ videoEditJava_getAudioEffectString(pSettings->AudioEffectType));
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uiStartPercent: %u %%", indentation, ' ',
+ (unsigned int)pSettings->xVSS.uiStartPercent);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uiDurationPercent: %u %%", indentation, ' ',
+ (unsigned int)pSettings->xVSS.uiDurationPercent);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c pFramingFilePath: %s", indentation, ' ',
+ (M4OSA_NULL != pSettings->xVSS.pFramingFilePath) ?\
+ (char*)pSettings->xVSS.pFramingFilePath : "<null>");
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c pFramingBuffer: %s", indentation, ' ',
+ (M4OSA_NULL != pSettings->xVSS.pFramingBuffer) ? "set" : "<null>");
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c topleft_x: %u", indentation, ' ',
+ (unsigned int)pSettings->xVSS.topleft_x);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c topleft_y: %u", indentation, ' ',
+ (unsigned int)pSettings->xVSS.topleft_y);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c bResize: %s", indentation, ' ',
+ pSettings->xVSS.bResize ? "true" : "false");
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c pTextBuffer: %s", indentation, ' ',
+ (M4OSA_NULL != pSettings->xVSS.pTextBuffer) ?\
+ (char*)pSettings->xVSS.pTextBuffer : "<null>");
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c textBufferSize: %u", indentation, ' ',
+ (unsigned int)pSettings->xVSS.textBufferSize);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c pRenderingData: %s", indentation, ' ',
+ (M4OSA_NULL != pSettings->xVSS.pRenderingData) ?\
+ (char*)pSettings->xVSS.pRenderingData : "<null>");
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uiTextBufferWidth: %u", indentation, ' ',
+ (unsigned int)pSettings->xVSS.uiTextBufferWidth);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uiTextBufferHeight: %u", indentation, ' ',
+ (unsigned int)pSettings->xVSS.uiTextBufferHeight);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uiFiftiesOutFrameRate: %u", indentation, ' ',
+ (unsigned int)pSettings->xVSS.uiFiftiesOutFrameRate);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uiRgb16InputColor: %d", indentation, ' ',
+ pSettings->xVSS.uiRgb16InputColor);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uialphaBlendingStart: %d %%", indentation, ' ',
+ pSettings->xVSS.uialphaBlendingStart);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uialphaBlendingMiddle: %d %%", indentation, ' ',
+ pSettings->xVSS.uialphaBlendingMiddle);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uialphaBlendingEnd: %d %%", indentation, ' ',
+ pSettings->xVSS.uialphaBlendingEnd);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uialphaBlendingFadeInTime: %d %%", indentation, ' ',
+ pSettings->xVSS.uialphaBlendingFadeInTime);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uialphaBlendingFadeOutTime: %d %%", indentation, ' ',
+ pSettings->xVSS.uialphaBlendingFadeOutTime);
+ }
+ else
+ {
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c <null>", indentation, ' ');
+ }
+}
+#endif
+
+
+void
+videoEditClasses_getSlideTransitionSettings(
+ bool* pResult,
+ JNIEnv* pEnv,
+ jobject object,
+ M4xVSS_SlideTransitionSettings** ppSettings)
+{
+ VideoEditJava_SlideTransitionSettingsFieldIds fieldIds = {NULL};
+ M4xVSS_SlideTransitionSettings* pSettings = M4OSA_NULL;
+ bool converted = true;
+
+ // Check if the previous action succeeded.
+ if (*pResult)
+ {
+ // Log the function call.
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "videoEditClasses_getSlideTransitionSettings()");
+
+ // Retrieve the field ids.
+ videoEditJava_getSlideTransitionSettingsFieldIds(pResult, pEnv, &fieldIds);
+ }
+
+
+ // Only validate the SlideTransitionSettings if the fields could be located.
+ if (*pResult)
+ {
+ // Check if the clip is set.
+ videoEditJava_checkAndThrowIllegalArgumentException(pResult, pEnv,
+ (NULL == object),
+ "slideSettings is null");
+ }
+
+ // Only retrieve the SlideTransitionSettings if the fields could be located and validated.
+ if (*pResult)
+ {
+ // Allocate memory for the SlideTransitionSettings.
+ pSettings = (M4xVSS_SlideTransitionSettings*)videoEditOsal_alloc(pResult, pEnv,
+ sizeof(M4xVSS_SlideTransitionSettings), "SlideTransitionSettings");
+
+ // Check if memory could be allocated for the SlideTransitionSettings.
+ if (*pResult)
+ {
+ // Set the direction of the slide.
+ pSettings->direction =
+ (M4xVSS_SlideTransition_Direction)videoEditJava_getSlideDirectionJavaToC(
+ &converted, pEnv->GetIntField(object, fieldIds.direction));
+
+ // Check if the direction is valid.
+ videoEditJava_checkAndThrowIllegalArgumentException(pResult, pEnv,
+ !converted, "slideSettings.direction is invalid");
+ }
+
+ // Check if settings could be set.
+ if (*pResult)
+ {
+ // Return the settings.
+ (*ppSettings) = pSettings;
+ }
+ else
+ {
+ // Free the settings.
+ videoEditClasses_freeSlideTransitionSettings(&pSettings);
+ }
+ }
+}
+
+void
+videoEditClasses_freeSlideTransitionSettings(
+ M4xVSS_SlideTransitionSettings** ppSettings)
+{
+ // Check if memory was allocated for the SlideTransitionSettings.
+ if (M4OSA_NULL != (*ppSettings))
+ {
+ // Log the function call.
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "videoEditClasses_freeSlideTransitionSettings()");
+
+ // Free the settings structure.
+ videoEditOsal_free((*ppSettings));
+ (*ppSettings) = M4OSA_NULL;
+ }
+}
+
+#ifdef VIDEOEDIT_LOGGING_ENABLED
+void
+videoEditClasses_logSlideTransitionSettings(
+ M4xVSS_SlideTransitionSettings* pSettings,
+ int indentation)
+{
+ // Check if memory was allocated for the SlideTransitionSettings.
+ if (M4OSA_NULL != pSettings)
+ {
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c direction: %s", indentation, ' ',
+ videoEditJava_getSlideDirectionString(pSettings->direction));
+ }
+ else
+ {
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c <null>", indentation, ' ');
+ }
+}
+#endif
+
+
+void
+videoEditClasses_getTransitionSettings(
+ bool* pResult,
+ JNIEnv* pEnv,
+ jobject object,
+ M4VSS3GPP_TransitionSettings** ppSettings)
+{
+ VideoEditJava_TransitionSettingsFieldIds fieldIds = {NULL, NULL, NULL, NULL, NULL, NULL};
+ jobject alphaSettings = NULL;
+ jobject slideSettings = NULL;
+ M4VSS3GPP_TransitionSettings* pSettings = M4OSA_NULL;
+ bool converted = true;
+
+ // Check if the previous action succeeded.
+ if (*pResult)
+ {
+ // Log the function call.
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "videoEditClasses_getTransitionSettings()");
+
+ // Retrieve the field ids.
+ videoEditJava_getTransitionSettingsFieldIds(pResult, pEnv, &fieldIds);
+ }
+
+ // Only validate the TransitionSettings if the fields could be located.
+ if (*pResult)
+ {
+ // Check if the transition is set.
+ videoEditJava_checkAndThrowIllegalArgumentException(pResult, pEnv,
+ (NULL == object),
+ "transition is null");
+ }
+
+ // Check if the field ids could be located and validated.
+ if (*pResult)
+ {
+ // Retrieve the alphaSettings.
+ videoEditJava_getObject(pResult, pEnv, object, fieldIds.alphaSettings, &alphaSettings);
+
+ // Retrieve the slideSettings.
+ videoEditJava_getObject(pResult, pEnv, object, fieldIds.slideSettings, &slideSettings);
+ }
+
+ // Only retrieve the TransitionSettings if the fields could be located.
+ if (*pResult)
+ {
+ // Allocate memory for the TransitionSettings.
+ pSettings = (M4VSS3GPP_TransitionSettings*)videoEditOsal_alloc(pResult,
+ pEnv, sizeof(M4VSS3GPP_TransitionSettings), "TransitionSettings");
+
+ // Check if memory could be allocated for the TransitionSettings.
+ if (*pResult)
+ {
+ // Set the duration of the transition, in milliseconds (set to 0 to get no transition).
+ pSettings->uiTransitionDuration = (M4OSA_UInt32)pEnv->GetIntField(object,
+ fieldIds.duration);
+
+ // Set the type of the video transition.
+ pSettings->VideoTransitionType =
+ (M4VSS3GPP_VideoTransitionType)videoEditJava_getVideoTransitionJavaToC(
+ &converted, pEnv->GetIntField(object, fieldIds.videoTransitionType));
+
+ // Check if the video transition type is valid.
+ videoEditJava_checkAndThrowIllegalArgumentException(pResult, pEnv, !converted,
+ "transition.videoTransitionType is invalid");
+ }
+
+ // Check if the video transition type could be set.
+ if (*pResult)
+ {
+ // Set the external transition video effect function.
+ pSettings->ExtVideoTransitionFct = M4OSA_NULL;
+
+ // Set the context of the external transition video effect function.
+ pSettings->pExtVideoTransitionFctCtxt = M4OSA_NULL;
+
+ // Set the type of the audio transition.
+ pSettings->AudioTransitionType =
+ (M4VSS3GPP_AudioTransitionType)videoEditJava_getAudioTransitionJavaToC(
+ &converted, pEnv->GetIntField(object, fieldIds.audioTransitionType));
+
+ // Check if the audio transition type is valid.
+ videoEditJava_checkAndThrowIllegalArgumentException(pResult, pEnv, !converted,
+ "transition.audioTransitionType is invalid");
+ }
+
+ // Check if the audio transition type could be set.
+ if (*pResult)
+ {
+ // Set the transition behaviour.
+ pSettings->TransitionBehaviour =
+ (M4VSS3GPP_TransitionBehaviour)videoEditJava_getTransitionBehaviourJavaToC(
+ &converted, pEnv->GetIntField(object, fieldIds.transitionBehaviour));
+
+ // Check if the transition behaviour is valid.
+ videoEditJava_checkAndThrowIllegalArgumentException(pResult, pEnv, !converted,
+ "transition.transitionBehaviour is invalid");
+ }
+
+ // Check if the audio transition behaviour could be set.
+ if (*pResult)
+ {
+ // Check if a slide transition or alpha magic setting object is expected.
+ if ((int)pSettings->VideoTransitionType == M4xVSS_kVideoTransitionType_SlideTransition)
+ {
+ // Set the slide transition settings.
+ videoEditClasses_getSlideTransitionSettings(pResult, pEnv, slideSettings,
+ &pSettings->xVSS.transitionSpecific.pSlideTransitionSettings);
+ }
+ else if ((int)pSettings->VideoTransitionType == M4xVSS_kVideoTransitionType_AlphaMagic)
+ {
+ // Set the alpha magic settings.
+ videoEditClasses_getAlphaMagicSettings(pResult, pEnv, alphaSettings,
+ &pSettings->xVSS.transitionSpecific.pAlphaMagicSettings);
+ }
+ }
+
+ // Check if settings could be set.
+ if (*pResult)
+ {
+ // Return the settings.
+ (*ppSettings) = pSettings;
+ }
+ else
+ {
+ // Free the settings.
+ videoEditClasses_freeTransitionSettings(&pSettings);
+ }
+ }
+}
+
+void
+videoEditClasses_freeTransitionSettings(
+ M4VSS3GPP_TransitionSettings** ppSettings)
+{
+ // Check if memory was allocated for the TransitionSettings.
+ if (M4OSA_NULL != (*ppSettings))
+ {
+ // Log the function call.
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "videoEditClasses_freeTransitionSettings()");
+
+ // Check if a slide transition or alpha magic setting structure is expected.
+ if ((int)(*ppSettings)->VideoTransitionType == M4xVSS_kVideoTransitionType_SlideTransition)
+ {
+ // Free the slide transition settings.
+ videoEditClasses_freeSlideTransitionSettings(
+ &(*ppSettings)->xVSS.transitionSpecific.pSlideTransitionSettings);
+ }
+ else
+ {
+ // Free the alpha magic settings.
+ videoEditClasses_freeAlphaMagicSettings(
+ &(*ppSettings)->xVSS.transitionSpecific.pAlphaMagicSettings);
+ }
+
+ // Free the settings structure.
+ videoEditOsal_free((*ppSettings));
+ (*ppSettings) = M4OSA_NULL;
+ }
+}
+
+#ifdef VIDEOEDIT_LOGGING_ENABLED
+void
+videoEditClasses_logTransitionSettings(
+ M4VSS3GPP_TransitionSettings* pSettings,
+ int indentation)
+{
+ // Check if memory was allocated for the TransitionSettings.
+ if (M4OSA_NULL != pSettings)
+ {
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c uiTransitionDuration: %u ms", indentation, ' ',
+ (unsigned int)pSettings->uiTransitionDuration);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c VideoTransitionType: %s", indentation, ' ',
+ videoEditJava_getVideoTransitionString(pSettings->VideoTransitionType));
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c ExtVideoTransitionFct: %s", indentation, ' ',
+ (M4OSA_NULL != pSettings->ExtVideoTransitionFct) ? "set" : "<null>");
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c pExtVideoTransitionFctCtxt: %s", indentation, ' ',
+ (M4OSA_NULL != pSettings->pExtVideoTransitionFctCtxt) ? "set" : "<null>");
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c AudioTransitionType: %s", indentation, ' ',
+ videoEditJava_getAudioTransitionString(pSettings->AudioTransitionType));
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c TransitionBehaviour: %s", indentation, ' ',
+ videoEditJava_getTransitionBehaviourString(pSettings->TransitionBehaviour));
+
+ // Check if a slide transition or alpha magic setting structure is expected.
+ if ((int)pSettings->VideoTransitionType == M4xVSS_kVideoTransitionType_SlideTransition)
+ {
+ // Log the slide transition settings.
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c pSlideTransitionSettings:", indentation, ' ');
+ videoEditClasses_logSlideTransitionSettings\
+ (pSettings->xVSS.transitionSpecific.pSlideTransitionSettings,
+ indentation + VIDEOEDIT_LOG_INDENTATION);
+ }
+ else
+ {
+ // Log the alpha magic settings.
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c pAlphaMagicSettings:", indentation, ' ');
+ videoEditClasses_logAlphaMagicSettings\
+ (pSettings->xVSS.transitionSpecific.pAlphaMagicSettings,
+ indentation + VIDEOEDIT_LOG_INDENTATION);
+ }
+ }
+ else
+ {
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c <null>", indentation, ' ');
+ }
+}
+#endif
+#ifdef VIDEOEDIT_LOGGING_ENABLED
+void
+videoEditPropClass_logProperties(
+ VideoEditPropClass_Properties* pProperties,
+ int indentation)
+{
+ // Check if memory was allocated for the Properties.
+ if (M4OSA_NULL != pProperties)
+ {
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_PROP_CLASSES",
+ "%*c uiClipDuration: %u", indentation, ' ',
+ (unsigned int)pProperties->uiClipDuration);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_PROP_CLASSES",
+ "%*c FileType: %s", indentation, ' ',
+ videoEditJava_getFileTypeString(pProperties->FileType));
+
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_PROP_CLASSES",
+ "%*c VideoStreamType: %s", indentation, ' ',
+ videoEditJava_getVideoFormatString(pProperties->VideoStreamType));
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_PROP_CLASSES",
+ "%*c uiClipVideoDuration: %u", indentation, ' ',
+ (unsigned int)pProperties->uiClipVideoDuration);
+
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_PROP_CLASSES",
+ "%*c uiVideoBitrate: %s", indentation, ' ',
+ videoEditJava_getBitrateString(pProperties->uiVideoBitrate));
+
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_PROP_CLASSES",
+ "%*c uiVideoWidth: %u", indentation, ' ',
+ (unsigned int)pProperties->uiVideoWidth);
+
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_PROP_CLASSES",
+ "%*c uiVideoHeight: %u", indentation, ' ',
+            (unsigned int)pProperties->uiVideoHeight);
+
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_PROP_CLASSES",
+ "%*c fAverageFrameRate: %.3f", indentation, ' ',
+ pProperties->fAverageFrameRate);
+
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_PROP_CLASSES",
+ "%*c ProfileAndLevel: %s", indentation, ' ',
+ videoEditJava_getVideoProfileString(pProperties->ProfileAndLevel));
+
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_PROP_CLASSES",
+ "%*c AudioStreamType: %s", indentation, ' ',
+ videoEditJava_getAudioFormatString(pProperties->AudioStreamType));
+
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_PROP_CLASSES",
+ "%*c uiClipAudioDuration: %u", indentation, ' ',
+ (unsigned int)pProperties->uiClipAudioDuration);
+
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_PROP_CLASSES",
+ "%*c uiAudioBitrate: %s", indentation, ' ',
+ videoEditJava_getBitrateString(pProperties->uiAudioBitrate));
+
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_PROP_CLASSES",
+ "%*c uiNbChannels: %u", indentation, ' ',
+ (unsigned int)pProperties->uiNbChannels);
+
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_PROP_CLASSES",
+ "%*c uiSamplingFrequency: %u", indentation, ' ',
+ (unsigned int)pProperties->uiSamplingFrequency);
+ }
+ else
+ {
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_PROP_CLASSES",
+ "%*c <null>", indentation, ' ');
+ }
+}
+#endif
+
+
+void
+videoEditClasses_createVersion(
+ bool* pResult,
+ JNIEnv* pEnv,
+ M4_VersionInfo* pVersionInfo,
+ jobject* pObject)
+{
+ VideoEditJava_VersionFieldIds fieldIds = {NULL, NULL, NULL};
+ jclass clazz = NULL;
+ jobject object = NULL;
+
+ // Check if the previous action succeeded.
+ if (*pResult)
+ {
+ // Log the function call.
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "videoEditClasses_createVersion()");
+
+ // Retrieve the class.
+ videoEditJava_getVersionClass(pResult, pEnv, &clazz);
+
+ // Retrieve the field ids.
+ videoEditJava_getVersionFieldIds(pResult, pEnv, &fieldIds);
+ }
+
+ // Only create an object if the class and fields could be located.
+ if (*pResult)
+ {
+ // Allocate a new object.
+ object = pEnv->AllocObject(clazz);
+
+        // Check if the allocation succeeded.
+ videoEditJava_checkAndThrowRuntimeException(pResult, pEnv,
+ (NULL == object),
+ M4ERR_ALLOC);
+ if (NULL != object)
+ {
+ // Set the major field.
+ pEnv->SetIntField(object, fieldIds.major, pVersionInfo->m_major);
+
+ // Set the minor field.
+ pEnv->SetIntField(object, fieldIds.minor, pVersionInfo->m_minor);
+
+ // Set the revision field.
+ pEnv->SetIntField(object, fieldIds.revision, pVersionInfo->m_revision);
+
+ // Return the object.
+ (*pObject) = object;
+ }
+ }
+}
+
+#ifdef VIDEOEDIT_LOGGING_ENABLED
+void
+videoEditClasses_logVersion(
+ M4_VersionInfo* pVersionInfo,
+ int indentation)
+{
+ // Check if memory was allocated for the Version.
+ if (M4OSA_NULL != pVersionInfo)
+ {
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c major: %u ms", indentation, ' ',
+ (unsigned int)pVersionInfo->m_major);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c minor: %u", indentation, ' ',
+ (unsigned int)pVersionInfo->m_minor);
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c revision: %u", indentation, ' ',
+ (unsigned int)pVersionInfo->m_revision);
+ }
+ else
+ {
+ VIDEOEDIT_LOG_SETTING(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "%*c <null>", indentation, ' ');
+ }
+}
+#endif
+
+
+void*
+videoEditClasses_getContext(
+ bool* pResult,
+ JNIEnv* pEnv,
+ jobject object)
+{
+ void* pContext = M4OSA_NULL;
+ jclass clazz = NULL;
+ VideoEditJava_EngineFieldIds fieldIds = {NULL};
+
+ // Check if the previous action succeeded.
+ if (*pResult)
+ {
+ // Log the function call.
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "videoEditClasses_getContext()");
+
+ // Retrieve the class.
+ videoEditJava_getEngineClass(pResult, pEnv, &clazz);
+
+ // Retrieve the field ids.
+ videoEditJava_getEngineFieldIds(pResult, pEnv, &fieldIds);
+ }
+
+ // Check if the class and field ids could be located.
+ if (*pResult)
+ {
+ // Retrieve the context pointer.
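+        // Note: the native context is stored in a Java int field, so this relies
+        // on pointers fitting in a 32-bit jint.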
+ pContext = (void *)pEnv->GetIntField(object, fieldIds.context);
+ }
+
+ // Return the context pointer.
+ return(pContext);
+}
+
+void
+videoEditClasses_setContext(
+ bool* pResult,
+ JNIEnv* pEnv,
+ jobject object,
+ void* pContext)
+{
+ jclass clazz = NULL;
+ VideoEditJava_EngineFieldIds fieldIds = {NULL};
+
+ // Check if the previous action succeeded.
+ if (*pResult)
+ {
+ // Log the function call.
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "videoEditClasses_setContext()");
+
+ // Retrieve the class.
+ videoEditJava_getEngineClass(pResult, pEnv, &clazz);
+
+ // Retrieve the field ids.
+ videoEditJava_getEngineFieldIds(pResult, pEnv, &fieldIds);
+ }
+
+ // Check if the class and field ids could be located.
+ if (*pResult)
+ {
+ // Set the context field.
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "The context value from JAVA before setting is = 0x%x",
+ pEnv->GetIntField(object, fieldIds.context));
+
+ pEnv->SetIntField(object, fieldIds.context, (int)pContext);
+ M4OSA_TRACE1_1("The context value in JNI is = 0x%x",pContext);
+
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_CLASSES",
+ "The context value from JAVA after setting is = 0x%x",
+ pEnv->GetIntField(object, fieldIds.context));
+ }
+}
+
diff --git a/media/jni/mediaeditor/VideoEditorClasses.h b/media/jni/mediaeditor/VideoEditorClasses.h
new file mode 100755
index 000000000000..3c8f05588b04
--- /dev/null
+++ b/media/jni/mediaeditor/VideoEditorClasses.h
@@ -0,0 +1,589 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef VIDEO_EDITOR_CLASSES_H
+#define VIDEO_EDITOR_CLASSES_H
+
+#include <VideoEditorJava.h>
+/**
+ ************************************************************************
+ * @file VideoEditorClasses.h
+ * @brief Interface for the JNI methods and defines that access the
+ *        classes, objects and method ids defined in the Java layer
+ ************************************************************************
+*/
+
+
+extern "C" {
+#include <M4xVSS_API.h>
+#include <M4VSS3GPP_API.h>
+#include <M4VSS3GPP_ErrorCodes.h>
+#include <M4MCS_ErrorCodes.h>
+#include <M4READER_Common.h>
+#include <M4WRITER_common.h>
+};
+
+/*
+ * Java layer class/object name strings
+ */
+#define PACKAGE_NAME "android/media/videoeditor"
+
+#define MANUAL_EDIT_ENGINE_CLASS_NAME PACKAGE_NAME"/MediaArtistNativeHelper"
+#define MEDIA_PROPERTIES_ENGINE_CLASS_NAME PACKAGE_NAME"/MediaArtistNativeHelper"
+
+#define AUDIO_FORMAT_CLASS_NAME MANUAL_EDIT_ENGINE_CLASS_NAME"$AudioFormat"
+#define RESULTS_CLASS_NAME MANUAL_EDIT_ENGINE_CLASS_NAME"$Results"
+#define VERSION_CLASS_NAME MANUAL_EDIT_ENGINE_CLASS_NAME"$Version"
+#define AUDIO_SAMPLING_FREQUENCY_CLASS_NAME MANUAL_EDIT_ENGINE_CLASS_NAME"$AudioSamplingFrequency"
+#define BITRATE_CLASS_NAME MANUAL_EDIT_ENGINE_CLASS_NAME"$Bitrate"
+#define ERROR_CLASS_NAME MANUAL_EDIT_ENGINE_CLASS_NAME"$Result"
+#define FILE_TYPE_CLASS_NAME MANUAL_EDIT_ENGINE_CLASS_NAME"$FileType"
+#define MEDIA_RENDERING_CLASS_NAME MANUAL_EDIT_ENGINE_CLASS_NAME"$MediaRendering"
+#define VIDEO_FORMAT_CLASS_NAME MANUAL_EDIT_ENGINE_CLASS_NAME"$VideoFormat"
+#define VIDEO_FRAME_RATE_CLASS_NAME MANUAL_EDIT_ENGINE_CLASS_NAME"$VideoFrameRate"
+#define VIDEO_FRAME_SIZE_CLASS_NAME MANUAL_EDIT_ENGINE_CLASS_NAME"$VideoFrameSize"
+#define VIDEO_PROFILE_CLASS_NAME MANUAL_EDIT_ENGINE_CLASS_NAME"$VideoProfile"
+#define ALPHA_MAGIC_SETTINGS_CLASS_NAME MANUAL_EDIT_ENGINE_CLASS_NAME"$AlphaMagicSettings"
+#define AUDIO_EFFECT_CLASS_NAME MANUAL_EDIT_ENGINE_CLASS_NAME"$AudioEffect"
+#define AUDIO_TRANSITION_CLASS_NAME MANUAL_EDIT_ENGINE_CLASS_NAME"$AudioTransition"
+#define BACKGROUND_MUSIC_SETTINGS_CLASS_NAME MANUAL_EDIT_ENGINE_CLASS_NAME"$BackgroundMusicSettings"
+#define CLIP_SETTINGS_CLASS_NAME MANUAL_EDIT_ENGINE_CLASS_NAME"$ClipSettings"
+#define EDIT_SETTINGS_CLASS_NAME MANUAL_EDIT_ENGINE_CLASS_NAME"$EditSettings"
+#define EFFECT_SETTINGS_CLASS_NAME MANUAL_EDIT_ENGINE_CLASS_NAME"$EffectSettings"
+#define SLIDE_DIRECTION_CLASS_NAME MANUAL_EDIT_ENGINE_CLASS_NAME"$SlideDirection"
+#define SLIDE_TRANSITION_SETTINGS_CLASS_NAME MANUAL_EDIT_ENGINE_CLASS_NAME"$SlideTransitionSettings"
+#define TRANSITION_BEHAVIOUR_CLASS_NAME MANUAL_EDIT_ENGINE_CLASS_NAME"$TransitionBehaviour"
+#define TRANSITION_SETTINGS_CLASS_NAME MANUAL_EDIT_ENGINE_CLASS_NAME"$TransitionSettings"
+#define VIDEO_EFFECT_CLASS_NAME MANUAL_EDIT_ENGINE_CLASS_NAME"$VideoEffect"
+#define VIDEO_TRANSITION_CLASS_NAME MANUAL_EDIT_ENGINE_CLASS_NAME"$VideoTransition"
+#define PREVIEW_CLIPS_CLASS_NAME MANUAL_EDIT_ENGINE_CLASS_NAME"$PreviewClips"
+#define PREVIEW_SETTING_CLASS_NAME MANUAL_EDIT_ENGINE_CLASS_NAME"$PreviewSettings"
+#define PREVIEW_PROPERTIES_CLASS_NAME MANUAL_EDIT_ENGINE_CLASS_NAME"$PreviewClipProperties"
+#define AUDIO_SETTINGS_CLASS_NAME MANUAL_EDIT_ENGINE_CLASS_NAME"$AudioSettings"
+#define PROPERTIES_CLASS_NAME MANUAL_EDIT_ENGINE_CLASS_NAME"$Properties"
+
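+/*
+ * Engine task states
+ */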
+#define TASK_IDLE 0
+#define TASK_LOADING_SETTINGS 1
+#define TASK_ENCODING 2
+
+/*
+ * File type enum
+ */
+typedef enum
+{
+ VideoEditClasses_kFileType_3GPP,
+ VideoEditClasses_kFileType_MP4,
+ VideoEditClasses_kFileType_AMR,
+ VideoEditClasses_kFileType_MP3,
+ VideoEditClasses_kFileType_PCM,
+ VideoEditClasses_kFileType_JPG,
+ VideoEditClasses_kFileType_GIF,
+ VideoEditClasses_kFileType_PNG,
+ VideoEditClasses_kFileType_Unsupported
+} VideoEditClasses_FileType;
+
+/*
+ * Alpha magic transition structure
+ */
+typedef struct
+{
+ jfieldID file;
+ jfieldID blendingPercent;
+ jfieldID invertRotation;
+ jfieldID rgbWidth;
+ jfieldID rgbHeight;
+} VideoEditJava_AlphaMagicFieldIds;
+
+typedef struct
+{
+ jfieldID file;
+ jfieldID fileType;
+ jfieldID insertionTime;
+ jfieldID volumePercent;
+ jfieldID beginLoop;
+ jfieldID endLoop;
+ jfieldID enableDucking;
+ jfieldID duckingThreshold;
+ jfieldID lowVolume;
+ jfieldID isLooping;
+} VideoEditJava_BackgroundMusicFieldIds;
+/*
+ * Structure to hold media properties from native layer
+ */
+typedef struct {
+ M4OSA_UInt32 uiClipDuration;
+ VideoEditClasses_FileType FileType;
+ M4VIDEOEDITING_VideoFormat VideoStreamType;
+ M4OSA_UInt32 uiClipVideoDuration;
+ M4OSA_UInt32 uiVideoBitrate;
+ M4OSA_UInt32 uiVideoWidth;
+ M4OSA_UInt32 uiVideoHeight;
+ M4OSA_Float fAverageFrameRate;
+ M4VIDEOEDITING_VideoProfileAndLevel ProfileAndLevel;
+ M4VIDEOEDITING_AudioFormat AudioStreamType;
+ M4OSA_UInt32 uiClipAudioDuration;
+ M4OSA_UInt32 uiAudioBitrate;
+ M4OSA_UInt32 uiNbChannels;
+ M4OSA_UInt32 uiSamplingFrequency;
+} VideoEditPropClass_Properties;
+
+typedef struct
+{
+ jfieldID duration;
+ jfieldID fileType;
+ jfieldID videoFormat;
+ jfieldID videoDuration;
+ jfieldID videoBitrate;
+ jfieldID width;
+ jfieldID height;
+ jfieldID averageFrameRate;
+ jfieldID profileAndLevel;
+ jfieldID audioFormat;
+ jfieldID audioDuration;
+ jfieldID audioBitrate;
+ jfieldID audioChannels;
+ jfieldID audioSamplingFrequency;
+} VideoEditJava_PropertiesFieldIds;
+
+
+typedef struct
+{
+ jfieldID clipPath;
+ jfieldID fileType;
+ jfieldID beginCutTime;
+ jfieldID endCutTime;
+ jfieldID beginCutPercent;
+ jfieldID endCutPercent;
+ jfieldID panZoomEnabled;
+ jfieldID panZoomPercentStart;
+ jfieldID panZoomTopLeftXStart;
+ jfieldID panZoomTopLeftYStart;
+ jfieldID panZoomPercentEnd;
+ jfieldID panZoomTopLeftXEnd;
+ jfieldID panZoomTopLeftYEnd;
+ jfieldID mediaRendering;
+ jfieldID rgbFileWidth;
+ jfieldID rgbFileHeight;
+} VideoEditJava_ClipSettingsFieldIds;
+
+typedef struct
+{
+ jfieldID clipSettingsArray;
+ jfieldID transitionSettingsArray;
+ jfieldID effectSettingsArray;
+ jfieldID videoFrameRate;
+ jfieldID outputFile;
+ jfieldID videoFrameSize;
+ jfieldID videoFormat;
+ jfieldID audioFormat;
+ jfieldID audioSamplingFreq;
+ jfieldID maxFileSize;
+ jfieldID audioChannels;
+ jfieldID videoBitrate;
+ jfieldID audioBitrate;
+ jfieldID backgroundMusicSettings;
+ jfieldID primaryTrackVolume;
+} VideoEditJava_EditSettingsFieldIds;
+
+
+typedef struct
+{
+ jfieldID startTime;
+ jfieldID duration;
+ jfieldID videoEffectType;
+ jfieldID audioEffectType;
+ jfieldID startPercent;
+ jfieldID durationPercent;
+ jfieldID framingFile;
+ jfieldID framingBuffer;
+ jfieldID bitmapType;
+ jfieldID width;
+ jfieldID height;
+ jfieldID topLeftX;
+ jfieldID topLeftY;
+ jfieldID framingResize;
+ jfieldID framingScaledSize;
+ jfieldID text;
+ jfieldID textRenderingData;
+ jfieldID textBufferWidth;
+ jfieldID textBufferHeight;
+ jfieldID fiftiesFrameRate;
+ jfieldID rgb16InputColor;
+ jfieldID alphaBlendingStartPercent;
+ jfieldID alphaBlendingMiddlePercent;
+ jfieldID alphaBlendingEndPercent;
+ jfieldID alphaBlendingFadeInTimePercent;
+ jfieldID alphaBlendingFadeOutTimePercent;
+} VideoEditJava_EffectSettingsFieldIds;
+
+typedef struct
+{
+ jfieldID context;
+} VideoEditJava_EngineFieldIds;
+
+typedef struct
+{
+ jfieldID direction;
+} VideoEditJava_SlideTransitionSettingsFieldIds;
+
+typedef struct
+{
+ jfieldID duration;
+ jfieldID videoTransitionType;
+ jfieldID audioTransitionType;
+ jfieldID transitionBehaviour;
+ jfieldID alphaSettings;
+ jfieldID slideSettings;
+} VideoEditJava_TransitionSettingsFieldIds;
+
+typedef struct
+{
+ jfieldID major;
+ jfieldID minor;
+ jfieldID revision;
+} VideoEditJava_VersionFieldIds;
+
+
+typedef struct
+{
+ jmethodID onProgressUpdate;
+} VideoEditJava_EngineMethodIds;
+
+
+VIDEOEDIT_JAVA_DECLARE_CONSTANT_CLASS(AudioEffect )
+VIDEOEDIT_JAVA_DECLARE_CONSTANT_CLASS(AudioFormat )
+VIDEOEDIT_JAVA_DECLARE_CONSTANT_CLASS(AudioSamplingFrequency)
+VIDEOEDIT_JAVA_DECLARE_CONSTANT_CLASS(AudioTransition )
+VIDEOEDIT_JAVA_DECLARE_CONSTANT_CLASS(Bitrate )
+VIDEOEDIT_JAVA_DECLARE_CONSTANT_CLASS(Engine )
+VIDEOEDIT_JAVA_DECLARE_CONSTANT_CLASS(Error )
+VIDEOEDIT_JAVA_DECLARE_CONSTANT_CLASS(FileType )
+VIDEOEDIT_JAVA_DECLARE_CONSTANT_CLASS(MediaRendering )
+VIDEOEDIT_JAVA_DECLARE_CONSTANT_CLASS(SlideDirection )
+VIDEOEDIT_JAVA_DECLARE_CONSTANT_CLASS(TransitionBehaviour )
+VIDEOEDIT_JAVA_DECLARE_CONSTANT_CLASS(VideoEffect )
+VIDEOEDIT_JAVA_DECLARE_CONSTANT_CLASS(VideoFormat )
+VIDEOEDIT_JAVA_DECLARE_CONSTANT_CLASS(VideoFrameRate )
+VIDEOEDIT_JAVA_DECLARE_CONSTANT_CLASS(VideoFrameSize )
+VIDEOEDIT_JAVA_DECLARE_CONSTANT_CLASS(VideoProfile )
+VIDEOEDIT_JAVA_DECLARE_CONSTANT_CLASS(VideoTransition )
+
+
+VIDEOEDIT_JAVA_DECLARE_FIELD_CLASS(AlphaMagic )
+VIDEOEDIT_JAVA_DECLARE_FIELD_CLASS(BackgroundMusic )
+VIDEOEDIT_JAVA_DECLARE_FIELD_CLASS(ClipSettings )
+VIDEOEDIT_JAVA_DECLARE_FIELD_CLASS(EditSettings )
+VIDEOEDIT_JAVA_DECLARE_FIELD_CLASS(EffectSettings )
+VIDEOEDIT_JAVA_DECLARE_FIELD_CLASS(Engine )
+VIDEOEDIT_JAVA_DECLARE_FIELD_CLASS(SlideTransitionSettings )
+VIDEOEDIT_JAVA_DECLARE_FIELD_CLASS(TransitionSettings )
+VIDEOEDIT_JAVA_DECLARE_FIELD_CLASS(Version )
+
+VIDEOEDIT_JAVA_DECLARE_METHOD_CLASS(Engine )
+
+/*
+ * Init all Edit settings related structures
+ */
+void
+videoEditClasses_init(
+ bool* pResult,
+ JNIEnv* pEnv);
+/**
+ ************************************************************************
+ * @brief Media Properties init function.
+ * @param pResult (OUT) Pointer to hold result
+ * @param pEnv (IN) JVM Interface pointer
+ ************************************************************************
+*/
+void
+videoEditPropClass_init(
+ bool* pResult,
+ JNIEnv* pEnv);
+/**
+ ************************************************************************
+ * @brief Interface to populate Media Properties.
+ * @param pResult (IN/OUT) Pointer to hold result
+ * @param pEnv (IN) JVM Interface pointer
+ * @param pProperties (IN) Media properties structure pointer
+ * @param pObject (OUT) Java object to hold media
+ * properties for java layer.
+ ************************************************************************
+*/
+void
+videoEditPropClass_createProperties(
+ bool* pResult,
+ JNIEnv* pEnv,
+ VideoEditPropClass_Properties* pProperties,
+ jobject* pObject);
+
+/**
+ ************************************************************************
+ * @brief Interface to log/display media properties.
+ * @param pProperties (IN) Pointer holding media properties
+ * @param indentation (IN) Indentation to follow in display
+ ************************************************************************
+*/
+void
+videoEditPropClass_logProperties(
+ VideoEditPropClass_Properties* pProperties,
+ int indentation);
+
+/*
+ * Get alpha magic transition settings
+ */
+void
+videoEditClasses_getAlphaMagicSettings(
+ bool* pResult,
+ JNIEnv* pEnv,
+ jobject object,
+ M4xVSS_AlphaMagicSettings** ppSettings);
+
+/*
+ * Free alpha magic transition settings structure
+ */
+void
+videoEditClasses_freeAlphaMagicSettings(
+ M4xVSS_AlphaMagicSettings** ppSettings);
+
+/*
+ * Log alpha magic transition settings
+ */
+void
+videoEditClasses_logAlphaMagicSettings(
+ M4xVSS_AlphaMagicSettings* pSettings,
+ int indentation);
+
+/*
+ * Get Background Track settings
+ */
+void
+videoEditClasses_getBackgroundMusicSettings(
+ bool* pResult,
+ JNIEnv* pEnv,
+ jobject object,
+ M4xVSS_BGMSettings** ppSettings);
+
+/*
+ * Free Background Track settings structure
+ */
+void
+videoEditClasses_freeBackgroundMusicSettings(
+ M4xVSS_BGMSettings** ppSettings);
+
+/*
+ * Log Background Track settings
+ */
+void
+videoEditClasses_logBackgroundMusicSettings(
+ M4xVSS_BGMSettings* pSettings,
+ int indentation);
+
+/*
+ * Log clip properties
+ */
+void
+videoEditClasses_logClipProperties(
+ M4VIDEOEDITING_ClipProperties* pProperties,
+ int indentation);
+
+/*
+ * Get clip settings from Java
+ */
+void
+videoEditClasses_getClipSettings(
+ bool* pResult,
+ JNIEnv* pEnv,
+ jobject object,
+ M4VSS3GPP_ClipSettings** ppSettings);
+/**
+ ************************************************************************
+ * @brief Interface function to retrieve media properties for a given
+ * file.
+ * @param pEnv (IN) JVM Interface pointer
+ * @param thiz (IN) Java object (the calling instance)
+ * @param file (IN) File path for which media properties have
+ * to be retrieved.
+ ************************************************************************
+*/
+jobject
+videoEditProp_getProperties(
+ JNIEnv* pEnv,
+ jobject thiz,
+ jstring file);
+
+/*
+ * Create a Java clip settings object and populate it from the native structure
+ */
+void
+videoEditClasses_createClipSettings(
+ bool* pResult,
+ JNIEnv* pEnv,
+ M4VSS3GPP_ClipSettings* pSettings,
+ jobject* pObject);
+
+/*
+ * Free clip settings structure
+ */
+void
+videoEditClasses_freeClipSettings(
+ M4VSS3GPP_ClipSettings** ppSettings);
+
+/*
+ * Log clip settings structure
+ */
+void
+videoEditClasses_logClipSettings(
+ M4VSS3GPP_ClipSettings* pSettings,
+ int indentation);
+
+/*
+ * Get Edit settings from Java
+ */
+void
+videoEditClasses_getEditSettings(
+ bool* pResult,
+ JNIEnv* pEnv,
+ jobject object,
+ M4VSS3GPP_EditSettings** ppSettings,
+ bool flag);
+
+/*
+ * Free Edit Settings structure
+ */
+void
+videoEditClasses_freeEditSettings(
+ M4VSS3GPP_EditSettings** ppSettings);
+
+/*
+ * Log Edit settings structure
+ */
+void
+videoEditClasses_logEditSettings(
+ M4VSS3GPP_EditSettings* pSettings,
+ int indentation);
+
+/*
+ * Get Effect settings from Java
+ */
+void
+videoEditClasses_getEffectSettings(
+ bool* pResult,
+ JNIEnv* pEnv,
+ jobject object,
+ M4VSS3GPP_EffectSettings* pSettings);
+
+/*
+ * Free Effect settings structure
+ */
+void
+videoEditClasses_freeEffectSettings(
+ M4VSS3GPP_EffectSettings* pSettings);
+
+/*
+ * Log Effect settings
+ */
+void
+videoEditClasses_logEffectSettings(
+ M4VSS3GPP_EffectSettings* pSettings,
+ int indentation);
+
+/*
+ * Get Transition-Sliding settings from Java
+ */
+void
+videoEditClasses_getSlideTransitionSettings(
+ bool* pResult,
+ JNIEnv* pEnv,
+ jobject object,
+ M4xVSS_SlideTransitionSettings** ppSettings);
+
+/*
+ * Free Transition-Sliding structure
+ */
+void
+videoEditClasses_freeSlideTransitionSettings(
+ M4xVSS_SlideTransitionSettings** ppSettings);
+
+/*
+ * Log Transition-Sliding settings
+ */
+void
+videoEditClasses_logSlideTransitionSettings(
+ M4xVSS_SlideTransitionSettings* pSettings,
+ int indentation);
+
+/*
+ * Get Transition settings from Java
+ */
+void
+videoEditClasses_getTransitionSettings(
+ bool* pResult,
+ JNIEnv* pEnv,
+ jobject object,
+ M4VSS3GPP_TransitionSettings** ppSettings);
+
+/*
+ * Free Transition settings structure
+ */
+void
+videoEditClasses_freeTransitionSettings(
+ M4VSS3GPP_TransitionSettings** ppSettings);
+
+/*
+ * Log Transition settings
+ */
+void
+videoEditClasses_logTransitionSettings(
+ M4VSS3GPP_TransitionSettings* pSettings,
+ int indentation);
+
+/*
+ * Set version information to Java object
+ */
+void
+videoEditClasses_createVersion(
+ bool* pResult,
+ JNIEnv* pEnv,
+ M4_VersionInfo* pVersionInfo,
+ jobject* pObject);
+
+/*
+ * Log Version information
+ */
+void
+videoEditClasses_logVersion(
+ M4_VersionInfo* pVersionInfo,
+ int indentation);
+
+
+void*
+videoEditClasses_getContext(
+ bool* pResult,
+ JNIEnv* pEnv,
+ jobject object);
+
+void
+videoEditClasses_setContext(
+ bool* pResult,
+ JNIEnv* pEnv,
+ jobject object,
+ void* pContext);
+
+
+#endif // VIDEO_EDITOR_CLASSES_H
+
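The header above is essentially a catalogue of cached JNI identifiers (jfieldID/jmethodID structures) together with get/create/free/log helpers that marshal settings between Java objects and the native M4xVSS/M4VSS3GPP structures. As a rough sketch of the field-ID caching pattern those structures support (the helper names below are hypothetical and not part of this change; only PROPERTIES_CLASS_NAME and its "duration" field come from the code in this commit):

    // Hypothetical sketch: resolve a jfieldID once, then reuse it for fast reads.
    static jfieldID gDurationFieldId = NULL;

    static void cacheDurationField(JNIEnv* pEnv) {
        jclass clazz = pEnv->FindClass(PROPERTIES_CLASS_NAME);
        if (clazz != NULL) {
            gDurationFieldId = pEnv->GetFieldID(clazz, "duration", "I");
        }
    }

    static jint readDuration(JNIEnv* pEnv, jobject properties) {
        // GetIntField is cheap once the field id has been cached.
        return pEnv->GetIntField(properties, gDurationFieldId);
    }
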
diff --git a/media/jni/mediaeditor/VideoEditorJava.cpp b/media/jni/mediaeditor/VideoEditorJava.cpp
new file mode 100755
index 000000000000..1d610f69c48b
--- /dev/null
+++ b/media/jni/mediaeditor/VideoEditorJava.cpp
@@ -0,0 +1,885 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <VideoEditorClasses.h>
+#include <VideoEditorJava.h>
+#include <VideoEditorLogging.h>
+#include <VideoEditorOsal.h>
+
+extern "C" {
+#include <M4OSA_CharStar.h>
+};
+
+
+void
+videoEditJava_checkAndThrowIllegalArgumentException(
+ bool* pResult,
+ JNIEnv* pEnv,
+ bool condition,
+ const char* pMessage)
+{
+ // Check if the previous action succeeded.
+ if (*pResult)
+ {
+ // Check if the condition is true.
+ if (condition)
+ {
+ // Log the exception.
+ VIDEOEDIT_LOG_EXCEPTION(ANDROID_LOG_ERROR, "VIDEO_EDITOR_JAVA",\
+ "videoEditJava_checkAndThrowIllegalArgumentException, %s", pMessage);
+
+ // Reset the result flag.
+ (*pResult) = false;
+
+ // Throw an exception.
+ jniThrowException(pEnv, "java/lang/IllegalArgumentException", pMessage);
+ }
+ }
+}
+
+void
+videoEditJava_checkAndThrowRuntimeException(
+ bool* pResult,
+ JNIEnv* pEnv,
+ bool condition,
+ M4OSA_ERR result)
+{
+ const char* pMessage = NULL;
+
+ // Check if the previous action succeeded.
+ if (*pResult)
+ {
+ // Check if the condition is true.
+ if (condition)
+ {
+ // Get the error string.
+ pMessage = videoEditJava_getErrorName(result);
+
+ // Log the exception.
+ VIDEOEDIT_LOG_EXCEPTION(ANDROID_LOG_ERROR, "VIDEO_EDITOR_JAVA",
+ "videoEditJava_checkAndThrowRuntimeException, %s", pMessage);
+
+ // Reset the result flag.
+ (*pResult) = false;
+
+ // Throw an exception.
+ jniThrowException(pEnv, "java/lang/RuntimeException", pMessage);
+ }
+ }
+}
+
+void
+videoEditJava_checkAndThrowIllegalStateException(
+ bool* pResult,
+ JNIEnv* pEnv,
+ bool condition,
+ const char* pMessage)
+{
+ // Check if the previous action succeeded.
+ if (*pResult)
+ {
+ // Check if the condition is true.
+ if (condition)
+ {
+ // Log the exception.
+ VIDEOEDIT_LOG_EXCEPTION(ANDROID_LOG_ERROR, "VIDEO_EDITOR_JAVA",
+ "videoEditJava_checkAndThrowIllegalStateException, %s", pMessage);
+
+ // Reset the result flag.
+ (*pResult) = false;
+
+ // Throw an exception.
+ jniThrowException(pEnv, "java/lang/IllegalStateException", pMessage);
+ }
+ }
+}
+
+void
+videoEditJava_getClass(
+ bool* pResult,
+ JNIEnv* pEnv,
+ const char* pName,
+ jclass* pClazz)
+{
+    // Only look for the class if the previous action succeeded.
+ if (*pResult)
+ {
+ // Log the function call.
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_JAVA",
+ "videoEditJava_getClass(%s)", pName);
+
+ // Look up the class.
+ jclass clazz = pEnv->FindClass(pName);
+
+ // Clear any resulting exceptions.
+ pEnv->ExceptionClear();
+
+ // Check if the class could be located.
+ if (NULL != clazz)
+ {
+ // Return the class.
+ (*pClazz) = clazz;
+ }
+ else
+ {
+ // Reset the result flag.
+ (*pResult) = false;
+
+ // Log the error.
+ VIDEOEDIT_LOG_EXCEPTION(ANDROID_LOG_ERROR, "VIDEO_EDITOR_JAVA",
+ "videoEditJava_getClass, error: unable to locate class %s", pName);
+
+ // Throw an exception.
+ jniThrowException(pEnv, "java/lang/ClassNotFoundException",
+ "unable to locate class");
+ }
+ }
+}
+
+void
+videoEditJava_getMethodId(
+ bool* pResult,
+ JNIEnv* pEnv,
+ jclass clazz,
+ const char* pName,
+ const char* pType,
+ jmethodID* pMethodId)
+{
+    // Only look for the method id if the previous action succeeded.
+ if (*pResult)
+ {
+ // Log the function call.
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_JAVA",
+ "videoEditJava_getMethodId(%s,%s)", pName, pType);
+
+ // Look up the method id.
+ jmethodID methodId = pEnv->GetMethodID(clazz, pName, pType);
+
+ // Clear any resulting exceptions.
+ pEnv->ExceptionClear();
+
+ // Check if the method could be located.
+ if (NULL != methodId)
+ {
+ // Return the method id.
+ (*pMethodId) = methodId;
+ }
+ else
+ {
+ // Reset the result flag.
+ (*pResult) = false;
+
+ // Log the error.
+ VIDEOEDIT_LOG_EXCEPTION(ANDROID_LOG_ERROR, "VIDEO_EDITOR_JAVA",
+ "videoEditJava_getMethodId, error: unable to locate method %s with type %s",
+ pName, pType);
+
+ // Throw an exception.
+ jniThrowException(pEnv, "java/lang/NoSuchMethodException", "unable to locate method");
+ }
+ }
+}
+
+void
+videoEditJava_getFieldId(
+ bool* pResult,
+ JNIEnv* pEnv,
+ jclass clazz,
+ const char* pName,
+ const char* pType,
+ jfieldID* pFieldId)
+{
+    // Only look for the field id if the previous action succeeded.
+ if (*pResult)
+ {
+ // Log the function call.
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_JAVA",
+ "videoEditJava_getFieldId(%s,%s)", pName, pType);
+
+ // Look up the field id.
+ jfieldID fieldId = pEnv->GetFieldID(clazz, pName, pType);
+
+ // Clear any resulting exceptions.
+ pEnv->ExceptionClear();
+
+ // Check if the field could be located.
+ if (NULL != fieldId)
+ {
+ // Return the field id.
+ (*pFieldId) = fieldId;
+ }
+ else
+ {
+ // Reset the result flag.
+ (*pResult) = false;
+
+ // Log the error.
+ VIDEOEDIT_LOG_EXCEPTION(ANDROID_LOG_ERROR, "VIDEO_EDITOR_JAVA",
+ "videoEditJava_getFieldId, error: unable to locate field %s with type %s",
+ pName, pType);
+
+ // Throw an exception.
+ jniThrowException(pEnv, "java/lang/NoSuchFieldException", "unable to locate field");
+ }
+ }
+}
+
+void
+videoEditJava_getObject(
+ bool* pResult,
+ JNIEnv* pEnv,
+ jobject object,
+ jfieldID objectFieldId,
+ jobject* pObject)
+{
+    // Only retrieve the object if the previous action succeeded.
+ if (*pResult)
+ {
+ // Log the function call.
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_JAVA",
+ "videoEditJava_getObject()");
+
+ // Retrieve the object.
+ (*pObject) = pEnv->GetObjectField(object, objectFieldId);
+
+ // Clear any resulting exceptions.
+ pEnv->ExceptionClear();
+ }
+}
+
+void
+videoEditJava_getArray(
+ bool* pResult,
+ JNIEnv* pEnv,
+ jobject object,
+ jfieldID arrayFieldId,
+ jobjectArray* pArray,
+ jsize* pArraySize)
+{
+ // Only retrieve the array object and size if the previous action succeeded.
+ if (*pResult)
+ {
+ // Log the function call.
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_JAVA", "videoEditJava_getArray()");
+
+ // Retrieve the array object.
+ jobjectArray array = (jobjectArray)pEnv->GetObjectField(object, arrayFieldId);
+ jsize arraySize = 0;
+
+ // Clear any resulting exceptions.
+ pEnv->ExceptionClear();
+
+ // Check if the array could be retrieved.
+ if (NULL != array)
+ {
+ // Retrieve the array size.
+ arraySize = pEnv->GetArrayLength(array);
+ }
+
+ // Return the array and its size.
+ (*pArray) = array;
+ (*pArraySize) = arraySize;
+ }
+}
+
+void*
+videoEditJava_getString(
+ bool* pResult,
+ JNIEnv* pEnv,
+ jobject object,
+ jfieldID stringFieldId,
+ M4OSA_UInt32* pLength)
+{
+ void* pString = M4OSA_NULL;
+ jstring string = NULL;
+ M4OSA_UInt32 length = 0;
+ M4OSA_Char* pLocal = M4OSA_NULL;
+ M4OSA_ERR result = M4NO_ERROR;
+
+ // Check if the previous action succeeded.
+ if (*pResult)
+ {
+ // Log the function call.
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_JAVA", "videoEditJava_getString()");
+
+ // Check if an object containing a string was specified.
+ if (NULL != stringFieldId)
+ {
+ // Retrieve the string object.
+ string = (jstring)pEnv->GetObjectField(object, stringFieldId);
+
+ // Clear any resulting exceptions.
+ pEnv->ExceptionClear();
+ }
+ else
+ {
+ // The string itself was specified.
+ string = (jstring)object;
+ }
+
+ // Check if the string could be retrieved.
+ if (NULL != string)
+ {
+ // Get a local copy of the string.
+ pLocal = (M4OSA_Char*)pEnv->GetStringUTFChars(string, M4OSA_NULL);
+ if (M4OSA_NULL != pLocal)
+ {
+                // Determine the length of the string
+                // (add one extra character for the zero terminator).
+ length = M4OSA_chrLength(pLocal) + 1;
+
+ // Allocate memory for the string.
+ pString = videoEditOsal_alloc(pResult, pEnv, length, "String");
+ if (*pResult)
+ {
+ // Copy the string.
+ result = M4OSA_chrNCopy((M4OSA_Char*)pString, pLocal, length);
+
+ // Check if the copy succeeded.
+ videoEditJava_checkAndThrowRuntimeException(pResult, pEnv,
+ (M4NO_ERROR != result), result);
+
+ // Check if the string could not be copied.
+ if (!(*pResult))
+ {
+ // Free the allocated memory.
+ videoEditOsal_free(pString);
+ pString = M4OSA_NULL;
+ }
+ }
+
+ // Release the local copy of the string.
+ pEnv->ReleaseStringUTFChars(string, (const char *)pLocal);
+ }
+ }
+
+ // Check if the string was empty or could be copied.
+ if (*pResult)
+ {
+ // Check if the length was requested.
+ if (M4OSA_NULL != pLength)
+ {
+ // Return the length.
+ (*pLength) = length;
+ }
+ }
+ }
+
+ // Return the string.
+ return(pString);
+}
+
+void
+videoEditJava_getStaticIntField(
+ bool* pResult,
+ JNIEnv* pEnv,
+ jclass clazz,
+ const char* pName,
+ int* pValue)
+{
+    // Only look for the static field if the previous action succeeded.
+ if (*pResult)
+ {
+ // Log the function call.
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_JAVA",
+ "videoEditJava_getStaticIntField(%s)", pName);
+
+ // Look up the field id.
+ jfieldID fieldId = pEnv->GetStaticFieldID(clazz, pName, "I");
+
+ // Clear any resulting exceptions.
+ pEnv->ExceptionClear();
+
+ // Check if the field could be located.
+ if (NULL != fieldId)
+ {
+ // Retrieve the field value.
+ (*pValue) = pEnv->GetStaticIntField(clazz, fieldId);
+
+ // Log the value.
+ VIDEOEDIT_LOG_EXCEPTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_JAVA",
+ "videoEditJava_getStaticIntField, %s = %d", pName, (*pValue));
+ }
+ else
+ {
+ // Reset the result flag.
+ (*pResult) = false;
+
+ // Log the error.
+ VIDEOEDIT_LOG_EXCEPTION(ANDROID_LOG_ERROR, "VIDEO_EDITOR_JAVA",
+ "videoEditJava_getStaticIntField, error: unable to locate field %s", pName);
+
+ // Throw an exception.
+ jniThrowException(pEnv, "java/lang/NoSuchFieldException",
+ "unable to locate static field");
+ }
+ }
+}
+
+void
+videoEditJava_initConstantClass(
+ bool* pResult,
+ JNIEnv* pEnv,
+ VideoEditJava_ConstantsClass* pClass)
+{
+ bool gotten = true;
+ jclass clazz = NULL;
+ int index = 0;
+
+ // Check if the previous action succeeded.
+ if (*pResult)
+ {
+ // Log the function call.
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_JAVA",
+ "videoEditJava_initConstantClass(%s)", pClass->pName);
+
+ // Only initialize the class once.
+ if (!pClass->initialized)
+ {
+ // Look up the class.
+ videoEditJava_getClass(pResult, pEnv, pClass->pName, &clazz);
+
+ // Loop over the constants.
+ for (index = 0; index < pClass->count; index++)
+ {
+ // Look up the constant.
+ videoEditJava_getStaticIntField(pResult, pEnv, clazz,
+ pClass->pConstants[index].pName,
+ &pClass->pConstants[index].java);
+ }
+
+ // Check if all constants could be located.
+ if (*pResult)
+ {
+ // Set the initialized flag.
+ pClass->initialized = true;
+ }
+ }
+ }
+}
+
+const char*
+videoEditJava_getConstantClassName(
+ const VideoEditJava_ConstantsClass* pClass,
+ int value,
+ VideoEditJava_UnknownConstant unknown)
+{
+ const char* pName = M4OSA_NULL;
+ int index = 0;
+
+ // Loop over the list with constants.
+ for (index = 0;
+ ((M4OSA_NULL == pName) && (index < pClass->count));
+ index++)
+ {
+ // Check if the specified value matches the c value of the constant.
+ if (value == pClass->pConstants[index].c)
+ {
+ // Set the name.
+ pName = pClass->pConstants[index].pName;
+ }
+ }
+
+ // Check if no constant was found.
+ if (M4OSA_NULL == pName)
+ {
+ // Check if a function was specified to handle this case.
+ if (M4OSA_NULL != unknown)
+ {
+ // Pass the constant to the specified unknown function.
+ pName = unknown(value);
+ }
+ else
+ {
+ // Set the description to a default value.
+ pName = "<unknown>";
+ }
+ }
+
+ // Return the result.
+ return(pName);
+}
+
+const char*
+videoEditJava_getConstantClassString(
+ const VideoEditJava_ConstantsClass* pClass,
+ int value,
+ VideoEditJava_UnknownConstant unknown)
+{
+ const char* pString = M4OSA_NULL;
+ int index = 0;
+
+ // Loop over the list with constants.
+ for (index = 0;
+ ((M4OSA_NULL == pString) && (index < pClass->count));
+ index++)
+ {
+ // Check if the specified value matches the c value of the constant.
+ if (value == pClass->pConstants[index].c)
+ {
+ // Set the description.
+ pString = pClass->pConstants[index].pDescription;
+ }
+ }
+
+ // Check if no constant was found.
+ if (M4OSA_NULL == pString)
+ {
+ // Check if a function was specified to handle this case.
+ if (M4OSA_NULL != unknown)
+ {
+ // Pass the constant to the specified unknown function.
+ pString = unknown(value);
+ }
+ else
+ {
+ // Set the description to a default value.
+ pString = "<unknown>";
+ }
+ }
+
+ // Return the result.
+ return(pString);
+}
+
+int
+videoEditJava_getConstantClassJavaToC(
+ bool* pResult,
+ const VideoEditJava_ConstantsClass* pClass,
+ int value)
+{
+ bool gotten = false;
+ int index = 0;
+
+ // Check if the previous action succeeded.
+ if (*pResult)
+ {
+ // Loop over the list with constants.
+ for (index = 0; ((!gotten) && (index < pClass->count)); index++)
+ {
+ // Check if the specified value matches the java value of the constant.
+ if (value == pClass->pConstants[index].java)
+ {
+ // Set the value to the c value.
+ value = pClass->pConstants[index].c;
+
+ // Set the gotten flag.
+ gotten = true;
+ }
+ }
+
+ // Check if the value was not found.
+ if (!gotten)
+ {
+ (*pResult) = false;
+ }
+ }
+
+ // Return the translated value.
+ return(value);
+}
+
+int
+videoEditJava_getConstantClassJavaToC(
+ bool* pResult,
+ const VideoEditJava_ConstantsClass* pClass,
+ int value,
+ int unknown)
+{
+ bool gotten = false;
+ int index = 0;
+
+ // Check if the previous action succeeded.
+ if (*pResult)
+ {
+ // Loop over the list with constants.
+ for (index = 0; ((!gotten) && (index < pClass->count)); index++)
+ {
+ // Check if the specified value matches the java value of the constant.
+ if (value == pClass->pConstants[index].java)
+ {
+ // Set the value to the c value.
+ value = pClass->pConstants[index].c;
+
+ // Set the gotten flag.
+ gotten = true;
+ }
+ }
+
+        // If the constant was not found, fall back to the specified unknown value.
+        if (!gotten)
+        {
+            // Use the specified unknown value instead.
+            value = unknown;
+ }
+ }
+
+ // Return the translated value.
+ return(value);
+}
+
+int
+videoEditJava_getConstantClassCToJava(
+ const VideoEditJava_ConstantsClass* pClass,
+ int value)
+{
+ bool gotten = false;
+ int index = 0;
+
+ // Loop over the list with constants.
+ for (index = 0; ((!gotten) && (index < pClass->count)); index++)
+ {
+ // Check if the specified value matches the c value of the constant.
+ if (value == pClass->pConstants[index].c)
+ {
+ // Set the value to the java value.
+ value = pClass->pConstants[index].java;
+
+ // Set the gotten flag.
+ gotten = true;
+ }
+ }
+
+ // Return the translated value.
+ return(value);
+}
+
+int
+videoEditJava_getConstantClassCToJava(
+ const VideoEditJava_ConstantsClass* pClass,
+ int value,
+ int unknown)
+{
+ bool gotten = false;
+ int index = 0;
+
+ // Loop over the list with constants.
+ for (index = 0; ((!gotten) && (index < pClass->count)); index++)
+ {
+ // Check if the specified value matches the c value of the constant.
+ if (value == pClass->pConstants[index].c)
+ {
+ // Set the value to the java value.
+ value = pClass->pConstants[index].java;
+
+ // Set the gotten flag.
+ gotten = true;
+ }
+ }
+
+ // If the constant was not found, look for the specified unknown.
+ if (!gotten)
+ {
+ // Loop over the list with constants.
+ for (index = 0; ((!gotten) && (index < pClass->count)); index++)
+ {
+            // Check if the specified unknown value matches the c value of the constant.
+            if (unknown == pClass->pConstants[index].c)
+            {
+                // Set the value to the java value.
+                value = pClass->pConstants[index].java;
+
+ // Set the gotten flag.
+ gotten = true;
+ }
+ }
+ }
+
+ // Return the translated value.
+ return(value);
+}
+
+void
+videoEditJava_initFieldClass(
+ bool* pResult,
+ JNIEnv* pEnv,
+ VideoEditJava_FieldsClass* pClass)
+{
+ bool gotten = true;
+ jclass clazz = NULL;
+ int index = 0;
+
+ // Check if the previous action succeeded.
+ if (*pResult)
+ {
+ // Log the function call.
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_JAVA",
+ "videoEditJava_initFieldClass(%s)", pClass->pName);
+
+ // Only initialize the class once.
+ if (!pClass->initialized)
+ {
+ // Look up the class.
+ videoEditJava_getClass(pResult, pEnv, pClass->pName, &clazz);
+
+ // Loop over the fields.
+ for (index = 0; index < pClass->count; index++)
+ {
+ // Look up the field id.
+ videoEditJava_getFieldId(
+ pResult,
+ pEnv,
+ clazz,
+ pClass->pFields[index].pName,
+ pClass->pFields[index].pType,
+ &pClass->pFields[index].fieldId);
+ }
+
+ // Check if all fields could be located.
+ if (*pResult)
+ {
+ // Set the initialized flag.
+ pClass->initialized = true;
+ }
+ }
+ }
+}
+
+void
+videoEditJava_fieldClassClass(
+ bool* pResult,
+ JNIEnv* pEnv,
+ const VideoEditJava_FieldsClass* pClass,
+ jclass* pClazz)
+{
+ // Check if the previous action succeeded.
+ if (*pResult)
+ {
+ // Check if the class is initialized.
+ videoEditJava_checkAndThrowIllegalArgumentException(pResult, pEnv, (!pClass->initialized),
+ "field class not initialized");
+
+ // Get the class.
+ videoEditJava_getClass(pResult, pEnv, pClass->pName, pClazz);
+ }
+}
+
+void
+videoEditJava_fieldClassFieldIds(
+ bool* pResult,
+ JNIEnv* pEnv,
+ const VideoEditJava_FieldsClass* pClass,
+ int count,
+ VideoEditJava_FieldIds* pIds)
+{
+ int index = 0;
+
+ // Check if the previous action succeeded.
+ if (*pResult)
+ {
+ // Check if the class is initialized.
+ videoEditJava_checkAndThrowIllegalArgumentException(pResult, pEnv, (!pClass->initialized),
+ "field class not initialized");
+
+ // Check if the number of fields matches.
+ videoEditJava_checkAndThrowIllegalArgumentException(pResult, pEnv,
+ (pClass->count != count),
+ "field class type mismatch");
+
+ // Check if the class and object are valid.
+ if (*pResult)
+ {
+ // Loop over the class fields.
+ for (index = 0; index < count; index++)
+ {
+ // Copy the field ids.
+ pIds->fieldIds[index] = pClass->pFields[index].fieldId;
+ }
+ }
+ }
+}
+
+void
+videoEditJava_initMethodClass(
+ bool* pResult,
+ JNIEnv* pEnv,
+ VideoEditJava_MethodsClass* pClass)
+{
+ bool gotten = true;
+ jclass clazz = NULL;
+ int index = 0;
+
+ // Check if the previous action succeeded.
+ if (*pResult)
+ {
+ // Log the function call.
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_JAVA",
+ "videoEditJava_initMethodClass(%s)", pClass->pName);
+
+ // Only initialize the class once.
+ if (!pClass->initialized)
+ {
+ // Look up the class.
+ videoEditJava_getClass(pResult, pEnv, pClass->pName, &clazz);
+
+ // Loop over the methods.
+ for (index = 0; index < pClass->count; index++)
+ {
+ // Look up the method id.
+ videoEditJava_getMethodId(
+ pResult,
+ pEnv,
+ clazz,
+ pClass->pMethods[index].pName,
+ pClass->pMethods[index].pType,
+ &pClass->pMethods[index].methodId);
+ }
+
+ // Check if all methods could be located.
+ if (*pResult)
+ {
+ // Set the initialized flag.
+ pClass->initialized = true;
+ }
+ }
+ }
+}
+
+void
+videoEditJava_methodClassMethodIds(
+ bool* pResult,
+ JNIEnv* pEnv,
+ const VideoEditJava_MethodsClass* pClass,
+ int count,
+ VideoEditJava_MethodIds* pIds)
+{
+ int index = 0;
+
+ // Check if the previous action succeeded.
+ if (*pResult)
+ {
+ // Check if the class is initialized.
+ videoEditJava_checkAndThrowIllegalArgumentException(pResult, pEnv, (!pClass->initialized),
+ "method class not initialized");
+
+ // Check if the number of methods matches.
+ videoEditJava_checkAndThrowIllegalArgumentException(pResult, pEnv,\
+ (pClass->count != count),
+ "method class type mismatch");
+
+ // Check if the class and object are valid.
+ if (*pResult)
+ {
+ // Loop over the class methods.
+ for (index = 0; index < count; index++)
+ {
+ // Copy the method ids.
+ pIds->methodIds[index] = pClass->pMethods[index].methodId;
+ }
+ }
+ }
+}
+
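Every helper in this file follows the same bool* pResult chaining convention: a function becomes a no-op once an earlier step has cleared the flag, so a caller can string several lookups together and test the result once at the end. A minimal usage sketch (illustrative only, not taken from this patch):

    bool     ok         = true;
    jclass   clazz      = NULL;
    jfieldID durationId = NULL;

    // Each call is skipped automatically if a previous call cleared `ok`.
    videoEditJava_getClass(&ok, pEnv, PROPERTIES_CLASS_NAME, &clazz);
    videoEditJava_getFieldId(&ok, pEnv, clazz, "duration", "I", &durationId);

    if (ok) {
        // All lookups succeeded; the cached ids are safe to use.
    }
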
diff --git a/media/jni/mediaeditor/VideoEditorJava.h b/media/jni/mediaeditor/VideoEditorJava.h
new file mode 100755
index 000000000000..9d7f096bae77
--- /dev/null
+++ b/media/jni/mediaeditor/VideoEditorJava.h
@@ -0,0 +1,506 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef VIDEO_EDITOR_JAVA_H
+#define VIDEO_EDITOR_JAVA_H
+
+#include <jni.h>
+#include <JNIHelp.h>
+
+/**
+ ************************************************************************
+ * @file VideoEditorJava.h
+ * @brief Interface for JNI methods that provide access to the classes,
+ * objects, field IDs and method IDs defined in the Java layer
+ ************************************************************************
+*/
+
+extern "C" {
+#include <M4OSA_Types.h>
+#include <M4OSA_Error.h>
+}
+
+#define VIDEOEDIT_JAVA_CONSTANT_INIT(m_name, m_c) \
+ { m_name, \
+ 0, \
+ m_c, \
+ #m_c }
+
+#define VIDEOEDIT_JAVA_DEFINE_CONSTANTS(m_class) \
+static \
+VideoEditJava_Constant g##m_class##Constants [] =
+
+#define VIDEOEDIT_JAVA_DEFINE_CONSTANT_CLASS( \
+ m_class, \
+ m_name, \
+ m_unknownName, \
+ m_unknownString) \
+ \
+static VideoEditJava_ConstantsClass g##m_class##ConstantsClass = \
+{ m_name, \
+ &g##m_class##Constants[0], \
+ (sizeof(g##m_class##Constants) / sizeof(VideoEditJava_Constant)), \
+ false \
+}; \
+ \
+ \
+void videoEditJava_init##m_class##Constants( \
+ bool* pResult, \
+ JNIEnv* pEnv) \
+{ \
+ videoEditJava_initConstantClass( \
+ pResult, \
+ pEnv, \
+ &g##m_class##ConstantsClass); \
+} \
+ \
+const char* videoEditJava_get##m_class##Name( \
+ int value) \
+{ \
+ return(videoEditJava_getConstantClassName( \
+ &g##m_class##ConstantsClass, \
+ value, \
+ m_unknownName)); \
+} \
+ \
+const char* videoEditJava_get##m_class##String( \
+ int value) \
+{ \
+ return(videoEditJava_getConstantClassString( \
+ &g##m_class##ConstantsClass, \
+ value, \
+ m_unknownString)); \
+} \
+ \
+int \
+videoEditJava_get##m_class##JavaToC( \
+ bool* pResult, \
+ int value) \
+{ \
+ return(videoEditJava_getConstantClassJavaToC( \
+ pResult, \
+ &g##m_class##ConstantsClass, \
+ value)); \
+} \
+ \
+int \
+videoEditJava_get##m_class##JavaToC( \
+ bool* pResult, \
+ int value, \
+ int unknown) \
+{ \
+ return(videoEditJava_getConstantClassJavaToC( \
+ pResult, \
+ &g##m_class##ConstantsClass, \
+ value, \
+ unknown)); \
+} \
+ \
+int \
+videoEditJava_get##m_class##CToJava( \
+ int value) \
+{ \
+ return(videoEditJava_getConstantClassCToJava( \
+ &g##m_class##ConstantsClass, \
+ value)); \
+} \
+ \
+int \
+videoEditJava_get##m_class##CToJava( \
+ int value, \
+ int unknown) \
+{ \
+ return(videoEditJava_getConstantClassCToJava( \
+ &g##m_class##ConstantsClass, \
+ value, \
+ unknown)); \
+}
+
+
+#define VIDEOEDIT_JAVA_DECLARE_CONSTANT_CLASS(m_class) \
+void \
+videoEditJava_init##m_class##Constants( \
+ bool* pResult, \
+ JNIEnv* pEnv); \
+ \
+const char* \
+videoEditJava_get##m_class##Name( \
+ int value); \
+ \
+const char* \
+videoEditJava_get##m_class##String( \
+ int value); \
+ \
+int \
+videoEditJava_get##m_class##JavaToC( \
+ bool* pResult, \
+ int value, \
+ int unknown); \
+ \
+int \
+videoEditJava_get##m_class##JavaToC( \
+ bool* pResult, \
+ int value); \
+ \
+int \
+videoEditJava_get##m_class##CToJava( \
+ int value); \
+ \
+int \
+videoEditJava_get##m_class##CToJava( \
+ int value, \
+ int unknown);
+
+#define VIDEOEDIT_JAVA_FIELD_INIT(m_name, m_type) \
+ { m_name, \
+ m_type, \
+ NULL }
+
+#define VIDEOEDIT_JAVA_DEFINE_FIELDS(m_class) \
+static \
+VideoEditJava_Field g##m_class##Fields [] =
+
+#define VIDEOEDIT_JAVA_DEFINE_FIELD_CLASS(m_class, m_name) \
+static VideoEditJava_FieldsClass g##m_class##FieldsClass = \
+ { m_name, \
+ &g##m_class##Fields[0], \
+ (sizeof(g##m_class##Fields) / sizeof(VideoEditJava_Field)), \
+ false }; \
+ \
+void \
+videoEditJava_init##m_class##Fields( \
+ bool* pResult, \
+ JNIEnv* pEnv) \
+{ \
+ videoEditJava_initFieldClass( \
+ pResult, \
+ pEnv, \
+ &g##m_class##FieldsClass); \
+} \
+ \
+void \
+videoEditJava_get##m_class##Class( \
+ bool* pResult, \
+ JNIEnv* pEnv, \
+ jclass* pClazz) \
+{ \
+ videoEditJava_fieldClassClass( \
+ pResult, \
+ pEnv, \
+ &g##m_class##FieldsClass, \
+ pClazz); \
+} \
+ \
+void \
+videoEditJava_get##m_class##FieldIds( \
+ bool* pResult, \
+ JNIEnv* pEnv, \
+ VideoEditJava_##m_class##FieldIds* pIds) \
+{ \
+ videoEditJava_fieldClassFieldIds( \
+ pResult, \
+ pEnv, \
+ &g##m_class##FieldsClass, \
+ (sizeof(VideoEditJava_##m_class##FieldIds) / \
+ sizeof(jfieldID)), \
+ (VideoEditJava_FieldIds*)pIds); \
+}
+
+#define VIDEOEDIT_JAVA_DECLARE_FIELD_CLASS(m_class) \
+void \
+videoEditJava_init##m_class##Fields( \
+ bool* pResult, \
+ JNIEnv* pEnv); \
+ \
+void \
+videoEditJava_get##m_class##Class( \
+ bool* pResult, \
+ JNIEnv* pEnv, \
+ jclass* pClazz); \
+ \
+void \
+videoEditJava_get##m_class##FieldIds( \
+ bool* pResult, \
+ JNIEnv* pEnv, \
+ VideoEditJava_##m_class##FieldIds* pIds);
+
+
+#define VIDEOEDIT_JAVA_METHOD_INIT(m_name, m_type) \
+ { m_name, \
+ m_type, \
+ NULL }
+
+#define VIDEOEDIT_JAVA_DEFINE_METHODS(m_class) \
+static \
+VideoEditJava_Method g##m_class##Methods [] =
+
+#define VIDEOEDIT_JAVA_DEFINE_METHOD_CLASS(m_class, m_name) \
+static VideoEditJava_MethodsClass g##m_class##MethodsClass = \
+ { m_name, \
+ &g##m_class##Methods[0], \
+ (sizeof(g##m_class##Methods) / sizeof(VideoEditJava_Method)), \
+ false }; \
+ \
+void \
+videoEditJava_init##m_class##Methods( \
+ bool* pResult, \
+ JNIEnv* pEnv) \
+{ \
+ videoEditJava_initMethodClass( \
+ pResult, \
+ pEnv, \
+ &g##m_class##MethodsClass); \
+} \
+ \
+void \
+videoEditJava_get##m_class##MethodIds( \
+ bool* pResult, \
+ JNIEnv* pEnv, \
+ VideoEditJava_##m_class##MethodIds* pIds) \
+{ \
+ videoEditJava_methodClassMethodIds( \
+ pResult, \
+ pEnv, \
+ &g##m_class##MethodsClass, \
+ (sizeof(VideoEditJava_##m_class##MethodIds) / \
+ sizeof(jmethodID)), \
+ (VideoEditJava_MethodIds*)pIds); \
+}
+
+#define VIDEOEDIT_JAVA_DECLARE_METHOD_CLASS(m_class) \
+void \
+videoEditJava_init##m_class##Methods( \
+ bool* pResult, \
+ JNIEnv* pEnv); \
+ \
+void \
+videoEditJava_get##m_class##MethodIds( \
+ bool* pResult, \
+ JNIEnv* pEnv, \
+ VideoEditJava_##m_class##MethodIds* pIds);
+
+
+typedef struct
+{
+ const char* pName;
+ int java;
+ int c;
+ const char* pDescription;
+} VideoEditJava_Constant;
+
+typedef struct
+{
+ const char* pName;
+ VideoEditJava_Constant* pConstants;
+ int count;
+ bool initialized;
+} VideoEditJava_ConstantsClass;
+
+typedef const char* (*VideoEditJava_UnknownConstant)(int constant);
+
+typedef struct
+{
+ const char* pName;
+ const char* pType;
+ jfieldID fieldId;
+} VideoEditJava_Field;
+
+typedef struct
+{
+ const char* pName;
+ VideoEditJava_Field* pFields;
+ int count;
+ bool initialized;
+} VideoEditJava_FieldsClass;
+
+typedef struct
+{
+ jfieldID fieldIds[];
+} VideoEditJava_FieldIds;
+
+typedef struct
+{
+ const char* pName;
+ const char* pType;
+ jmethodID methodId;
+} VideoEditJava_Method;
+
+typedef struct
+{
+ const char* pName;
+ VideoEditJava_Method* pMethods;
+ int count;
+ bool initialized;
+} VideoEditJava_MethodsClass;
+
+typedef struct
+{
+ jmethodID methodIds[];
+} VideoEditJava_MethodIds;
+
+void
+videoEditJava_checkAndThrowIllegalArgumentException(
+ bool* pResult,
+ JNIEnv* pEnv,
+ bool condition,
+ const char* pMessage);
+
+void
+videoEditJava_checkAndThrowRuntimeException(
+ bool* pResult,
+ JNIEnv* pEnv,
+ bool condition,
+ M4OSA_ERR result);
+
+void
+videoEditJava_checkAndThrowIllegalStateException(
+ bool* pResult,
+ JNIEnv* pEnv,
+ bool condition,
+ const char* pMessage);
+
+void
+videoEditJava_getClass(
+ bool* pResult,
+ JNIEnv* pEnv,
+ const char* pName,
+ jclass* pClazz);
+
+void
+videoEditJava_getMethodId(
+ bool* pResult,
+ JNIEnv* pEnv,
+ jclass clazz,
+ const char* pName,
+ const char* pType,
+ jmethodID* pMethodId);
+
+void
+videoEditJava_getFieldId(
+ bool* pResult,
+ JNIEnv* pEnv,
+ jclass clazz,
+ const char* pName,
+ const char* pType,
+ jfieldID* pFieldId);
+
+void
+videoEditJava_getObject(
+ bool* pResult,
+ JNIEnv* pEnv,
+ jobject object,
+ jfieldID objectFieldId,
+ jobject* pObject);
+
+void
+videoEditJava_getArray(
+ bool* pResult,
+ JNIEnv* pEnv,
+ jobject object,
+ jfieldID arrayFieldId,
+ jobjectArray* pArray,
+ jsize* pArraySize);
+
+void*
+videoEditJava_getString(
+ bool* pResult,
+ JNIEnv* pEnv,
+ jobject object,
+ jfieldID stringFieldId,
+ M4OSA_UInt32* pLength);
+
+void
+videoEditJava_getStaticIntField(
+ bool* pResult,
+ JNIEnv* env,
+ jclass clazz,
+ const char* pName,
+ int* pValue);
+
+void
+videoEditJava_initConstantClass(
+ bool* pResult,
+ JNIEnv* pEnv,
+ VideoEditJava_ConstantsClass* pClass);
+
+const char*
+videoEditJava_getConstantClassName(
+ const VideoEditJava_ConstantsClass* pClass,
+ int value,
+ VideoEditJava_UnknownConstant unknown);
+
+const char*
+videoEditJava_getConstantClassString(
+ const VideoEditJava_ConstantsClass* pClass,
+ int value,
+ VideoEditJava_UnknownConstant unknown);
+
+int
+videoEditJava_getConstantClassJavaToC(
+ bool* pResult,
+ const VideoEditJava_ConstantsClass* pClass,
+ int value);
+
+int
+videoEditJava_getConstantClassJavaToC(
+ bool* pResult,
+ const VideoEditJava_ConstantsClass* pClass,
+ int value,
+ int unknown);
+
+int
+videoEditJava_getConstantClassCToJava(
+ const VideoEditJava_ConstantsClass* pClass,
+ int value);
+
+int
+videoEditJava_getConstantClassCToJava(
+ const VideoEditJava_ConstantsClass* pClass,
+ int value,
+ int unknown);
+
+void
+videoEditJava_initFieldClass(
+ bool* pResult,
+ JNIEnv* pEnv,
+ VideoEditJava_FieldsClass* pClass);
+
+void
+videoEditJava_fieldClassClass(
+ bool* pResult,
+ JNIEnv* pEnv,
+ const VideoEditJava_FieldsClass* pClass,
+ jclass* pClazz);
+
+void
+videoEditJava_fieldClassFieldIds(
+ bool* pResult,
+ JNIEnv* pEnv,
+ const VideoEditJava_FieldsClass* pClass,
+ int count,
+ VideoEditJava_FieldIds* pIds);
+
+void
+videoEditJava_initMethodClass(
+ bool* pResult,
+ JNIEnv* pEnv,
+ VideoEditJava_MethodsClass* pClass);
+
+void
+videoEditJava_methodClassMethodIds(
+ bool* pResult,
+ JNIEnv* pEnv,
+ const VideoEditJava_MethodsClass* pClass,
+ int count,
+ VideoEditJava_MethodIds* pIds);
+
+#endif // VIDEO_EDITOR_JAVA_H
+
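The macro suite above generates, for each constants class, a static lookup table plus name/string and Java-to-C / C-to-Java translation functions. A hedged example of how such a table could be declared with these macros (the entries shown are illustrative, not the commit's actual tables, which would be defined in VideoEditorClasses.cpp):

    VIDEOEDIT_JAVA_DEFINE_CONSTANTS(FileType)
    {
        VIDEOEDIT_JAVA_CONSTANT_INIT("THREE_GPP", VideoEditClasses_kFileType_3GPP),
        VIDEOEDIT_JAVA_CONSTANT_INIT("MP4",       VideoEditClasses_kFileType_MP4),
    };

    VIDEOEDIT_JAVA_DEFINE_CONSTANT_CLASS(FileType, FILE_TYPE_CLASS_NAME,
                                         M4OSA_NULL, M4OSA_NULL)

    // videoEditJava_getFileTypeJavaToC() then maps a Java-side constant value
    // (resolved reflectively by field name) to the matching native enum value.
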
diff --git a/media/jni/mediaeditor/VideoEditorLogging.h b/media/jni/mediaeditor/VideoEditorLogging.h
new file mode 100755
index 000000000000..ca8c0474d9da
--- /dev/null
+++ b/media/jni/mediaeditor/VideoEditorLogging.h
@@ -0,0 +1,55 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef VIDEO_EDITOR_LOGGING_H
+#define VIDEO_EDITOR_LOGGING_H
+
+//#define VIDEOEDIT_LOGGING_ENABLED
+
+#define VIDEOEDIT_LOG_INDENTATION (3)
+
+#ifdef VIDEOEDIT_LOGGING_ENABLED
+
+#define VIDEOEDIT_LOG_ALLOCATION __android_log_print
+#define VIDEOEDIT_LOG_API __android_log_print
+#define VIDEOEDIT_LOG_ERROR __android_log_print
+#define VIDEOEDIT_LOG_EXCEPTION __android_log_print
+#define VIDEOEDIT_LOG_FUNCTION __android_log_print
+#define VIDEOEDIT_LOG_RESULT(x,y, ...) LOGI(y, __VA_ARGS__ )
+#define VIDEOEDIT_LOG_SETTING __android_log_print
+#define VIDEOEDIT_LOG_EDIT_SETTINGS(m_settings) videoEditClasses_logEditSettings\
+ (m_settings, VIDEOEDIT_LOG_INDENTATION)
+#define VIDEOEDIT_PROP_LOG_PROPERTIES(m_properties) videoEditPropClass_logProperties\
+ (m_properties, VIDEOEDIT_LOG_INDENTATION)
+#define VIDEOEDIT_PROP_LOG_RESULT __android_log_print
+
+#else
+
+#define VIDEOEDIT_LOG_ALLOCATION (void)
+#define VIDEOEDIT_LOG_API (void)
+#define VIDEOEDIT_LOG_ERROR (void)
+#define VIDEOEDIT_LOG_EXCEPTION (void)
+#define VIDEOEDIT_LOG_FUNCTION (void)
+#define VIDEOEDIT_LOG_RESULT (void)
+#define VIDEOEDIT_LOG_SETTING (void)
+#define VIDEOEDIT_LOG_EDIT_SETTINGS(m_settings) (void)m_settings
+#define VIDEOEDIT_PROP_LOG_PROPERTIES(m_properties) (void)m_properties
+#define VIDEOEDIT_PROP_LOG_RESULT (void)
+
+#endif
+
+#endif // VIDEO_EDITOR_LOGGING_H
+
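With VIDEOEDIT_LOGGING_ENABLED left commented out, every macro above compiles down to a discarded (void) expression rather than a call to __android_log_print, so the log statements have no effect in normal builds. A small usage sketch, matching how the macros are invoked from VideoEditorJava.cpp:

    // Expands to __android_log_print(...) when VIDEOEDIT_LOGGING_ENABLED is
    // defined, and to a (void)(...) comma expression otherwise.
    VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_JAVA",
                           "videoEditJava_getClass(%s)", pName);
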
diff --git a/media/jni/mediaeditor/VideoEditorMain.cpp b/media/jni/mediaeditor/VideoEditorMain.cpp
new file mode 100755
index 000000000000..e66e4b90fbc3
--- /dev/null
+++ b/media/jni/mediaeditor/VideoEditorMain.cpp
@@ -0,0 +1,3056 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <dlfcn.h>
+#include <stdio.h>
+#include <unistd.h>
+#include <utils/Log.h>
+#include <utils/threads.h>
+#include <VideoEditorClasses.h>
+#include <VideoEditorJava.h>
+#include <VideoEditorOsal.h>
+#include <VideoEditorLogging.h>
+#include <marker.h>
+#include <VideoEditorThumbnailMain.h>
+#include <M4OSA_Debug.h>
+#include <M4xVSS_Internal.h>
+#include <surfaceflinger/Surface.h>
+#include <surfaceflinger/ISurface.h>
+#include "VideoEditorPreviewController.h"
+
+#include "VideoEditorMain.h"
+
+extern "C" {
+#include <M4OSA_Clock.h>
+#include <M4OSA_CharStar.h>
+#include <M4OSA_Error.h>
+#include <M4OSA_FileCommon.h>
+#include <M4OSA_FileReader.h>
+#include <M4OSA_FileWriter.h>
+#include <M4OSA_FileExtra.h>
+#include <M4OSA_Memory.h>
+#include <M4OSA_String.h>
+#include <M4OSA_Thread.h>
+#include <M4xVSS_API.h>
+#include <M4VSS3GPP_ErrorCodes.h>
+#include <M4MCS_API.h>
+#include <M4MCS_ErrorCodes.h>
+#include <M4MDP_API.h>
+#include <M4READER_Common.h>
+#include <M4WRITER_common.h>
+};
+
+
+using namespace android;
+
+#define THREAD_STACK_SIZE (65536)
+
+#define VIDEOEDITOR_VERSION_MAJOR 0
+#define VIDEOEDITOR_VERSION_MINOR 0
+#define VIDEOEDITOR_VERSION_REVISION 1
+
+
+typedef enum
+{
+ ManualEditState_NOT_INITIALIZED,
+ ManualEditState_INITIALIZED,
+ ManualEditState_ANALYZING,
+ ManualEditState_ANALYZING_ERROR,
+ ManualEditState_OPENED,
+ ManualEditState_SAVING,
+ ManualEditState_SAVING_ERROR,
+ ManualEditState_SAVED,
+ ManualEditState_STOPPING
+} ManualEditState;
+
+typedef struct
+{
+ JavaVM* pVM;
+ jobject engine;
+ jmethodID onCompletionMethodId;
+ jmethodID onErrorMethodId;
+ jmethodID onWarningMethodId;
+ jmethodID onProgressUpdateMethodId;
+ jmethodID onPreviewProgressUpdateMethodId;
+ M4xVSS_InitParams initParams;
+ void* pTextRendererHandle;
+ M4xVSS_getTextRgbBufferFct pTextRendererFunction;
+ M4OSA_Context engineContext;
+ ManualEditState state;
+ M4VSS3GPP_EditSettings* pEditSettings;
+ M4OSA_Context threadContext;
+ M4OSA_ERR threadResult;
+ M4OSA_UInt8 threadProgress;
+ VideoEditorPreviewController *mPreviewController;
+ M4xVSS_AudioMixingSettings *mAudioSettings;
+ /* Audio Graph changes */
+ M4OSA_Context pAudioGraphMCSCtx;
+ M4OSA_Bool bSkipState;
+ jmethodID onAudioGraphProgressUpdateMethodId;
+ Mutex mLock;
+} ManualEditContext;
+
+extern "C" M4OSA_ERR M4MCS_open_normalMode(
+ M4MCS_Context pContext,
+ M4OSA_Void* pFileIn,
+ M4VIDEOEDITING_FileType InputFileType,
+ M4OSA_Void* pFileOut,
+ M4OSA_Void* pTempFile);
+
+static M4OSA_ERR videoEditor_toUTF8Fct(
+ M4OSA_Void* pBufferIn,
+ M4OSA_UInt8* pBufferOut,
+ M4OSA_UInt32* bufferOutSize);
+
+static M4OSA_ERR videoEditor_fromUTF8Fct(
+ M4OSA_UInt8* pBufferIn,
+ M4OSA_Void* pBufferOut,
+ M4OSA_UInt32* bufferOutSize);
+
+static M4OSA_ERR videoEditor_getTextRgbBufferFct(
+ M4OSA_Void* pRenderingData,
+ M4OSA_Void* pTextBuffer,
+ M4OSA_UInt32 textBufferSize,
+ M4VIFI_ImagePlane** pOutputPlane);
+
+static void videoEditor_callOnProgressUpdate(
+ ManualEditContext* pContext,
+ int task,
+ int progress);
+
+static void videoEditor_freeContext(
+ JNIEnv* pEnv,
+ ManualEditContext** ppContext);
+
+static M4OSA_ERR videoEditor_threadProc(
+ M4OSA_Void* param);
+
+static jobject videoEditor_getVersion(
+ JNIEnv* pEnv,
+ jobject thiz);
+
+static void videoEditor_init(
+ JNIEnv* pEnv,
+ jobject thiz,
+ jstring tempPath,
+ jstring textRendererPath);
+
+static void videoEditor_loadSettings(
+ JNIEnv* pEnv,
+ jobject thiz,
+ jobject settings);
+
+static void videoEditor_unloadSettings(
+ JNIEnv* pEnv,
+ jobject thiz);
+
+
+static void videoEditor_stopEncoding(
+ JNIEnv* pEnv,
+ jobject thiz);
+
+static void videoEditor_release(
+ JNIEnv* pEnv,
+ jobject thiz);
+static int videoEditor_getPixels(
+ JNIEnv* env,
+ jobject thiz,
+ jstring path,
+ jintArray pixelArray,
+ M4OSA_UInt32 width,
+ M4OSA_UInt32 height,
+ M4OSA_UInt32 timeMS);
+static int videoEditor_getPixelsList(
+ JNIEnv* env,
+ jobject thiz,
+ jstring path,
+ jintArray pixelArray,
+ M4OSA_UInt32 width,
+ M4OSA_UInt32 height,
+ M4OSA_UInt32 deltatimeMS,
+ M4OSA_UInt32 noOfThumbnails,
+ M4OSA_UInt32 startTime,
+ M4OSA_UInt32 endTime);
+
+static void
+videoEditor_startPreview(
+ JNIEnv* pEnv,
+ jobject thiz,
+ jobject mSurface,
+ jlong fromMs,
+ jlong toMs,
+ jint callbackInterval,
+ jboolean loop);
+
+static void
+videoEditor_populateSettings(
+ JNIEnv* pEnv,
+ jobject thiz,
+ jobject settings,
+ jobject object,
+ jobject audioSettingObject);
+
+static void videoEditor_stopPreview(JNIEnv* pEnv,
+ jobject thiz);
+
+static jobject
+videoEditor_getProperties(
+ JNIEnv* pEnv,
+ jobject thiz,
+ jstring file);
+
+static int videoEditor_renderPreviewFrame(JNIEnv* pEnv,
+ jobject thiz,
+ jobject mSurface,
+ jlong fromMs,
+ jint surfaceWidth,
+ jint surfaceHeight);
+
+static int videoEditor_registerManualEditMethods(
+ JNIEnv* pEnv);
+
+static void jniPreviewProgressCallback(void* cookie, M4OSA_UInt32 msgType,
+ M4OSA_UInt32 argc);
+
+static int videoEditor_renderMediaItemPreviewFrame(JNIEnv* pEnv,
+ jobject thiz,
+ jobject mSurface,
+ jstring filePath,
+ jint frameWidth,
+ jint frameHeight,
+ jint surfaceWidth,
+ jint surfaceHeight,
+ jlong fromMs);
+
+static int videoEditor_generateAudioWaveFormSync ( JNIEnv* pEnv,
+ jobject thiz,
+ jstring pcmfilePath,
+ jstring outGraphfilePath,
+ jint frameDuration,
+ jint channels,
+ jint samplesCount);
+
+static int videoEditor_generateAudioRawFile(JNIEnv* pEnv,
+ jobject thiz,
+ jstring infilePath,
+ jstring pcmfilePath );
+
+M4OSA_ERR videoEditor_generateAudio(JNIEnv* pEnv,ManualEditContext* pContext,
+ M4OSA_Char* infilePath,
+ M4OSA_Char* pcmfilePath );
+
+static int
+videoEditor_generateClip(
+ JNIEnv* pEnv,
+ jobject thiz,
+ jobject settings);
+
+
+static JNINativeMethod gManualEditMethods[] = {
+ {"getVersion", "()L"VERSION_CLASS_NAME";",
+ (void *)videoEditor_getVersion },
+ {"_init", "(Ljava/lang/String;Ljava/lang/String;)V",
+ (void *)videoEditor_init },
+ {"nativeStartPreview", "(Landroid/view/Surface;JJIZ)V",
+ (void *)videoEditor_startPreview },
+ {"nativePopulateSettings",
+ "(L"EDIT_SETTINGS_CLASS_NAME";L"PREVIEW_PROPERTIES_CLASS_NAME";L"
+ AUDIO_SETTINGS_CLASS_NAME";)V",
+ (void *)videoEditor_populateSettings },
+ {"nativeRenderPreviewFrame", "(Landroid/view/Surface;JII)I",
+ (int *)videoEditor_renderPreviewFrame },
+ {"nativeRenderMediaItemPreviewFrame",
+ "(Landroid/view/Surface;Ljava/lang/String;IIIIJ)I",
+ (int *)videoEditor_renderMediaItemPreviewFrame },
+ {"nativeStopPreview", "()V",
+ (void *)videoEditor_stopPreview },
+ {"stopEncoding", "()V",
+ (void *)videoEditor_stopEncoding },
+ {"release", "()V",
+ (void *)videoEditor_release },
+ {"nativeGetPixels", "(Ljava/lang/String;[IIIJ)I",
+ (void*)videoEditor_getPixels },
+ {"nativeGetPixelsList", "(Ljava/lang/String;[IIIIIJJ)I",
+ (void*)videoEditor_getPixelsList },
+ {"getMediaProperties",
+ "(Ljava/lang/String;)Landroid/media/videoeditor/MediaArtistNativeHelper$Properties;",
+ (void *)videoEditor_getProperties },
+ {"nativeGenerateAudioGraph","(Ljava/lang/String;Ljava/lang/String;III)I",
+ (int *)videoEditor_generateAudioWaveFormSync },
+ {"nativeGenerateRawAudio", "(Ljava/lang/String;Ljava/lang/String;)I",
+ (int *)videoEditor_generateAudioRawFile },
+ {"nativeGenerateClip", "(L"EDIT_SETTINGS_CLASS_NAME";)I",
+ (void *)videoEditor_generateClip },
+};
+
+// temp file name of VSS out file
+#define TEMP_MCS_OUT_FILE_PATH "/tmpOut.3gp"
+
+void
+getClipSetting(
+ JNIEnv* pEnv,
+ jobject object,
+ M4VSS3GPP_ClipSettings* pSettings)
+{
+
+ jfieldID fid;
+ int field = 0;
+ bool needToBeLoaded = true;
+ jclass clazz = pEnv->FindClass(PROPERTIES_CLASS_NAME);
+
+ videoEditJava_checkAndThrowIllegalStateException(&needToBeLoaded, pEnv,
+ (M4OSA_NULL == clazz),
+ "not initialized");
+
+ fid = pEnv->GetFieldID(clazz,"duration","I");
+ pSettings->ClipProperties.uiClipDuration = pEnv->GetIntField(object,fid);
+ M4OSA_TRACE1_1("duration = %d",pSettings->ClipProperties.uiClipDuration);
+
+ fid = pEnv->GetFieldID(clazz,"videoFormat","I");
+ pSettings->ClipProperties.VideoStreamType =
+ (M4VIDEOEDITING_VideoFormat)pEnv->GetIntField(object,fid);
+ M4OSA_TRACE1_1("videoFormat = %d",pSettings->ClipProperties.VideoStreamType);
+
+ fid = pEnv->GetFieldID(clazz,"videoDuration","I");
+ pSettings->ClipProperties.uiClipVideoDuration = pEnv->GetIntField(object,fid);
+ M4OSA_TRACE1_1("videoDuration = %d",
+ pSettings->ClipProperties.uiClipVideoDuration);
+
+ fid = pEnv->GetFieldID(clazz,"width","I");
+ pSettings->ClipProperties.uiVideoWidth = pEnv->GetIntField(object,fid);
+ M4OSA_TRACE1_1("width = %d",pSettings->ClipProperties.uiVideoWidth);
+
+ fid = pEnv->GetFieldID(clazz,"height","I");
+ pSettings->ClipProperties.uiVideoHeight = pEnv->GetIntField(object,fid);
+ M4OSA_TRACE1_1("height = %d",pSettings->ClipProperties.uiVideoHeight);
+
+ fid = pEnv->GetFieldID(clazz,"audioFormat","I");
+ pSettings->ClipProperties.AudioStreamType =
+ (M4VIDEOEDITING_AudioFormat)pEnv->GetIntField(object,fid);
+ M4OSA_TRACE1_1("audioFormat = %d",pSettings->ClipProperties.AudioStreamType);
+
+ fid = pEnv->GetFieldID(clazz,"audioDuration","I");
+ pSettings->ClipProperties.uiClipAudioDuration = pEnv->GetIntField(object,fid);
+ M4OSA_TRACE1_1("audioDuration = %d",
+ pSettings->ClipProperties.uiClipAudioDuration);
+
+ fid = pEnv->GetFieldID(clazz,"audioBitrate","I");
+ pSettings->ClipProperties.uiAudioBitrate = pEnv->GetIntField(object,fid);
+ M4OSA_TRACE1_1("audioBitrate = %d",pSettings->ClipProperties.uiAudioBitrate);
+
+ fid = pEnv->GetFieldID(clazz,"audioChannels","I");
+ pSettings->ClipProperties.uiNbChannels = pEnv->GetIntField(object,fid);
+ M4OSA_TRACE1_1("audioChannels = %d",pSettings->ClipProperties.uiNbChannels);
+
+ fid = pEnv->GetFieldID(clazz,"audioSamplingFrequency","I");
+ pSettings->ClipProperties.uiSamplingFrequency = pEnv->GetIntField(object,fid);
+ M4OSA_TRACE1_1("audioSamplingFrequency = %d",
+ pSettings->ClipProperties.uiSamplingFrequency);
+
+ fid = pEnv->GetFieldID(clazz,"audioVolumeValue","I");
+ pSettings->ClipProperties.uiClipAudioVolumePercentage =
+ pEnv->GetIntField(object,fid);
+ M4OSA_TRACE1_1("audioVolumeValue = %d",
+ pSettings->ClipProperties.uiClipAudioVolumePercentage);
+}
+
+static void jniPreviewProgressCallback (void* cookie, M4OSA_UInt32 msgType,
+ M4OSA_UInt32 argc)
+{
+ ManualEditContext *pContext = (ManualEditContext *)cookie;
+ JNIEnv* pEnv = NULL;
+ bool isFinished = false;
+ int currentMs = 0;
+ int error = M4NO_ERROR;
+
+ // Attach the current thread.
+ pContext->pVM->AttachCurrentThread(&pEnv, NULL);
+ switch(msgType)
+ {
+ case MSG_TYPE_PROGRESS_INDICATION:
+ currentMs = argc;
+ break;
+ case MSG_TYPE_PLAYER_ERROR:
+ currentMs = -1;
+ error = argc;
+ break;
+ case MSG_TYPE_PREVIEW_END:
+ isFinished = true;
+ break;
+ default:
+ break;
+ }
+
+ pEnv->CallVoidMethod(pContext->engine,
+ pContext->onPreviewProgressUpdateMethodId,
+ currentMs,isFinished);
+
+ // Detach the current thread.
+ pContext->pVM->DetachCurrentThread();
+
+}
+static void videoEditor_stopPreview(JNIEnv* pEnv,
+ jobject thiz)
+{
+ ManualEditContext* pContext = M4OSA_NULL;
+ bool needToBeLoaded = true;
+ // Get the context.
+ pContext =
+ (ManualEditContext*)videoEditClasses_getContext(&needToBeLoaded, pEnv, thiz);
+
+ // Make sure that the context was set.
+ videoEditJava_checkAndThrowIllegalStateException(&needToBeLoaded, pEnv,
+ (M4OSA_NULL == pContext),
+ "not initialized");
+ pContext->mPreviewController->stopPreview();
+}
+
+static int videoEditor_renderPreviewFrame(JNIEnv* pEnv,
+ jobject thiz,
+ jobject mSurface,
+ jlong fromMs,
+ jint surfaceWidth,
+ jint surfaceHeight )
+{
+ bool needToBeLoaded = true;
+ M4OSA_ERR result = M4NO_ERROR;
+ M4OSA_UInt32 timeMs = (M4OSA_UInt32)fromMs;
+ M4OSA_UInt32 i=0,tnTimeMs = 0, framesizeYuv =0;
+ M4VIFI_UInt8 *pixelArray = M4OSA_NULL;
+ M4OSA_UInt32 iCurrentClipIndex = 0, uiNumberOfClipsInStoryBoard =0,
+ uiClipDuration = 0, uiTotalClipDuration = 0,
+ iIncrementedDuration = 0;
+ VideoEditor_renderPreviewFrameStr frameStr;
+ M4OSA_Context tnContext = M4OSA_NULL;
+ const char* pMessage = NULL;
+ M4VIFI_ImagePlane *yuvPlane;
+
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO,
+ "VIDEO_EDITOR", "surfaceWidth = %d",surfaceWidth);
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO,
+ "VIDEO_EDITOR", "surfaceHeight = %d",surfaceHeight);
+ ManualEditContext* pContext = M4OSA_NULL;
+ // Get the context.
+ pContext =
+ (ManualEditContext*)videoEditClasses_getContext(&needToBeLoaded, pEnv, thiz);
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO,
+ "VIDEO_EDITOR","pContext = 0x%x",pContext);
+
+ // Make sure that the context was set.
+ videoEditJava_checkAndThrowIllegalStateException(&needToBeLoaded, pEnv,
+ (M4OSA_NULL == pContext),
+ "not initialized");
+
+ // Make sure that the context was set.
+ videoEditJava_checkAndThrowIllegalStateException(&needToBeLoaded, pEnv,
+ (M4OSA_NULL == pContext->mPreviewController),
+ "not initialized");
+
+ // Validate the mSurface parameter.
+ videoEditJava_checkAndThrowIllegalArgumentException(&needToBeLoaded, pEnv,
+ (NULL == mSurface),
+ "mSurface is null");
+ jclass surfaceClass = pEnv->FindClass("android/view/Surface");
+ videoEditJava_checkAndThrowIllegalStateException(&needToBeLoaded, pEnv,
+ (M4OSA_NULL == surfaceClass),
+ "not initialized");
+
+ jfieldID surface_native =
+ pEnv->GetFieldID(surfaceClass, ANDROID_VIEW_SURFACE_JNI_ID, "I");
+ videoEditJava_checkAndThrowIllegalStateException(&needToBeLoaded, pEnv,
+ (M4OSA_NULL == surface_native),
+ "not initialized");
+
+ Surface* const p = (Surface*)pEnv->GetIntField(mSurface, surface_native);
+ sp<Surface> previewSurface = sp<Surface>(p);
+
+    /* Determine the total number of clips and the total duration */
+ uiNumberOfClipsInStoryBoard = pContext->pEditSettings->uiClipNumber;
+
+ for (i = 0; i < uiNumberOfClipsInStoryBoard; i++) {
+ uiClipDuration = pContext->pEditSettings->pClipList[i]->uiEndCutTime -
+ pContext->pEditSettings->pClipList[i]->uiBeginCutTime;
+ uiTotalClipDuration += uiClipDuration;
+ }
+
+ /* determine the clip whose thumbnail needs to be rendered*/
+ if (timeMs == 0) {
+ iCurrentClipIndex = 0;
+ i=0;
+ } else {
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR",
+ "videoEditor_renderPreviewFrame() timeMs=%d", timeMs);
+
+ if (timeMs > uiTotalClipDuration) {
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR",
+ "videoEditor_renderPreviewFrame() timeMs > uiTotalClipDuration");
+ pMessage = videoEditJava_getErrorName(M4ERR_PARAMETER);
+ jniThrowException(pEnv, "java/lang/IllegalArgumentException", pMessage);
+ return -1;
+ }
+
+ for (i = 0; i < uiNumberOfClipsInStoryBoard; i++) {
+ if (timeMs < (iIncrementedDuration +
+ (pContext->pEditSettings->pClipList[i]->uiEndCutTime -
+ pContext->pEditSettings->pClipList[i]->uiBeginCutTime)))
+ {
+ iCurrentClipIndex = i;
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR",
+ "videoEditor_renderPreviewFrame() iCurrentClipIndex=%d for timeMs=%d",
+ iCurrentClipIndex, timeMs);
+ break;
+ }
+ else {
+ iIncrementedDuration = iIncrementedDuration +
+ (pContext->pEditSettings->pClipList[i]->uiEndCutTime -
+ pContext->pEditSettings->pClipList[i]->uiBeginCutTime);
+ }
+ }
+ }
+    /* If the timestamp is beyond the storyboard duration, return */
+ if (i >= uiNumberOfClipsInStoryBoard) {
+ if (timeMs == iIncrementedDuration) {
+ iCurrentClipIndex = i-1;
+ } else {
+ return -1;
+ }
+ }
+
+ /*+ Handle the image files here */
+ if (pContext->pEditSettings->pClipList[iCurrentClipIndex]->FileType ==
+ /*M4VIDEOEDITING_kFileType_JPG*/ M4VIDEOEDITING_kFileType_ARGB8888 ) {
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR", " iCurrentClipIndex %d ", iCurrentClipIndex);
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR",
+ " Height = %d",
+ pContext->pEditSettings->pClipList[iCurrentClipIndex]->ClipProperties.uiVideoHeight);
+
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR",
+ " Width = %d",
+ pContext->pEditSettings->pClipList[iCurrentClipIndex]->ClipProperties.uiVideoWidth);
+
+ LvGetImageThumbNail((const char *)pContext->pEditSettings->\
+ pClipList[iCurrentClipIndex]->pFile,
+ pContext->pEditSettings->pClipList[iCurrentClipIndex]->ClipProperties.uiVideoHeight,
+ pContext->pEditSettings->pClipList[iCurrentClipIndex]->ClipProperties.uiVideoWidth,
+ (M4OSA_Void **)&frameStr.pBuffer);
+ } else {
+ /* Handle 3gp/mp4 Clips here */
+ /* get thumbnail*/
+ result = ThumbnailOpen(&tnContext,
+ (const M4OSA_Char*)pContext->pEditSettings->\
+ pClipList[iCurrentClipIndex]->pFile, M4OSA_TRUE);
+ if (result != M4NO_ERROR || tnContext == M4OSA_NULL) {
+ return -1;
+ }
+
+        /* timeMs is relative to the storyboard; in this API it should be relative to this clip */
+ if ((i >= uiNumberOfClipsInStoryBoard) &&
+ (timeMs == iIncrementedDuration)) {
+ tnTimeMs = pContext->pEditSettings->\
+ pClipList[iCurrentClipIndex]->uiEndCutTime;
+ } else {
+ tnTimeMs = pContext->pEditSettings->\
+ pClipList[iCurrentClipIndex]->uiBeginCutTime
+ + (timeMs - iIncrementedDuration);
+ }
+
+ VIDEOEDIT_LOG_API(ANDROID_LOG_INFO, "VIDEO_EDITOR",
+ "video width = %d",pContext->pEditSettings->pClipList[iCurrentClipIndex]->\
+ ClipProperties.uiVideoWidth);
+ VIDEOEDIT_LOG_API(ANDROID_LOG_INFO, "VIDEO_EDITOR",
+ "video height = %d",pContext->pEditSettings->pClipList[iCurrentClipIndex]->\
+ ClipProperties.uiVideoHeight);
+ VIDEOEDIT_LOG_API(ANDROID_LOG_INFO, "VIDEO_EDITOR",
+ "current clip index = %d",iCurrentClipIndex);
+
+ M4OSA_UInt32 width = pContext->pEditSettings->pClipList[iCurrentClipIndex]->\
+ ClipProperties.uiVideoWidth;
+ M4OSA_UInt32 height = pContext->pEditSettings->pClipList[iCurrentClipIndex]->\
+ ClipProperties.uiVideoHeight;
+
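+        // A YUV420 frame needs 1.5 bytes per pixel: a full-size Y plane plus
+        // two quarter-size chroma planes.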
+ framesizeYuv = width * height * 1.5;
+
+ pixelArray = (M4VIFI_UInt8 *)M4OSA_malloc(framesizeYuv, M4VS,
+ (M4OSA_Char*)"videoEditor pixelArray");
+ if (pixelArray == M4OSA_NULL) {
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR",
+ "videoEditor_renderPreviewFrame() malloc error");
+ ThumbnailClose(tnContext);
+ pMessage = videoEditJava_getErrorName(M4ERR_ALLOC);
+ jniThrowException(pEnv, "java/lang/RuntimeException", pMessage);
+ return -1;
+ }
+
+ result = ThumbnailGetPixels16(tnContext, (M4OSA_Int16 *)pixelArray,
+ pContext->pEditSettings->pClipList[iCurrentClipIndex]->\
+ ClipProperties.uiVideoWidth,
+ pContext->pEditSettings->pClipList[iCurrentClipIndex]->\
+ ClipProperties.uiVideoHeight,
+ &tnTimeMs);
+ if (result != M4NO_ERROR) {
+ M4OSA_free((M4OSA_MemAddr32)pixelArray);
+ ThumbnailClose(tnContext);
+ return -1;
+ }
+
+ ThumbnailClose(tnContext);
+ tnContext = M4OSA_NULL;
+
+#ifdef DUMPTOFILE
+ {
+ M4OSA_Context fileContext;
+ M4OSA_Char* fileName = (M4OSA_Char*)"/mnt/sdcard/FirstRGB565.raw";
+ M4OSA_fileExtraDelete((const M4OSA_Char *)fileName);
+ M4OSA_fileWriteOpen(&fileContext, (M4OSA_Void*) fileName,\
+ M4OSA_kFileWrite|M4OSA_kFileCreate);
+ M4OSA_fileWriteData(fileContext, (M4OSA_MemAddr8) pixelArray,
+ framesizeYuv);
+ M4OSA_fileWriteClose(fileContext);
+ }
+#endif
+
+ /**
+ * Allocate output YUV planes
+ */
+ yuvPlane = (M4VIFI_ImagePlane*)M4OSA_malloc(3*sizeof(M4VIFI_ImagePlane), M4VS,
+ (M4OSA_Char*)"videoEditor_renderPreviewFrame Output plane YUV");
+ if (yuvPlane == M4OSA_NULL) {
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR",
+ "videoEditor_renderPreviewFrame() malloc error for yuv plane");
+ M4OSA_free((M4OSA_MemAddr32)pixelArray);
+ pMessage = videoEditJava_getErrorName(M4ERR_ALLOC);
+ jniThrowException(pEnv, "java/lang/RuntimeException", pMessage);
+ return -1;
+ }
+
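+        // Carve the three YUV420 planes out of the single pixelArray buffer:
+        // the Y plane first, followed by the contiguous U and V quarter planes.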
+ yuvPlane[0].u_width = width;
+ yuvPlane[0].u_height = height;
+ yuvPlane[0].u_topleft = 0;
+ yuvPlane[0].u_stride = width;
+ yuvPlane[0].pac_data = (M4VIFI_UInt8*)pixelArray;
+
+ yuvPlane[1].u_width = width>>1;
+ yuvPlane[1].u_height = height>>1;
+ yuvPlane[1].u_topleft = 0;
+ yuvPlane[1].u_stride = width>>1;
+ yuvPlane[1].pac_data = yuvPlane[0].pac_data
+ + yuvPlane[0].u_width * yuvPlane[0].u_height;
+ yuvPlane[2].u_width = (width)>>1;
+ yuvPlane[2].u_height = (height)>>1;
+ yuvPlane[2].u_topleft = 0;
+ yuvPlane[2].u_stride = (width)>>1;
+ yuvPlane[2].pac_data = yuvPlane[1].pac_data
+ + yuvPlane[1].u_width * yuvPlane[1].u_height;
+
+#ifdef DUMPTOFILE
+ {
+ M4OSA_Context fileContext;
+ M4OSA_Char* fileName = (M4OSA_Char*)"/mnt/sdcard/ConvertedYuv.yuv";
+ M4OSA_fileExtraDelete((const M4OSA_Char *)fileName);
+ M4OSA_fileWriteOpen(&fileContext, (M4OSA_Void*) fileName,\
+ M4OSA_kFileWrite|M4OSA_kFileCreate);
+ M4OSA_fileWriteData(fileContext,
+ (M4OSA_MemAddr8) yuvPlane[0].pac_data, framesizeYuv);
+ M4OSA_fileWriteClose(fileContext);
+ }
+#endif
+
+ /* Fill up the render structure*/
+ frameStr.pBuffer = (M4OSA_Void*)yuvPlane[0].pac_data;
+ }
+
+ frameStr.timeMs = timeMs; /* timestamp on storyboard*/
+ frameStr.uiSurfaceWidth =
+ pContext->pEditSettings->pClipList[iCurrentClipIndex]->\
+ ClipProperties.uiVideoWidth;
+ frameStr.uiSurfaceHeight =
+ pContext->pEditSettings->pClipList[iCurrentClipIndex]->\
+ ClipProperties.uiVideoHeight;
+ frameStr.uiFrameWidth =
+ pContext->pEditSettings->pClipList[iCurrentClipIndex]->\
+ ClipProperties.uiVideoWidth;
+ frameStr.uiFrameHeight =
+ pContext->pEditSettings->pClipList[iCurrentClipIndex]->\
+ ClipProperties.uiVideoHeight;
+ if (pContext->pEditSettings->nbEffects > 0) {
+ frameStr.bApplyEffect = M4OSA_TRUE;
+ } else {
+ frameStr.bApplyEffect = M4OSA_FALSE;
+ }
+ frameStr.clipBeginCutTime = iIncrementedDuration;
+ frameStr.clipEndCutTime =
+ iIncrementedDuration +
+ (pContext->pEditSettings->pClipList[iCurrentClipIndex]->uiEndCutTime -\
+ pContext->pEditSettings->pClipList[iCurrentClipIndex]->uiBeginCutTime);
+
+ pContext->mPreviewController->setPreviewFrameRenderingMode(
+ pContext->pEditSettings->\
+ pClipList[iCurrentClipIndex]->xVSS.MediaRendering,
+ pContext->pEditSettings->xVSS.outputVideoSize);
+
+ result = pContext->mPreviewController->renderPreviewFrame(previewSurface,
+ &frameStr);
+ videoEditJava_checkAndThrowRuntimeException(&needToBeLoaded, pEnv,
+ (M4NO_ERROR != result), result);
+
+ if (pContext->pEditSettings->pClipList[iCurrentClipIndex]->FileType ==\
+ /*M4VIDEOEDITING_kFileType_JPG */ M4VIDEOEDITING_kFileType_ARGB8888) {
+ M4OSA_free((M4OSA_MemAddr32)frameStr.pBuffer);
+ } else {
+ M4OSA_free((M4OSA_MemAddr32)yuvPlane[0].pac_data);
+ M4OSA_free((M4OSA_MemAddr32)yuvPlane);
+ }
+ return tnTimeMs;
+}
+
+static int videoEditor_renderMediaItemPreviewFrame(JNIEnv* pEnv,
+ jobject thiz,
+ jobject mSurface,
+ jstring filePath,
+ jint frameWidth,
+ jint frameHeight,
+ jint surfaceWidth,
+ jint surfaceHeight,
+ jlong fromMs)
+{
+ bool needToBeLoaded = true;
+ M4OSA_ERR result = M4NO_ERROR;
+ M4OSA_UInt32 timeMs = (M4OSA_UInt32)fromMs;
+ M4OSA_UInt32 framesizeYuv =0;
+ M4VIFI_UInt8 *pixelArray = M4OSA_NULL;
+ VideoEditor_renderPreviewFrameStr frameStr;
+ M4OSA_Context tnContext = M4OSA_NULL;
+ const char* pMessage = NULL;
+ M4VIFI_ImagePlane yuvPlane[3], rgbPlane;
+
+ ManualEditContext* pContext = M4OSA_NULL;
+ // Get the context.
+ pContext =
+ (ManualEditContext*)videoEditClasses_getContext(&needToBeLoaded,
+ pEnv, thiz);
+
+ // Make sure that the context was set.
+ videoEditJava_checkAndThrowIllegalStateException(&needToBeLoaded, pEnv,
+ (M4OSA_NULL == pContext),
+ "not initialized");
+
+ // Make sure that the context was set.
+ videoEditJava_checkAndThrowIllegalStateException(&needToBeLoaded, pEnv,
+ (M4OSA_NULL == pContext->mPreviewController),
+ "not initialized");
+
+ // Validate the mSurface parameter.
+ videoEditJava_checkAndThrowIllegalArgumentException(&needToBeLoaded, pEnv,
+ (NULL == mSurface),
+ "mSurface is null");
+ jclass surfaceClass = pEnv->FindClass("android/view/Surface");
+ videoEditJava_checkAndThrowIllegalStateException(&needToBeLoaded, pEnv,
+ (M4OSA_NULL == surfaceClass),
+ "not initialized");
+
+ jfieldID surface_native =
+ pEnv->GetFieldID(surfaceClass, ANDROID_VIEW_SURFACE_JNI_ID, "I");
+ videoEditJava_checkAndThrowIllegalStateException(&needToBeLoaded, pEnv,
+ (M4OSA_NULL == surface_native),
+ "not initialized");
+
+ Surface* const p = (Surface*)pEnv->GetIntField(mSurface, surface_native);
+ sp<Surface> previewSurface = sp<Surface>(p);
+
+
+ const char *pString = pEnv->GetStringUTFChars(filePath, NULL);
+ if (pString == M4OSA_NULL) {
+ if (pEnv != NULL) {
+ jniThrowException(pEnv, "java/lang/RuntimeException", "Input string null");
+ }
+ }
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR",
+ "videoEditor_renderMediaItemPreviewFrame() timeMs=%d", timeMs);
+ /* get thumbnail*/
+ result = ThumbnailOpen(&tnContext,(const M4OSA_Char*)pString, M4OSA_TRUE);
+ if (result != M4NO_ERROR || tnContext == M4OSA_NULL) {
+ return timeMs;
+ }
+
+ framesizeYuv = ((frameWidth)*(frameHeight)*1.5);
+
+ pixelArray = (M4VIFI_UInt8 *)M4OSA_malloc(framesizeYuv, M4VS,\
+ (M4OSA_Char*)"videoEditor pixelArray");
+ if (pixelArray == M4OSA_NULL) {
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR",
+ "videoEditor_renderPreviewFrame() malloc error");
+ ThumbnailClose(tnContext);
+ pMessage = videoEditJava_getErrorName(M4ERR_ALLOC);
+ jniThrowException(pEnv, "java/lang/RuntimeException", pMessage);
+ return timeMs;
+ }
+
+ result = ThumbnailGetPixels16(tnContext, (M4OSA_Int16 *)pixelArray,
+ frameWidth,
+ frameHeight, &timeMs);
+ if (result != M4NO_ERROR) {
+ M4OSA_free((M4OSA_MemAddr32)pixelArray);
+ ThumbnailClose(tnContext);
+ return fromMs;
+ }
+
+#ifdef DUMPTOFILESYSTEM
+ {
+ M4OSA_Context fileContext;
+ M4OSA_Char* fileName = (M4OSA_Char*)"/mnt/sdcard/FirstRGB565.rgb";
+ M4OSA_fileWriteOpen(&fileContext, (M4OSA_Void*) fileName,\
+ M4OSA_kFileWrite|M4OSA_kFileCreate);
+ M4OSA_fileWriteData(fileContext, (M4OSA_MemAddr8) pixelArray,
+                            framesizeYuv);
+ M4OSA_fileWriteClose(fileContext);
+ }
+#endif
+
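+    // Describe the thumbnail as three YUV420 planes sharing the pixelArray
+    // buffer: Y first, then the quarter-size U and V planes.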
+ yuvPlane[0].pac_data = (M4VIFI_UInt8*)pixelArray;
+ yuvPlane[0].u_height = frameHeight;
+ yuvPlane[0].u_width = frameWidth;
+ yuvPlane[0].u_stride = yuvPlane[0].u_width;
+ yuvPlane[0].u_topleft = 0;
+
+ yuvPlane[1].u_height = frameHeight/2;
+ yuvPlane[1].u_width = frameWidth/2;
+ yuvPlane[1].u_stride = yuvPlane[1].u_width;
+ yuvPlane[1].u_topleft = 0;
+ yuvPlane[1].pac_data = yuvPlane[0].pac_data
+ + yuvPlane[0].u_width*yuvPlane[0].u_height;
+
+ yuvPlane[2].u_height = frameHeight/2;
+ yuvPlane[2].u_width = frameWidth/2;
+ yuvPlane[2].u_stride = yuvPlane[2].u_width;
+ yuvPlane[2].u_topleft = 0;
+ yuvPlane[2].pac_data = yuvPlane[0].pac_data
+ + yuvPlane[0].u_width*yuvPlane[0].u_height + \
+ (yuvPlane[0].u_width/2)*(yuvPlane[0].u_height/2);
+#ifdef DUMPTOFILESYSTEM
+ {
+ M4OSA_Context fileContext;
+ M4OSA_Char* fileName = (M4OSA_Char*)"/mnt/sdcard/ConvertedYuv.yuv";
+ M4OSA_fileWriteOpen(&fileContext, (M4OSA_Void*) fileName,\
+ M4OSA_kFileWrite|M4OSA_kFileCreate);
+ M4OSA_fileWriteData(fileContext, (M4OSA_MemAddr8) yuvPlane[0].pac_data,
+ framesizeYuv);
+ M4OSA_fileWriteClose(fileContext);
+ }
+#endif
+
+ /* Fill up the render structure*/
+ frameStr.pBuffer = (M4OSA_Void*)yuvPlane[0].pac_data;
+ frameStr.timeMs = timeMs; /* timestamp on storyboard*/
+ frameStr.uiSurfaceWidth = frameWidth;
+ frameStr.uiSurfaceHeight = frameHeight;
+ frameStr.uiFrameWidth = frameWidth;
+ frameStr.uiFrameHeight = frameHeight;
+ frameStr.bApplyEffect = M4OSA_FALSE;
+    // Clip begin and end cut times are set to 0,
+    // as they are only required when an effect needs to be applied while rendering
+ frameStr.clipBeginCutTime = 0;
+ frameStr.clipEndCutTime = 0;
+
+ /* pContext->mPreviewController->setPreviewFrameRenderingMode(M4xVSS_kBlackBorders,
+ (M4VIDEOEDITING_VideoFrameSize)(M4VIDEOEDITING_kHD960+1));*/
+ result
+ = pContext->mPreviewController->renderPreviewFrame(previewSurface,&frameStr);
+ videoEditJava_checkAndThrowRuntimeException(&needToBeLoaded, pEnv,
+ (M4NO_ERROR != result), result);
+
+ /* free the pixelArray and yuvPlane[0].pac_data */
+ M4OSA_free((M4OSA_MemAddr32)yuvPlane[0].pac_data);
+
+ ThumbnailClose(tnContext);
+
+ return timeMs;
+}
+
+int videoEditor_generateAudioRawFile( JNIEnv* pEnv,
+ jobject thiz,
+ jstring infilePath,
+ jstring pcmfilePath)
+{
+ M4OSA_ERR result = M4NO_ERROR;
+ bool loaded = true;
+ ManualEditContext* pContext = M4OSA_NULL;
+
+
+
+ const char *pInputFile = pEnv->GetStringUTFChars(infilePath, NULL);
+ if (pInputFile == M4OSA_NULL) {
+ if (pEnv != NULL) {
+ jniThrowException(pEnv, "java/lang/RuntimeException", "Input string null");
+ }
+ }
+
+ const char *pStringOutPCMFilePath = pEnv->GetStringUTFChars(pcmfilePath, NULL);
+ if (pStringOutPCMFilePath == M4OSA_NULL) {
+ if (pEnv != NULL) {
+ jniThrowException(pEnv, "java/lang/RuntimeException", "Input string null");
+ }
+ }
+
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO,
+ "VIDEO_EDITOR", "videoEditor_generateAudioRawFile infilePath %s",
+ pInputFile);
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO,
+ "VIDEO_EDITOR", "videoEditor_generateAudioRawFile pcmfilePath %s",
+ pStringOutPCMFilePath);
+ // Get the context.
+ pContext = (ManualEditContext*)videoEditClasses_getContext(&loaded, pEnv, thiz);
+
+ result = videoEditor_generateAudio( pEnv, pContext, (M4OSA_Char*)pInputFile,
+ (M4OSA_Char*)pStringOutPCMFilePath);
+
+ return result;
+}
+
+M4OSA_ERR videoEditor_generateAudio(JNIEnv* pEnv,ManualEditContext* pContext,
+ M4OSA_Char* infilePath,
+ M4OSA_Char* pcmfilePath )
+{
+ bool needToBeLoaded = true;
+ M4OSA_ERR result = M4NO_ERROR;
+ M4MCS_Context mcsContext;
+ M4OSA_Char* pInputFile = M4OSA_NULL;
+ M4OSA_Char* pOutputFile = M4OSA_NULL;
+ M4OSA_Char* pTempPath = M4OSA_NULL;
+ M4MCS_OutputParams* pOutputParams = M4OSA_NULL;
+ M4MCS_EncodingParams* pEncodingParams = M4OSA_NULL;
+ M4OSA_Int32 pInputFileType = 0;
+ M4OSA_UInt8 threadProgress = 0;
+ M4OSA_Char* pTemp3gpFilePath = M4OSA_NULL;
+
+ VIDEOEDIT_LOG_API(ANDROID_LOG_INFO, "VIDEO_EDITOR", "videoEditor_generateAudio()");
+
+ videoEditJava_checkAndThrowIllegalArgumentException(&needToBeLoaded, pEnv,
+ (NULL == pContext),
+ "ManualEditContext is null");
+
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR", "M4MCS_init()");
+
+ pOutputParams = (M4MCS_OutputParams *)M4OSA_malloc(
+ sizeof(M4MCS_OutputParams),0x00,
+ (M4OSA_Char *)"M4MCS_OutputParams");
+ videoEditJava_checkAndThrowIllegalStateException(&needToBeLoaded, pEnv,
+ (M4OSA_NULL == pOutputParams),
+ "not initialized");
+
+ pEncodingParams = (M4MCS_EncodingParams *)M4OSA_malloc(
+ sizeof(M4MCS_EncodingParams),0x00,
+ (M4OSA_Char *)"M4MCS_EncodingParams");
+ videoEditJava_checkAndThrowIllegalStateException(&needToBeLoaded, pEnv,
+ (M4OSA_NULL == pEncodingParams),
+ "not initialized");
+ // Initialize the MCS library.
+ result = M4MCS_init(&mcsContext, pContext->initParams.pFileReadPtr,
+ pContext->initParams.pFileWritePtr);
+ videoEditJava_checkAndThrowRuntimeException(&needToBeLoaded, pEnv,\
+ (M4NO_ERROR != result), result);
+ videoEditJava_checkAndThrowIllegalStateException(&needToBeLoaded, pEnv,
+ (M4OSA_NULL == mcsContext),
+ "not initialized");
+ // generate the path for temp 3gp output file
+    pTemp3gpFilePath = (M4OSA_Char*) M4OSA_malloc (
+        (M4OSA_chrLength((M4OSA_Char*)pContext->initParams.pTempPath)
+        + M4OSA_chrLength((M4OSA_Char*)TEMP_MCS_OUT_FILE_PATH)) + 1, 0x0,
+        (M4OSA_Char*) "Malloc for temp 3gp file"); // +1 for the terminating null
+ if ( pTemp3gpFilePath != M4OSA_NULL )
+ {
+ M4OSA_memset(pTemp3gpFilePath ,
+ M4OSA_chrLength((M4OSA_Char*)pContext->initParams.pTempPath)
+ + M4OSA_chrLength((M4OSA_Char*)TEMP_MCS_OUT_FILE_PATH), 0);
+ M4OSA_chrNCat ( (M4OSA_Char*)pTemp3gpFilePath,
+ (M4OSA_Char*)pContext->initParams.pTempPath ,
+ M4OSA_chrLength ((M4OSA_Char*)pContext->initParams.pTempPath));
+ M4OSA_chrNCat ( pTemp3gpFilePath , (M4OSA_Char*)TEMP_MCS_OUT_FILE_PATH,
+ M4OSA_chrLength ((M4OSA_Char*)TEMP_MCS_OUT_FILE_PATH));
+ }
+
+ pInputFile = (M4OSA_Char *) infilePath; //pContext->mAudioSettings->pFile;
+ //Delete this file later
+ pOutputFile = (M4OSA_Char *) pTemp3gpFilePath;
+ // Temp folder path for VSS use = ProjectPath
+ pTempPath = (M4OSA_Char *) pContext->initParams.pTempPath;
+ pInputFileType = (M4VIDEOEDITING_FileType)pContext->mAudioSettings->fileType;
+
+ VIDEOEDIT_LOG_API(ANDROID_LOG_INFO, "VIDEO_EDITOR", "TEMP_MCS_OUT_FILE_PATH len %d",
+ M4OSA_chrLength ((M4OSA_Char*)TEMP_MCS_OUT_FILE_PATH));
+ VIDEOEDIT_LOG_API(ANDROID_LOG_INFO, "VIDEO_EDITOR", "pTemp3gpFilePath %s",
+ pOutputFile);
+
+ VIDEOEDIT_LOG_API(ANDROID_LOG_INFO, "VIDEO_EDITOR", "M4MCS_open()");
+
+ result = M4MCS_open(mcsContext, pInputFile,
+ (M4VIDEOEDITING_FileType)pInputFileType,
+ pOutputFile, pTempPath);
+ videoEditJava_checkAndThrowRuntimeException(&needToBeLoaded, pEnv,
+ (M4NO_ERROR != result), result);
+
+ pOutputParams->OutputFileType
+ = (M4VIDEOEDITING_FileType)M4VIDEOEDITING_kFileType_3GPP;
+ // Set the video format.
+ pOutputParams->OutputVideoFormat =
+        (M4VIDEOEDITING_VideoFormat)M4VIDEOEDITING_kNoneVideo;
+ // Set the frame size.
+ pOutputParams->OutputVideoFrameSize
+ = (M4VIDEOEDITING_VideoFrameSize)M4VIDEOEDITING_kQCIF;
+ // Set the frame rate.
+ pOutputParams->OutputVideoFrameRate
+ = (M4VIDEOEDITING_VideoFramerate)M4VIDEOEDITING_k5_FPS;
+
+ // Set the audio format.
+ pOutputParams->OutputAudioFormat
+ = (M4VIDEOEDITING_AudioFormat)M4VIDEOEDITING_kAAC;
+ // Set the audio sampling frequency.
+ pOutputParams->OutputAudioSamplingFrequency =
+ (M4VIDEOEDITING_AudioSamplingFrequency)M4VIDEOEDITING_k32000_ASF;
+    // Use stereo output (mono disabled).
+ pOutputParams->bAudioMono = false;
+    // Set the output PCM file path.
+ pOutputParams->pOutputPCMfile = (M4OSA_Char *)pcmfilePath;
+ //(M4OSA_Char *)"/sdcard/Output/AudioPcm.pcm";
+    // Set the media rendering mode to cropping.
+ pOutputParams->MediaRendering = (M4MCS_MediaRendering)M4MCS_kCropping;
+ // new params after integrating MCS 2.0
+ // Set the number of audio effects; 0 for now.
+ pOutputParams->nbEffects = 0;
+ // Set the audio effect; null for now.
+ pOutputParams->pEffects = NULL;
+    // Do not discard the EXIF data.
+ pOutputParams->bDiscardExif = M4OSA_FALSE;
+    // Do not adjust the orientation.
+ pOutputParams->bAdjustOrientation = M4OSA_FALSE;
+
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR", "M4MCS_setOutputParams()");
+ result = M4MCS_setOutputParams(mcsContext, pOutputParams);
+ videoEditJava_checkAndThrowRuntimeException(&needToBeLoaded, pEnv,
+ (M4NO_ERROR != result), result);
+
+ // Set the video bitrate.
+ pEncodingParams->OutputVideoBitrate =
+ (M4VIDEOEDITING_Bitrate)M4VIDEOEDITING_kUndefinedBitrate;
+ // Set the audio bitrate.
+ pEncodingParams->OutputAudioBitrate
+ = (M4VIDEOEDITING_Bitrate)M4VIDEOEDITING_k128_KBPS;
+    // Set the begin cut time in milliseconds.
+ pEncodingParams->BeginCutTime = 0;
+ // Set the end cut time in milliseconds.
+ pEncodingParams->EndCutTime = 0;
+ // Set the output file size in bytes.
+ pEncodingParams->OutputFileSize = 0;
+ // Set video time scale.
+ pEncodingParams->OutputVideoTimescale = 0;
+
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR",
+ "M4MCS_setEncodingParams()");
+ result = M4MCS_setEncodingParams(mcsContext, pEncodingParams);
+ videoEditJava_checkAndThrowRuntimeException(&needToBeLoaded, pEnv,
+ (M4NO_ERROR != result), result);
+
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR",
+ "M4MCS_checkParamsAndStart()");
+ result = M4MCS_checkParamsAndStart(mcsContext);
+ videoEditJava_checkAndThrowRuntimeException(&needToBeLoaded, pEnv,
+ (M4NO_ERROR != result), result);
+
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR", "M4MCS_step()");
+
+ /*+ PROGRESS CB */
+ M4OSA_UInt8 curProgress = 0;
+ int lastProgress = 0;
+
+ LOGV("LVME_generateAudio Current progress is =%d", curProgress);
+ pEnv->CallVoidMethod(pContext->engine,
+ pContext->onProgressUpdateMethodId, 1/*task status*/,
+ curProgress/*progress*/);
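+    // Step the MCS transcoder until it reports M4MCS_WAR_TRANSCODING_DONE,
+    // forwarding intermediate progress values to the Java engine.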
+ do {
+ result = M4MCS_step(mcsContext, &curProgress);
+
+ if (result != M4NO_ERROR) {
+ LOGV("LVME_generateAudio M4MCS_step returned 0x%x",result);
+
+ if (result == M4MCS_WAR_TRANSCODING_DONE) {
+ LOGV("LVME_generateAudio MCS process ended");
+
+ // Send a progress notification.
+ curProgress = 100;
+ pEnv->CallVoidMethod(pContext->engine,
+ pContext->onProgressUpdateMethodId, 1/*task status*/,
+ curProgress);
+ LOGV("LVME_generateAudio Current progress is =%d", curProgress);
+ }
+ } else {
+ // Send a progress notification if needed
+ if (curProgress != lastProgress) {
+ lastProgress = curProgress;
+ pEnv->CallVoidMethod(pContext->engine,
+ pContext->onProgressUpdateMethodId, 0/*task status*/,
+ curProgress/*progress*/);
+ LOGV("LVME_generateAudio Current progress is =%d",curProgress);
+ }
+ }
+ } while (result == M4NO_ERROR);
+ /*- PROGRESS CB */
+
+ videoEditJava_checkAndThrowRuntimeException(&needToBeLoaded, pEnv,
+ (M4MCS_WAR_TRANSCODING_DONE != result), result);
+
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR", "M4MCS_abort()");
+ result = M4MCS_abort(mcsContext);
+ videoEditJava_checkAndThrowRuntimeException(&needToBeLoaded, pEnv,
+ (M4NO_ERROR != result), result);
+
+ //pContext->mAudioSettings->pFile = pOutputParams->pOutputPCMfile;
+ M4OSA_fileExtraDelete((const M4OSA_Char *) pTemp3gpFilePath);
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR", "videoEditor_generateAudio() EXIT ");
+
+ M4OSA_free((M4OSA_MemAddr32)pTemp3gpFilePath);
+ M4OSA_free((M4OSA_MemAddr32)pOutputParams);
+ M4OSA_free((M4OSA_MemAddr32)pEncodingParams);
+ return result;
+}
+
+static int removeAlphafromRGB8888 (
+ M4OSA_Char* pFramingFilePath,
+ M4xVSS_FramingStruct *pFramingCtx)
+{
+ M4OSA_UInt32 frameSize_argb = (pFramingCtx->width * pFramingCtx->height * 4); // aRGB data
+ M4OSA_Context lImageFileFp = M4OSA_NULL;
+ M4OSA_ERR err = M4NO_ERROR;
+
+ LOGV("removeAlphafromRGB8888: width %d", pFramingCtx->width);
+
+ M4OSA_UInt8 *pTmpData = (M4OSA_UInt8*) M4OSA_malloc(frameSize_argb, M4VS, (M4OSA_Char*)"Image argb data");
+ if (pTmpData == M4OSA_NULL) {
+ LOGE("Failed to allocate memory for Image clip");
+ return M4ERR_ALLOC;
+ }
+
+ /** Read the argb data from the passed file. */
+ M4OSA_ERR lerr = M4OSA_fileReadOpen(&lImageFileFp, (M4OSA_Void *) pFramingFilePath, M4OSA_kFileRead);
+
+
+ if ((lerr != M4NO_ERROR) || (lImageFileFp == M4OSA_NULL))
+ {
+ LOGE("removeAlphafromRGB8888: Can not open the file ");
+ M4OSA_free((M4OSA_MemAddr32)pTmpData);
+ return M4ERR_FILE_NOT_FOUND;
+ }
+
+
+ lerr = M4OSA_fileReadData(lImageFileFp, (M4OSA_MemAddr8)pTmpData, &frameSize_argb);
+ if (lerr != M4NO_ERROR)
+ {
+ LOGE("removeAlphafromRGB8888: can not read the data ");
+ M4OSA_fileReadClose(lImageFileFp);
+ M4OSA_free((M4OSA_MemAddr32)pTmpData);
+ return lerr;
+ }
+ M4OSA_fileReadClose(lImageFileFp);
+
+ M4OSA_UInt32 frameSize = (pFramingCtx->width * pFramingCtx->height * 3); //Size of RGB 888 data.
+
+    pFramingCtx->FramingRgb = (M4VIFI_ImagePlane*)M4OSA_malloc(
+            sizeof(M4VIFI_ImagePlane), M4VS, (M4OSA_Char*)"Image clip RGB888 data");
+    if (pFramingCtx->FramingRgb == M4OSA_NULL)
+    {
+        LOGE("Failed to allocate memory for Image clip");
+        M4OSA_free((M4OSA_MemAddr32)pTmpData);
+        return M4ERR_ALLOC;
+    }
+
+    pFramingCtx->FramingRgb->pac_data = (M4VIFI_UInt8*)M4OSA_malloc(
+            frameSize, M4VS, (M4OSA_Char*)"Image clip RGB888 data");
+    if (pFramingCtx->FramingRgb->pac_data == M4OSA_NULL)
+    {
+        LOGE("Failed to allocate memory for Image clip");
+        M4OSA_free((M4OSA_MemAddr32)pFramingCtx->FramingRgb);
+        M4OSA_free((M4OSA_MemAddr32)pTmpData);
+        return M4ERR_ALLOC;
+    }
+
+ /** Remove the alpha channel */
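+    // Pixels are stored as A,R,G,B bytes; skip every fourth byte (the alpha)
+    // and pack the remaining R,G,B bytes contiguously.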
+ for (int i = 0, j = 0; i < frameSize_argb; i++) {
+ if ((i % 4) == 0) continue;
+ pFramingCtx->FramingRgb->pac_data[j] = pTmpData[i];
+ j++;
+ }
+ M4OSA_free((M4OSA_MemAddr32)pTmpData);
+ return M4NO_ERROR;
+}
+
+static void
+videoEditor_populateSettings(
+ JNIEnv* pEnv,
+ jobject thiz,
+ jobject settings,
+ jobject object,
+ jobject audioSettingObject)
+{
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR",
+ "videoEditor_populateSettings()");
+
+ bool needToBeLoaded = true;
+ ManualEditContext* pContext = M4OSA_NULL;
+ M4OSA_ERR result = M4NO_ERROR;
+ jstring str = M4OSA_NULL;
+ jobjectArray propertiesClipsArray = M4OSA_NULL;
+ jobject properties = M4OSA_NULL;
+ jint* bitmapArray = M4OSA_NULL;
+ jobjectArray effectSettingsArray = M4OSA_NULL;
+ jobject effectSettings = M4OSA_NULL;
+ jintArray pixelArray = M4OSA_NULL;
+ int width = 0;
+ int height = 0;
+ int nbOverlays = 0;
+ int i,j = 0;
+ int *pOverlayIndex = M4OSA_NULL;
+
+ // Add a code marker (the condition must always be true).
+ ADD_CODE_MARKER_FUN(NULL != pEnv)
+
+ // Validate the settings parameter.
+ videoEditJava_checkAndThrowIllegalArgumentException(&needToBeLoaded, pEnv,
+ (NULL == settings),
+ "settings is null");
+ // Get the context.
+ pContext =
+ (ManualEditContext*)videoEditClasses_getContext(&needToBeLoaded, pEnv, thiz);
+
+ // Make sure that the context was set.
+ videoEditJava_checkAndThrowIllegalStateException(&needToBeLoaded, pEnv,
+ (M4OSA_NULL == pContext),
+ "not initialized");
+ // Make sure that the context was set.
+ videoEditJava_checkAndThrowIllegalStateException(&needToBeLoaded, pEnv,
+ (M4OSA_NULL == pContext->mPreviewController),
+ "not initialized");
+ jclass mPreviewClipPropClazz = pEnv->FindClass(PREVIEW_PROPERTIES_CLASS_NAME);
+ videoEditJava_checkAndThrowIllegalStateException(&needToBeLoaded, pEnv,
+ (M4OSA_NULL == mPreviewClipPropClazz),
+ "not initialized");
+
+ jfieldID fid = pEnv->GetFieldID(mPreviewClipPropClazz,"clipProperties",
+ "[L"PROPERTIES_CLASS_NAME";" );
+ videoEditJava_checkAndThrowIllegalStateException(&needToBeLoaded, pEnv,
+ (M4OSA_NULL == fid),
+ "not initialized");
+
+ propertiesClipsArray = (jobjectArray)pEnv->GetObjectField(object, fid);
+ videoEditJava_checkAndThrowIllegalStateException(&needToBeLoaded, pEnv,
+ (M4OSA_NULL == propertiesClipsArray),
+ "not initialized");
+
+ jclass engineClass = pEnv->FindClass(MANUAL_EDIT_ENGINE_CLASS_NAME);
+ videoEditJava_checkAndThrowIllegalStateException(&needToBeLoaded, pEnv,
+ (M4OSA_NULL == engineClass),
+ "not initialized");
+
+ pContext->onPreviewProgressUpdateMethodId = pEnv->GetMethodID(engineClass,
+ "onPreviewProgressUpdate", "(IZ)V");
+ // Check if the context is valid (required because the context is dereferenced).
+ if (needToBeLoaded) {
+ // Make sure that we are in a correct state.
+ videoEditJava_checkAndThrowIllegalStateException(&needToBeLoaded, pEnv,
+ (pContext->state != ManualEditState_INITIALIZED),
+ "settings already loaded");
+ // Retrieve the edit settings.
+ if (pContext->pEditSettings != M4OSA_NULL) {
+ videoEditClasses_freeEditSettings(&pContext->pEditSettings);
+ pContext->pEditSettings = M4OSA_NULL;
+ }
+ videoEditClasses_getEditSettings(&needToBeLoaded, pEnv,
+ settings, &pContext->pEditSettings,false);
+ }
+ M4OSA_TRACE1_0("videoEditorC_getEditSettings done");
+
+ if ( pContext->pEditSettings != NULL )
+ {
+ // Check if the edit settings could be retrieved.
+ jclass mEditClazz = pEnv->FindClass(EDIT_SETTINGS_CLASS_NAME);
+ if(mEditClazz == M4OSA_NULL)
+ {
+ M4OSA_TRACE1_0("cannot find object field for mEditClazz");
+ return;
+ }
+ jclass mEffectsClazz = pEnv->FindClass(EFFECT_SETTINGS_CLASS_NAME);
+ if(mEffectsClazz == M4OSA_NULL)
+ {
+ M4OSA_TRACE1_0("cannot find object field for mEffectsClazz");
+ return;
+ }
+ fid = pEnv->GetFieldID(mEditClazz,"effectSettingsArray", "[L"EFFECT_SETTINGS_CLASS_NAME";" );
+ if(fid == M4OSA_NULL)
+ {
+ M4OSA_TRACE1_0("cannot find field for effectSettingsArray Array");
+ return;
+ }
+ effectSettingsArray = (jobjectArray)pEnv->GetObjectField(settings, fid);
+ if(effectSettingsArray == M4OSA_NULL)
+ {
+ M4OSA_TRACE1_0("cannot find object field for effectSettingsArray");
+ return;
+ }
+ i = 0;
+ j = 0;
+ //int overlayIndex[pContext->pEditSettings->nbEffects];
+ if ( pContext->pEditSettings->nbEffects )
+ {
+ pOverlayIndex
+ = (int*) M4OSA_malloc(pContext->pEditSettings->nbEffects, 0,
+ (M4OSA_Char*)"pOverlayIndex");
+ }
+
+ M4OSA_TRACE1_1("no of effects = %d",pContext->pEditSettings->nbEffects);
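+        // For every effect that carries an overlay (framing) file, convert its
+        // ARGB8888 buffer to RGB888 at the output video size and remember the
+        // effect index so the temporary buffer can be freed once the settings
+        // have been loaded into the preview controller.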
+ while (j < pContext->pEditSettings->nbEffects)
+ {
+ if (pContext->pEditSettings->Effects[j].xVSS.pFramingFilePath != M4OSA_NULL)
+ {
+ pOverlayIndex[nbOverlays] = j;
+ nbOverlays++;
+ M4xVSS_FramingStruct *aFramingCtx = M4OSA_NULL;
+ aFramingCtx
+ = (M4xVSS_FramingStruct*)M4OSA_malloc(sizeof(M4xVSS_FramingStruct), M4VS,
+ (M4OSA_Char*)"M4xVSS_internalDecodeGIF: Context of the framing effect");
+ if (aFramingCtx == M4OSA_NULL)
+ {
+ M4OSA_TRACE1_0("Allocation error in videoEditor_populateSettings");
+ }
+ aFramingCtx->pCurrent = M4OSA_NULL; /* Only used by the first element of the chain */
+ aFramingCtx->previousClipTime = -1;
+ aFramingCtx->FramingYuv = M4OSA_NULL;
+ aFramingCtx->FramingRgb = M4OSA_NULL;
+ aFramingCtx->topleft_x
+ = pContext->pEditSettings->Effects[j].xVSS.topleft_x;
+ aFramingCtx->topleft_y
+ = pContext->pEditSettings->Effects[j].xVSS.topleft_y;
+
+
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR", "OF u_width %d",
+ pContext->pEditSettings->Effects[j].xVSS.pFramingBuffer->u_width);
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR", "OF u_height() %d",
+ pContext->pEditSettings->Effects[j].xVSS.pFramingBuffer->u_height);
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR", "OF rgbType() %d",
+ pContext->pEditSettings->Effects[j].xVSS.rgbType);
+
+ aFramingCtx->width = pContext->pEditSettings->Effects[j].xVSS.pFramingBuffer->u_width;
+ aFramingCtx->height = pContext->pEditSettings->Effects[j].xVSS.pFramingBuffer->u_height;
+
+
+ result = M4xVSS_internalConvertARGB888toYUV420_FrammingEffect(pContext->engineContext,
+ &(pContext->pEditSettings->Effects[j]),aFramingCtx,
+ pContext->pEditSettings->Effects[j].xVSS.framingScaledSize);
+ if (result != M4NO_ERROR)
+ {
+ M4OSA_TRACE1_1("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect returned 0x%x", result);
+ }
+
+ //framing buffers are resized to fit the output video resolution.
+ pContext->pEditSettings->Effects[j].xVSS.pFramingBuffer->u_width =
+ aFramingCtx->FramingRgb->u_width;
+ pContext->pEditSettings->Effects[j].xVSS.pFramingBuffer->u_height =
+ aFramingCtx->FramingRgb->u_height;
+
+
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR", "A framing Context aFramingCtx->width = %d",
+ aFramingCtx->FramingRgb->u_width);
+
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR", "A framing Context aFramingCtx->height = %d",
+ aFramingCtx->FramingRgb->u_height);
+
+
+ width = pContext->pEditSettings->Effects[j].xVSS.pFramingBuffer->u_width;
+ height = pContext->pEditSettings->Effects[j].xVSS.pFramingBuffer->u_height;
+
+ pContext->pEditSettings->Effects[j].xVSS.pFramingBuffer->u_stride = width*3;
+
+ //for RGB888
+ pContext->pEditSettings->Effects[j].xVSS.pFramingBuffer->u_topleft = 0;
+
+ pContext->pEditSettings->Effects[j].xVSS.pFramingBuffer->pac_data =
+ (M4VIFI_UInt8 *)M4OSA_malloc(width*height*3,
+ 0x00,(M4OSA_Char *)"pac_data buffer");
+
+ M4OSA_memcpy((M4OSA_Int8 *)&pContext->pEditSettings->\
+ Effects[j].xVSS.pFramingBuffer->\
+ pac_data[0],(M4OSA_Int8 *)&aFramingCtx->FramingRgb->pac_data[0],(width*height*3));
+
+                // For now the RGB type is always RGB888; it may be changed to RGB565 in the future.
+ pContext->pEditSettings->Effects[j].xVSS.rgbType =
+ (M4VSS3GPP_RGBType)M4VSS3GPP_kRGB888; //M4VSS3GPP_kRGB565;
+
+ if (aFramingCtx->FramingYuv != M4OSA_NULL )
+ {
+ if (aFramingCtx->FramingYuv->pac_data != M4OSA_NULL) {
+ M4OSA_free((M4OSA_MemAddr32)aFramingCtx->FramingYuv->pac_data);
+ aFramingCtx->FramingYuv->pac_data = M4OSA_NULL;
+ }
+ }
+ if (aFramingCtx->FramingYuv != M4OSA_NULL) {
+ M4OSA_free((M4OSA_MemAddr32)aFramingCtx->FramingYuv);
+ aFramingCtx->FramingYuv = M4OSA_NULL;
+ }
+ if (aFramingCtx->FramingRgb->pac_data != M4OSA_NULL) {
+ M4OSA_free((M4OSA_MemAddr32)aFramingCtx->FramingRgb->pac_data);
+ aFramingCtx->FramingRgb->pac_data = M4OSA_NULL;
+ }
+ if (aFramingCtx->FramingRgb != M4OSA_NULL) {
+ M4OSA_free((M4OSA_MemAddr32)aFramingCtx->FramingRgb);
+ aFramingCtx->FramingRgb = M4OSA_NULL;
+ }
+ if (aFramingCtx != M4OSA_NULL) {
+ M4OSA_free((M4OSA_MemAddr32)aFramingCtx);
+ aFramingCtx = M4OSA_NULL;
+ }
+ }
+ j++;
+ }
+
+ // Check if the edit settings could be retrieved.
+ M4OSA_TRACE1_1("total clips are = %d",pContext->pEditSettings->uiClipNumber);
+ for (i = 0; i < pContext->pEditSettings->uiClipNumber; i++) {
+ M4OSA_TRACE1_1("clip no = %d",i);
+ properties = pEnv->GetObjectArrayElement(propertiesClipsArray, i);
+ videoEditJava_checkAndThrowIllegalStateException(&needToBeLoaded, pEnv,
+ (M4OSA_NULL == properties),
+ "not initialized");
+ getClipSetting(pEnv,properties, pContext->pEditSettings->pClipList[i]);
+ }
+
+ if (needToBeLoaded) {
+ // Log the edit settings.
+ VIDEOEDIT_LOG_EDIT_SETTINGS(pContext->pEditSettings);
+ }
+ }
+
+ if (audioSettingObject != M4OSA_NULL) {
+ jclass audioSettingClazz = pEnv->FindClass(AUDIO_SETTINGS_CLASS_NAME);
+ videoEditJava_checkAndThrowIllegalStateException(&needToBeLoaded, pEnv,
+ (M4OSA_NULL == audioSettingClazz),
+ "not initialized");
+
+ videoEditJava_checkAndThrowIllegalStateException(&needToBeLoaded, pEnv,
+ (M4OSA_NULL == pContext->mAudioSettings),
+ "not initialized");
+
+ fid = pEnv->GetFieldID(audioSettingClazz,"bRemoveOriginal","Z");
+ pContext->mAudioSettings->bRemoveOriginal = pEnv->GetIntField(audioSettingObject,fid);
+ M4OSA_TRACE1_1("bRemoveOriginal = %d",pContext->mAudioSettings->bRemoveOriginal);
+
+ fid = pEnv->GetFieldID(audioSettingClazz,"channels","I");
+ pContext->mAudioSettings->uiNbChannels = pEnv->GetIntField(audioSettingObject,fid);
+ M4OSA_TRACE1_1("uiNbChannels = %d",pContext->mAudioSettings->uiNbChannels);
+
+ fid = pEnv->GetFieldID(audioSettingClazz,"Fs","I");
+ pContext->mAudioSettings->uiSamplingFrequency = pEnv->GetIntField(audioSettingObject,fid);
+ M4OSA_TRACE1_1("uiSamplingFrequency = %d",pContext->mAudioSettings->uiSamplingFrequency);
+
+ fid = pEnv->GetFieldID(audioSettingClazz,"ExtendedFs","I");
+ pContext->mAudioSettings->uiExtendedSamplingFrequency =
+ pEnv->GetIntField(audioSettingObject,fid);
+ M4OSA_TRACE1_1("uiExtendedSamplingFrequency = %d",
+ pContext->mAudioSettings->uiExtendedSamplingFrequency);
+
+ fid = pEnv->GetFieldID(audioSettingClazz,"startMs","J");
+ pContext->mAudioSettings->uiAddCts
+ = pEnv->GetIntField(audioSettingObject,fid);
+ M4OSA_TRACE1_1("uiAddCts = %d",pContext->mAudioSettings->uiAddCts);
+
+ fid = pEnv->GetFieldID(audioSettingClazz,"volume","I");
+ pContext->mAudioSettings->uiAddVolume
+ = pEnv->GetIntField(audioSettingObject,fid);
+ M4OSA_TRACE1_1("uiAddVolume = %d",pContext->mAudioSettings->uiAddVolume);
+
+ fid = pEnv->GetFieldID(audioSettingClazz,"loop","Z");
+ pContext->mAudioSettings->bLoop
+ = pEnv->GetIntField(audioSettingObject,fid);
+ M4OSA_TRACE1_1("bLoop = %d",pContext->mAudioSettings->bLoop);
+
+ fid = pEnv->GetFieldID(audioSettingClazz,"beginCutTime","J");
+ pContext->mAudioSettings->beginCutMs
+ = pEnv->GetIntField(audioSettingObject,fid);
+ M4OSA_TRACE1_1("begin cut time = %d",pContext->mAudioSettings->beginCutMs);
+
+ fid = pEnv->GetFieldID(audioSettingClazz,"endCutTime","J");
+ pContext->mAudioSettings->endCutMs
+ = pEnv->GetIntField(audioSettingObject,fid);
+ M4OSA_TRACE1_1("end cut time = %d",pContext->mAudioSettings->endCutMs);
+
+ fid = pEnv->GetFieldID(audioSettingClazz,"fileType","I");
+ pContext->mAudioSettings->fileType
+ = pEnv->GetIntField(audioSettingObject,fid);
+ M4OSA_TRACE1_1("fileType = %d",pContext->mAudioSettings->fileType);
+ fid = pEnv->GetFieldID(audioSettingClazz,"pFile","Ljava/lang/String;");
+ str = (jstring)pEnv->GetObjectField(audioSettingObject,fid);
+ pContext->mAudioSettings->pFile
+ = (M4OSA_Char*)pEnv->GetStringUTFChars(str, M4OSA_NULL);
+ M4OSA_TRACE1_1("file name = %s",pContext->mAudioSettings->pFile);
+ VIDEOEDIT_LOG_API(ANDROID_LOG_INFO, "VIDEOEDITOR", "regenerateAudio() file name = %s",\
+ pContext->mAudioSettings->pFile);
+ fid = pEnv->GetFieldID(audioSettingClazz,"pcmFilePath","Ljava/lang/String;");
+ str = (jstring)pEnv->GetObjectField(audioSettingObject,fid);
+ pContext->mAudioSettings->pPCMFilePath =
+ (M4OSA_Char*)pEnv->GetStringUTFChars(str, M4OSA_NULL);
+ VIDEOEDIT_LOG_API(ANDROID_LOG_INFO, "VIDEOEDITOR", "pPCMFilePath -- %s ",\
+ pContext->mAudioSettings->pPCMFilePath);
+ fid = pEnv->GetFieldID(engineClass,"mRegenerateAudio","Z");
+ bool regenerateAudio = pEnv->GetBooleanField(thiz,fid);
+ VIDEOEDIT_LOG_API(ANDROID_LOG_INFO, "VIDEOEDITOR", "regenerateAudio -- %d ",\
+ regenerateAudio);
+ if (regenerateAudio) {
+ M4OSA_TRACE1_0("Calling Generate Audio now");
+ result = videoEditor_generateAudio(pEnv,
+ pContext,
+ (M4OSA_Char*)pContext->mAudioSettings->pFile,
+ (M4OSA_Char*)pContext->mAudioSettings->pPCMFilePath);
+ regenerateAudio = false;
+ pEnv->SetBooleanField(thiz,fid,regenerateAudio);
+ }
+ VIDEOEDIT_LOG_API(ANDROID_LOG_INFO, "VIDEOEDITOR", "regenerateAudio()");
+
+ /* Audio mix and duck */
+ fid = pEnv->GetFieldID(audioSettingClazz,"ducking_threshold","I");
+ pContext->mAudioSettings->uiInDucking_threshold
+ = pEnv->GetIntField(audioSettingObject,fid);
+ M4OSA_TRACE1_1("ducking threshold = %d",
+ pContext->mAudioSettings->uiInDucking_threshold);
+
+ fid = pEnv->GetFieldID(audioSettingClazz,"ducking_lowVolume","I");
+ pContext->mAudioSettings->uiInDucking_lowVolume
+ = pEnv->GetIntField(audioSettingObject,fid);
+ M4OSA_TRACE1_1("ducking lowVolume = %d",
+ pContext->mAudioSettings->uiInDucking_lowVolume);
+
+ fid = pEnv->GetFieldID(audioSettingClazz,"bInDucking_enable","Z");
+ pContext->mAudioSettings->bInDucking_enable
+ = pEnv->GetBooleanField(audioSettingObject,fid);
+        M4OSA_TRACE1_1("ducking enable = %d",
+ pContext->mAudioSettings->bInDucking_enable);
+ } else {
+ if (pContext->mAudioSettings != M4OSA_NULL) {
+ pContext->mAudioSettings->pFile = M4OSA_NULL;
+ pContext->mAudioSettings->bRemoveOriginal = 0;
+ pContext->mAudioSettings->uiNbChannels = 0;
+ pContext->mAudioSettings->uiSamplingFrequency = 0;
+ pContext->mAudioSettings->uiExtendedSamplingFrequency = 0;
+ pContext->mAudioSettings->uiAddCts = 0;
+ pContext->mAudioSettings->uiAddVolume = 0;
+ pContext->mAudioSettings->beginCutMs = 0;
+ pContext->mAudioSettings->endCutMs = 0;
+ pContext->mAudioSettings->fileType = 0;
+ pContext->mAudioSettings->bLoop = 0;
+ pContext->mAudioSettings->uiInDucking_lowVolume = 0;
+ pContext->mAudioSettings->bInDucking_enable = 0;
+ pContext->mAudioSettings->uiBTChannelCount = 0;
+ pContext->mAudioSettings->uiInDucking_threshold = 0;
+
+ fid = pEnv->GetFieldID(engineClass,"mRegenerateAudio","Z");
+ bool regenerateAudio = pEnv->GetBooleanField(thiz,fid);
+ if(!regenerateAudio) {
+ regenerateAudio = true;
+ pEnv->SetBooleanField(thiz,fid,regenerateAudio);
+ }
+ }
+ }
+ if (pContext->pEditSettings != NULL )
+ {
+ result = pContext->mPreviewController->loadEditSettings(pContext->pEditSettings,
+ pContext->mAudioSettings);
+ videoEditJava_checkAndThrowRuntimeException(&needToBeLoaded, pEnv,
+ (M4NO_ERROR != result), result);
+
+ pContext->mPreviewController->setJniCallback((void*)pContext,
+ (jni_progress_callback_fct)jniPreviewProgressCallback);
+
+ j = 0;
+ while (j < nbOverlays)
+ {
+ if (pContext->pEditSettings->Effects[pOverlayIndex[j]].xVSS.pFramingBuffer->pac_data != \
+ M4OSA_NULL) {
+ M4OSA_free((M4OSA_MemAddr32)pContext->pEditSettings->\
+ Effects[pOverlayIndex[j]].xVSS.pFramingBuffer->pac_data);
+ pContext->pEditSettings->\
+ Effects[pOverlayIndex[j]].xVSS.pFramingBuffer->pac_data = M4OSA_NULL;
+ }
+ if (pContext->pEditSettings->Effects[pOverlayIndex[j]].xVSS.pFramingBuffer != M4OSA_NULL) {
+ M4OSA_free((M4OSA_MemAddr32)pContext->pEditSettings->\
+ Effects[pOverlayIndex[j]].xVSS.pFramingBuffer);
+ pContext->pEditSettings->Effects[pOverlayIndex[j]].xVSS.pFramingBuffer = M4OSA_NULL;
+ }
+ j++;
+ }
+ }
+ if (pOverlayIndex != M4OSA_NULL)
+ {
+ M4OSA_free((M4OSA_MemAddr32)pOverlayIndex);
+ pOverlayIndex = M4OSA_NULL;
+ }
+ return;
+}
+
+static void
+videoEditor_startPreview(
+ JNIEnv* pEnv,
+ jobject thiz,
+ jobject mSurface,
+ jlong fromMs,
+ jlong toMs,
+ jint callbackInterval,
+ jboolean loop)
+{
+ bool needToBeLoaded = true;
+ M4OSA_ERR result = M4NO_ERROR;
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR", "videoEditor_startPreview()");
+
+ ManualEditContext* pContext = M4OSA_NULL;
+ // Get the context.
+ pContext = (ManualEditContext*)videoEditClasses_getContext(&needToBeLoaded, pEnv, thiz);
+
+ // Make sure that the context was set.
+ videoEditJava_checkAndThrowIllegalStateException(&needToBeLoaded, pEnv,
+ (M4OSA_NULL == pContext),
+ "not initialized");
+
+ videoEditJava_checkAndThrowIllegalStateException(&needToBeLoaded, pEnv,
+ (M4OSA_NULL == pContext->mAudioSettings),
+ "not initialized");
+ // Make sure that the context was set.
+ videoEditJava_checkAndThrowIllegalStateException(&needToBeLoaded, pEnv,
+ (M4OSA_NULL == pContext->mPreviewController),
+ "not initialized");
+
+ // Validate the mSurface parameter.
+ videoEditJava_checkAndThrowIllegalArgumentException(&needToBeLoaded, pEnv,
+ (NULL == mSurface),
+ "mSurface is null");
+
+ jclass surfaceClass = pEnv->FindClass("android/view/Surface");
+ videoEditJava_checkAndThrowIllegalStateException(&needToBeLoaded, pEnv,
+ (M4OSA_NULL == surfaceClass),
+ "not initialized");
+ //jfieldID surface_native = pEnv->GetFieldID(surfaceClass, "mSurface", "I");
+ jfieldID surface_native
+ = pEnv->GetFieldID(surfaceClass, ANDROID_VIEW_SURFACE_JNI_ID, "I");
+
+ videoEditJava_checkAndThrowIllegalStateException(&needToBeLoaded, pEnv,
+ (M4OSA_NULL == surface_native),
+ "not initialized");
+
+ Surface* const p = (Surface*)pEnv->GetIntField(mSurface, surface_native);
+
+ sp<Surface> previewSurface = sp<Surface>(p);
+
+ result = pContext->mPreviewController->setSurface(previewSurface);
+ videoEditJava_checkAndThrowRuntimeException(&needToBeLoaded, pEnv,
+ (M4NO_ERROR != result), result);
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR", "fromMs=%ld, toMs=%ld",
+ (M4OSA_UInt32)fromMs, (M4OSA_Int32)toMs);
+
+ result = pContext->mPreviewController->startPreview((M4OSA_UInt32)fromMs,
+ (M4OSA_Int32)toMs,
+ (M4OSA_UInt16)callbackInterval,
+ (M4OSA_Bool)loop);
+ videoEditJava_checkAndThrowRuntimeException(&needToBeLoaded, pEnv, (M4NO_ERROR != result), result);
+}
+
+
+static jobject
+videoEditor_getProperties(
+ JNIEnv* pEnv,
+ jobject thiz,
+ jstring file)
+{
+ jobject object = M4OSA_NULL;
+ object = videoEditProp_getProperties(pEnv,thiz,file);
+
+ return object;
+
+}
+static int videoEditor_getPixels(
+ JNIEnv* env,
+ jobject thiz,
+ jstring path,
+ jintArray pixelArray,
+ M4OSA_UInt32 width,
+ M4OSA_UInt32 height,
+ M4OSA_UInt32 timeMS)
+{
+
+ M4OSA_ERR err = M4NO_ERROR;
+ M4OSA_Context mContext = M4OSA_NULL;
+ jint* m_dst32 = M4OSA_NULL;
+
+
+ // Add a text marker (the condition must always be true).
+ ADD_TEXT_MARKER_FUN(NULL != env)
+
+ const char *pString = env->GetStringUTFChars(path, NULL);
+ if (pString == M4OSA_NULL) {
+ if (env != NULL) {
+ jniThrowException(env, "java/lang/RuntimeException", "Input string null");
+ }
+ return M4ERR_ALLOC;
+ }
+
+ err = ThumbnailOpen(&mContext,(const M4OSA_Char*)pString, M4OSA_FALSE);
+    if (err != M4NO_ERROR || mContext == M4OSA_NULL) {
+        if (pString != NULL) {
+            env->ReleaseStringUTFChars(path, pString);
+        }
+        if (env != NULL) {
+            jniThrowException(env, "java/lang/RuntimeException", "ThumbnailOpen failed");
+        }
+        // Do not continue with a NULL thumbnail context.
+        return err;
+    }
+
+ m_dst32 = env->GetIntArrayElements(pixelArray, NULL);
+
+ err = ThumbnailGetPixels32(mContext, (M4OSA_Int32 *)m_dst32, width,height,&timeMS);
+ if (err != M4NO_ERROR ) {
+ if (env != NULL) {
+ jniThrowException(env, "java/lang/RuntimeException",\
+ "ThumbnailGetPixels32 failed");
+ }
+ }
+ env->ReleaseIntArrayElements(pixelArray, m_dst32, 0);
+
+ ThumbnailClose(mContext);
+ if (pString != NULL) {
+ env->ReleaseStringUTFChars(path, pString);
+ }
+
+ return timeMS;
+}
+
+static int videoEditor_getPixelsList(
+ JNIEnv* env,
+ jobject thiz,
+ jstring path,
+ jintArray pixelArray,
+ M4OSA_UInt32 width,
+ M4OSA_UInt32 height,
+ M4OSA_UInt32 deltatimeMS,
+ M4OSA_UInt32 noOfThumbnails,
+ M4OSA_UInt32 startTime,
+ M4OSA_UInt32 endTime)
+{
+
+ M4OSA_ERR err;
+ M4OSA_Context mContext = M4OSA_NULL;
+ jint* m_dst32;
+ M4OSA_UInt32 timeMS = startTime;
+ int arrayOffset = 0;
+
+
+
+ // Add a text marker (the condition must always be true).
+ ADD_TEXT_MARKER_FUN(NULL != env)
+
+ const char *pString = env->GetStringUTFChars(path, NULL);
+ if (pString == M4OSA_NULL) {
+ if (env != NULL) {
+ jniThrowException(env, "java/lang/RuntimeException", "Input string null");
+ }
+ return M4ERR_ALLOC;
+ }
+
+ err = ThumbnailOpen(&mContext,(const M4OSA_Char*)pString, M4OSA_FALSE);
+ if (err != M4NO_ERROR || mContext == M4OSA_NULL) {
+ if (env != NULL) {
+ jniThrowException(env, "java/lang/RuntimeException", "ThumbnailOpen failed");
+ }
+ if (pString != NULL) {
+ env->ReleaseStringUTFChars(path, pString);
+ }
+ return err;
+ }
+
+ m_dst32 = env->GetIntArrayElements(pixelArray, NULL);
+
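+    // Extract one thumbnail every deltatimeMS, writing each ARGB frame at the
+    // next offset inside the shared pixel array.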
+ do {
+ err = ThumbnailGetPixels32(mContext, ((M4OSA_Int32 *)m_dst32 + arrayOffset),
+ width,height,&timeMS);
+ if (err != M4NO_ERROR ) {
+ if (env != NULL) {
+ jniThrowException(env, "java/lang/RuntimeException",\
+ "ThumbnailGetPixels32 failed");
+ }
+ return err;
+ }
+ timeMS += deltatimeMS;
+ arrayOffset += (width * height * 4);
+ noOfThumbnails--;
+ } while(noOfThumbnails > 0);
+
+ env->ReleaseIntArrayElements(pixelArray, m_dst32, 0);
+
+ ThumbnailClose(mContext);
+ if (pString != NULL) {
+ env->ReleaseStringUTFChars(path, pString);
+ }
+
+ return err;
+
+}
+
+static M4OSA_ERR
+videoEditor_toUTF8Fct(
+ M4OSA_Void* pBufferIn,
+ M4OSA_UInt8* pBufferOut,
+ M4OSA_UInt32* bufferOutSize)
+{
+ M4OSA_ERR result = M4NO_ERROR;
+ M4OSA_UInt32 length = 0;
+
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR", "videoEditor_toUTF8Fct()");
+
+ // Determine the length of the input buffer.
+ if (M4OSA_NULL != pBufferIn)
+ {
+ length = M4OSA_chrLength((M4OSA_Char *)pBufferIn);
+ }
+
+ // Check if the output buffer is large enough to hold the input buffer.
+ if ((*bufferOutSize) > length)
+ {
+ // Check if the input buffer is not M4OSA_NULL.
+ if (M4OSA_NULL != pBufferIn)
+ {
+ // Copy the temp path, ignore the result.
+ M4OSA_chrNCopy((M4OSA_Char *)pBufferOut, (M4OSA_Char *)pBufferIn, length);
+ }
+ else
+ {
+ // Set the output buffer to an empty string.
+ (*(M4OSA_Char *)pBufferOut) = 0;
+ }
+ }
+ else
+ {
+ // The buffer is too small.
+ result = M4xVSSWAR_BUFFER_OUT_TOO_SMALL;
+ }
+
+ // Return the buffer output size.
+ (*bufferOutSize) = length + 1;
+
+ // Return the result.
+ return(result);
+}
+
+static M4OSA_ERR
+videoEditor_fromUTF8Fct(
+ M4OSA_UInt8* pBufferIn,
+ M4OSA_Void* pBufferOut,
+ M4OSA_UInt32* bufferOutSize)
+{
+ M4OSA_ERR result = M4NO_ERROR;
+ M4OSA_UInt32 length = 0;
+
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR", "videoEditor_fromUTF8Fct()");
+
+ // Determine the length of the input buffer.
+ if (M4OSA_NULL != pBufferIn)
+ {
+ length = M4OSA_chrLength((M4OSA_Char *)pBufferIn);
+ }
+
+ // Check if the output buffer is large enough to hold the input buffer.
+ if ((*bufferOutSize) > length)
+ {
+ // Check if the input buffer is not M4OSA_NULL.
+ if (M4OSA_NULL != pBufferIn)
+ {
+ // Copy the temp path, ignore the result.
+ M4OSA_chrNCopy((M4OSA_Char *)pBufferOut, (M4OSA_Char *)pBufferIn, length);
+ }
+ else
+ {
+ // Set the output buffer to an empty string.
+ (*(M4OSA_Char *)pBufferOut) = 0;
+ }
+ }
+ else
+ {
+ // The buffer is too small.
+ result = M4xVSSWAR_BUFFER_OUT_TOO_SMALL;
+ }
+
+ // Return the buffer output size.
+ (*bufferOutSize) = length + 1;
+
+ // Return the result.
+ return(result);
+}
+
+static M4OSA_ERR
+videoEditor_getTextRgbBufferFct(
+ M4OSA_Void* pRenderingData,
+ M4OSA_Void* pTextBuffer,
+ M4OSA_UInt32 textBufferSize,
+ M4VIFI_ImagePlane** pOutputPlane)
+{
+ M4OSA_ERR result = M4NO_ERROR;
+
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR", "videoEditor_getTextRgbBufferFct()");
+
+ // Return the result.
+ return(result);
+}
+
+static void
+videoEditor_callOnProgressUpdate(
+ ManualEditContext* pContext,
+ int task,
+ int progress)
+{
+ JNIEnv* pEnv = NULL;
+
+
+ // Attach the current thread.
+ pContext->pVM->AttachCurrentThread(&pEnv, NULL);
+
+
+ // Call the on completion callback.
+ pEnv->CallVoidMethod(pContext->engine, pContext->onProgressUpdateMethodId,
+ videoEditJava_getEngineCToJava(task), progress);
+
+
+ // Detach the current thread.
+ pContext->pVM->DetachCurrentThread();
+}
+
+static void
+videoEditor_freeContext(
+ JNIEnv* pEnv,
+ ManualEditContext** ppContext)
+{
+ ManualEditContext* pContext = M4OSA_NULL;
+
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR", "videoEditor_freeContext");
+
+ // Set the context pointer.
+ pContext = (*ppContext);
+
+ // Check if the context was set.
+ if (M4OSA_NULL != pContext)
+ {
+ // Check if a global reference to the engine object was set.
+ if (NULL != pContext->engine)
+ {
+ // Free the global reference.
+ pEnv->DeleteGlobalRef(pContext->engine);
+ pContext->engine = NULL;
+ }
+
+ // Check if the temp path was set.
+ if (M4OSA_NULL != pContext->initParams.pTempPath)
+ {
+ // Free the memory allocated for the temp path.
+ videoEditOsal_free(pContext->initParams.pTempPath);
+ pContext->initParams.pTempPath = M4OSA_NULL;
+ }
+
+ // Check if the file writer was set.
+ if (M4OSA_NULL != pContext->initParams.pFileWritePtr)
+ {
+ // Free the memory allocated for the file writer.
+ videoEditOsal_free(pContext->initParams.pFileWritePtr);
+ pContext->initParams.pFileWritePtr = M4OSA_NULL;
+ }
+
+ // Check if the file reader was set.
+ if (M4OSA_NULL != pContext->initParams.pFileReadPtr)
+ {
+ // Free the memory allocated for the file reader.
+ videoEditOsal_free(pContext->initParams.pFileReadPtr);
+ pContext->initParams.pFileReadPtr = M4OSA_NULL;
+ }
+
+ // Free the memory allocated for the context.
+ videoEditOsal_free(pContext);
+ pContext = M4OSA_NULL;
+
+ // Reset the context pointer.
+ (*ppContext) = M4OSA_NULL;
+ }
+}
+
+static jobject
+videoEditor_getVersion(
+ JNIEnv* pEnv,
+ jobject thiz)
+{
+ bool isSuccessful = true;
+ jobject version = NULL;
+ M4_VersionInfo versionInfo = {0, 0, 0, 0};
+ M4OSA_ERR result = M4NO_ERROR;
+
+ VIDEOEDIT_LOG_API(ANDROID_LOG_INFO, "VIDEO_EDITOR", "videoEditor_getVersion()");
+
+ versionInfo.m_structSize = sizeof(versionInfo);
+ versionInfo.m_major = VIDEOEDITOR_VERSION_MAJOR;
+ versionInfo.m_minor = VIDEOEDITOR_VERSION_MINOR;
+ versionInfo.m_revision = VIDEOEDITOR_VERSION_REVISION;
+
+ VIDEOEDIT_LOG_API(ANDROID_LOG_INFO, "VIDEO_EDITOR", "videoEditor_getVersion() major %d,\
+ minor %d, revision %d", versionInfo.m_major, versionInfo.m_minor, versionInfo.m_revision);
+
+ // Create a version object.
+ videoEditClasses_createVersion(&isSuccessful, pEnv, &versionInfo, &version);
+
+ // Return the version object.
+ return(version);
+}
+
+static void
+videoEditor_init(
+ JNIEnv* pEnv,
+ jobject thiz,
+ jstring tempPath,
+ jstring libraryPath)
+{
+ bool initialized = true;
+ ManualEditContext* pContext = M4OSA_NULL;
+ VideoEditJava_EngineMethodIds methodIds = {NULL};
+ M4OSA_Char* pLibraryPath = M4OSA_NULL;
+ M4OSA_Char* pTextRendererPath = M4OSA_NULL;
+ M4OSA_UInt32 textRendererPathLength = 0;
+ M4OSA_ERR result = M4NO_ERROR;
+
+ VIDEOEDIT_LOG_API(ANDROID_LOG_INFO, "VIDEO_EDITOR", "videoEditor_init()");
+
+ // Add a text marker (the condition must always be true).
+ ADD_TEXT_MARKER_FUN(NULL != pEnv)
+
+ // Get the context.
+ pContext = (ManualEditContext*)videoEditClasses_getContext(&initialized, pEnv, thiz);
+
+ // Get the engine method ids.
+ videoEditJava_getEngineMethodIds(&initialized, pEnv, &methodIds);
+
+ // Validate the tempPath parameter.
+ videoEditJava_checkAndThrowIllegalArgumentException(&initialized, pEnv,
+ (NULL == tempPath),
+ "tempPath is null");
+
+ // Make sure that the context was not set already.
+ videoEditJava_checkAndThrowIllegalStateException(&initialized, pEnv,
+ (M4OSA_NULL != pContext),
+ "already initialized");
+
+ // Check if the initialization succeeded (required because of dereferencing of psContext,
+ // and freeing when initialization fails).
+ if (initialized)
+ {
+ // Allocate a new context.
+ pContext = new ManualEditContext;
+
+ // Check if the initialization succeeded (required because of dereferencing of psContext).
+ //if (initialized)
+ if (pContext != NULL)
+ {
+ // Set the state to not initialized.
+ pContext->state = ManualEditState_NOT_INITIALIZED;
+
+ // Allocate a file read pointer structure.
+ pContext->initParams.pFileReadPtr =
+ (M4OSA_FileReadPointer*)videoEditOsal_alloc(&initialized, pEnv,
+ sizeof(M4OSA_FileReadPointer), "FileReadPointer");
+
+ // Allocate a file write pointer structure.
+ pContext->initParams.pFileWritePtr =
+ (M4OSA_FileWriterPointer*)videoEditOsal_alloc(&initialized, pEnv,
+ sizeof(M4OSA_FileWriterPointer), "FileWriterPointer");
+
+ // Get the temp path.
+ M4OSA_Char* tmpString =
+ (M4OSA_Char *)videoEditJava_getString(&initialized, pEnv, tempPath,
+ NULL, M4OSA_NULL);
+            // Allocate room for the path, the trailing '/' and the null terminator.
+            pContext->initParams.pTempPath = (M4OSA_Char *)
+                M4OSA_malloc(M4OSA_chrLength(tmpString) + 2, 0x0,
+                (M4OSA_Char *)"tempPath");
+            // Initialize the first char so that the concatenations below work.
+            M4OSA_Char *ptmpChar = (M4OSA_Char*)pContext->initParams.pTempPath;
+            ptmpChar[0] = 0x00;
+            M4OSA_chrNCat((M4OSA_Char*)pContext->initParams.pTempPath, tmpString,
+                M4OSA_chrLength(tmpString));
+            M4OSA_chrNCat((M4OSA_Char*)pContext->initParams.pTempPath, (M4OSA_Char*)"/", 1);
+            M4OSA_free((M4OSA_MemAddr32)tmpString);
+ }
+
+ // Check if the initialization succeeded
+ // (required because of dereferencing of pContext, pFileReadPtr and pFileWritePtr).
+ if (initialized)
+ {
+
+ // Initialize the OSAL file system function pointers.
+ videoEditOsal_getFilePointers(pContext->initParams.pFileReadPtr ,
+ pContext->initParams.pFileWritePtr);
+
+ // Set the UTF8 conversion functions.
+ pContext->initParams.pConvToUTF8Fct = videoEditor_toUTF8Fct;
+ pContext->initParams.pConvFromUTF8Fct = videoEditor_fromUTF8Fct;
+
+ // Set the callback method ids.
+ pContext->onProgressUpdateMethodId = methodIds.onProgressUpdate;
+
+ // Set the virtual machine.
+ pEnv->GetJavaVM(&(pContext->pVM));
+
+ // Create a global reference to the engine object.
+ pContext->engine = pEnv->NewGlobalRef(thiz);
+
+ // Check if the global reference could be created.
+ videoEditJava_checkAndThrowRuntimeException(&initialized, pEnv,
+ (NULL == pContext->engine), M4NO_ERROR);
+ }
+
+ // Check if the initialization succeeded (required because of dereferencing of pContext).
+ if (initialized)
+ {
+ // Log the API call.
+ VIDEOEDIT_LOG_API(ANDROID_LOG_INFO, "VIDEO_EDITOR", "M4xVSS_Init()");
+
+            // Initialize the xVSS (video studio service) engine library.
+ result = M4xVSS_Init(&pContext->engineContext, &pContext->initParams);
+
+ // Log the result.
+ VIDEOEDIT_LOG_RESULT(ANDROID_LOG_INFO, "VIDEO_EDITOR",
+ videoEditOsal_getResultString(result));
+
+ // Check if the library could be initialized.
+ videoEditJava_checkAndThrowRuntimeException(&initialized, pEnv,
+ (M4NO_ERROR != result), result);
+ }
+
+ if(initialized)
+ {
+ pContext->mPreviewController = new VideoEditorPreviewController();
+ videoEditJava_checkAndThrowIllegalStateException(&initialized, pEnv,
+ (M4OSA_NULL == pContext->mPreviewController),
+ "not initialized");
+ pContext->mAudioSettings =
+ (M4xVSS_AudioMixingSettings *)
+ M4OSA_malloc(sizeof(M4xVSS_AudioMixingSettings),0x0,
+ (M4OSA_Char *)"mAudioSettings");
+ videoEditJava_checkAndThrowIllegalStateException(&initialized, pEnv,
+ (M4OSA_NULL == pContext->mAudioSettings),
+ "not initialized");
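+            // Reset the audio mixing settings to known default values.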
+ pContext->mAudioSettings->pFile = M4OSA_NULL;
+ pContext->mAudioSettings->bRemoveOriginal = 0;
+ pContext->mAudioSettings->uiNbChannels = 0;
+ pContext->mAudioSettings->uiSamplingFrequency = 0;
+ pContext->mAudioSettings->uiExtendedSamplingFrequency = 0;
+ pContext->mAudioSettings->uiAddCts = 0;
+ pContext->mAudioSettings->uiAddVolume = 0;
+ pContext->mAudioSettings->beginCutMs = 0;
+ pContext->mAudioSettings->endCutMs = 0;
+ pContext->mAudioSettings->fileType = 0;
+ pContext->mAudioSettings->bLoop = 0;
+ pContext->mAudioSettings->uiInDucking_lowVolume = 0;
+ pContext->mAudioSettings->bInDucking_enable = 0;
+ pContext->mAudioSettings->uiBTChannelCount = 0;
+ pContext->mAudioSettings->uiInDucking_threshold = 0;
+ }
+        // Check if the whole initialization succeeded.
+ if (initialized)
+ {
+ // Set the state to initialized.
+ pContext->state = ManualEditState_INITIALIZED;
+ }
+
+ // Set the context.
+ videoEditClasses_setContext(&initialized, pEnv, thiz, (void* )pContext);
+ pLibraryPath = M4OSA_NULL;
+
+ pContext->pEditSettings = M4OSA_NULL;
+ // Cleanup if anything went wrong during initialization.
+ if (!initialized)
+ {
+ // Free the context.
+ videoEditor_freeContext(pEnv, &pContext);
+ }
+ }
+}
+
+/*+ PROGRESS CB */
+static
+M4OSA_ERR videoEditor_processClip(
+ JNIEnv* pEnv,
+ jobject thiz,
+ int unuseditemID) {
+
+ bool loaded = true;
+ ManualEditContext* pContext = NULL;
+ M4OSA_UInt8 progress = 0;
+ M4OSA_UInt8 progressBase = 0;
+ M4OSA_UInt8 lastProgress = 0;
+ M4OSA_ERR result = M4NO_ERROR;
+
+ // Get the context.
+ pContext = (ManualEditContext*)videoEditClasses_getContext(&loaded, pEnv, thiz);
+
+ // Make sure that the context was set.
+ videoEditJava_checkAndThrowIllegalStateException(&loaded, pEnv,
+ (M4OSA_NULL == pContext),
+ "not initialized");
+
+ // We start in Analyzing state
+ pContext->state = ManualEditState_ANALYZING;
+ M4OSA_ERR completionResult = M4VSS3GPP_WAR_ANALYZING_DONE;
+ ManualEditState completionState = ManualEditState_OPENED;
+ ManualEditState errorState = ManualEditState_ANALYZING_ERROR;
+
+ // While analyzing progress goes from 0 to 50
+ progressBase = 0;
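+
+    // Processing runs in two passes driven by M4xVSS_Step(): the analysis pass
+    // (ANALYZING until M4VSS3GPP_WAR_ANALYZING_DONE, mapped to 0..50% progress) and
+    // the saving pass (SAVING until M4VSS3GPP_WAR_SAVING_DONE, mapped to 50..100%).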
+
+ // Set the text rendering function.
+ if (M4OSA_NULL != pContext->pTextRendererFunction)
+ {
+ // Use the text renderer function in the library.
+ pContext->pEditSettings->xVSS.pTextRenderingFct = pContext->pTextRendererFunction;
+ }
+ else
+ {
+ // Use the internal text renderer function.
+ pContext->pEditSettings->xVSS.pTextRenderingFct = videoEditor_getTextRgbBufferFct;
+ }
+
+ // Send the command.
+ LOGV("videoEditor_processClip ITEM %d Calling M4xVSS_SendCommand()", unuseditemID);
+ result = M4xVSS_SendCommand(pContext->engineContext, pContext->pEditSettings);
+ LOGV("videoEditor_processClip ITEM %d M4xVSS_SendCommand() returned 0x%x",
+ unuseditemID, (unsigned int) result);
+
+    // Filter out warning codes (we only care about errors here)
+ if ((result == M4VSS3GPP_WAR_TRANSCODING_NECESSARY)
+ || (result == M4VSS3GPP_WAR_OUTPUTFILESIZE_EXCEED)) {
+ result = M4NO_ERROR;
+ }
+
+ // Send the first progress indication (=0)
+ LOGV("VERY FIRST PROGRESS videoEditor_processClip ITEM %d Progress indication %d",
+ unuseditemID, progress);
+ pEnv->CallVoidMethod(pContext->engine, pContext->onProgressUpdateMethodId,
+ unuseditemID, progress);
+
+    // Step the engine until processing completes or is stopped.
+    // TODO: add a dedicated stopping mechanism.
+ LOGV("videoEditor_processClip Entering processing loop");
+ while((result == M4NO_ERROR)
+ &&(pContext->state!=ManualEditState_SAVED)
+ &&(pContext->state!=ManualEditState_STOPPING)) {
+
+ // Perform the next processing step.
+ //LOGV("LVME_processClip Entering M4xVSS_Step()");
+ result = M4xVSS_Step(pContext->engineContext, &progress);
+ //LOGV("LVME_processClip M4xVSS_Step() returned 0x%x", (unsigned int)result);
+
+        // Map the engine progress onto the overall 0..100% range.
+ progress = progressBase + progress/2;
+ if (progress != lastProgress)
+ {
+ // Send a progress notification.
+ LOGV("videoEditor_processClip ITEM %d Progress indication %d",
+ unuseditemID, progress);
+ pEnv->CallVoidMethod(pContext->engine,
+ pContext->onProgressUpdateMethodId,
+ unuseditemID, progress);
+ lastProgress = progress;
+ }
+
+ // Check if processing has been completed.
+ if (result == completionResult)
+ {
+            // Set the state to the completion state.
+ pContext->state = completionState;
+ LOGV("videoEditor_processClip ITEM %d STATE changed to %d",
+ unuseditemID, pContext->state);
+
+ // Reset progress indication, as we switch to next state
+ lastProgress = 0;
+
+ // Reset error code, as we start a new round of processing
+ result = M4NO_ERROR;
+
+            // Check if the analysis pass has just completed.
+ if (pContext->state == ManualEditState_OPENED) {
+ // File is opened, we must start saving it
+ LOGV("videoEditor_processClip Calling M4xVSS_SaveStart()");
+ result = M4xVSS_SaveStart(pContext->engineContext,
+ (M4OSA_Char*)pContext->pEditSettings->pOutputFile,
+ (M4OSA_UInt32)pContext->pEditSettings->uiOutputPathSize);
+ LOGV("videoEditor_processClip ITEM %d SaveStart() returned 0x%x",
+ unuseditemID, (unsigned int) result);
+
+ // Set the state to saving.
+ pContext->state = ManualEditState_SAVING;
+ completionState = ManualEditState_SAVED;
+ completionResult = M4VSS3GPP_WAR_SAVING_DONE;
+ errorState = ManualEditState_SAVING_ERROR;
+
+ // While saving progress goes from 50 to 100
+ progressBase = 50;
+ }
+            // Check if saving has completed.
+ else if (pContext->state == ManualEditState_SAVED) {
+ if (progress != 100) {
+ // Send a progress notification.
+ progress = 100;
+ LOGI("videoEditor_processClip ITEM %d Last progress indication %d",
+ unuseditemID, progress);
+ pEnv->CallVoidMethod(pContext->engine,
+ pContext->onProgressUpdateMethodId,
+ unuseditemID, progress);
+ }
+
+ // Stop the encoding.
+ LOGV("videoEditor_processClip Calling M4xVSS_SaveStop()");
+ result = M4xVSS_SaveStop(pContext->engineContext);
+ LOGV("videoEditor_processClip M4xVSS_SaveStop() returned 0x%x", result);
+ }
+ // Other states are unexpected
+ else {
+ result = M4ERR_STATE;
+ LOGE("videoEditor_processClip ITEM %d State ERROR 0x%x",
+ unuseditemID, (unsigned int) result);
+ }
+ }
+
+ // Check if an error occurred.
+ if (result != M4NO_ERROR)
+ {
+ // Set the state to the error state.
+ pContext->state = errorState;
+
+ // Log the result.
+ LOGE("videoEditor_processClip ITEM %d Processing ERROR 0x%x",
+ unuseditemID, (unsigned int) result);
+ }
+ }
+
+ // Return the error result
+ LOGE("videoEditor_processClip ITEM %d END 0x%x", unuseditemID, (unsigned int) result);
+ return result;
+}
+/*+ PROGRESS CB */
+
+static int
+videoEditor_generateClip(
+ JNIEnv* pEnv,
+ jobject thiz,
+ jobject settings) {
+ bool loaded = true;
+ ManualEditContext* pContext = M4OSA_NULL;
+ M4OSA_ERR result = M4NO_ERROR;
+
+ LOGV("videoEditor_generateClip START");
+
+ // Get the context.
+ pContext = (ManualEditContext*)videoEditClasses_getContext(&loaded, pEnv, thiz);
+
+ Mutex::Autolock autoLock(pContext->mLock);
+
+ // Validate the settings parameter.
+ videoEditJava_checkAndThrowIllegalArgumentException(&loaded, pEnv,
+ (NULL == settings),
+ "settings is null");
+
+ // Make sure that the context was set.
+ videoEditJava_checkAndThrowIllegalStateException(&loaded, pEnv,
+ (M4OSA_NULL == pContext),
+ "not initialized");
+
+ // Load the clip settings
+ LOGV("videoEditor_generateClip Calling videoEditor_loadSettings");
+ videoEditor_loadSettings(pEnv, thiz, settings);
+ LOGV("videoEditor_generateClip videoEditor_loadSettings returned");
+
+ // Generate the clip
+ LOGV("videoEditor_generateClip Calling LVME_processClip");
+ result = videoEditor_processClip(pEnv, thiz, 0 /*item id is unused*/);
+ LOGV("videoEditor_generateClip videoEditor_processClip returned 0x%x", result);
+
+ // Free up memory (whatever the result)
+ videoEditor_unloadSettings(pEnv, thiz);
+ //LVME_release(pEnv, thiz);
+
+ LOGV("videoEditor_generateClip END 0x%x", (unsigned int) result);
+ return result;
+}
+
+static void
+videoEditor_loadSettings(
+ JNIEnv* pEnv,
+ jobject thiz,
+ jobject settings)
+{
+ bool needToBeLoaded = true;
+ ManualEditContext* pContext = M4OSA_NULL;
+
+ VIDEOEDIT_LOG_API(ANDROID_LOG_INFO, "VIDEO_EDITOR", "videoEditor_loadSettings()");
+
+ // Add a code marker (the condition must always be true).
+ ADD_CODE_MARKER_FUN(NULL != pEnv)
+
+ // Get the context.
+ pContext = (ManualEditContext*)videoEditClasses_getContext(&needToBeLoaded,
+ pEnv, thiz);
+
+ // Validate the settings parameter.
+ videoEditJava_checkAndThrowIllegalArgumentException(&needToBeLoaded, pEnv,
+ (NULL == settings),
+ "settings is null");
+
+ // Make sure that the context was set.
+ videoEditJava_checkAndThrowIllegalStateException(&needToBeLoaded, pEnv,
+ (M4OSA_NULL == pContext),
+ "not initialized");
+
+ // Check if the context is valid (required because the context is dereferenced).
+ if (needToBeLoaded)
+ {
+ // Make sure that we are in a correct state.
+ videoEditJava_checkAndThrowIllegalStateException(&needToBeLoaded, pEnv,
+ (pContext->state != ManualEditState_INITIALIZED),
+ "settings already loaded");
+
+        // Free any previously loaded edit settings, then retrieve the new ones.
+        if (pContext->pEditSettings != M4OSA_NULL) {
+ videoEditClasses_freeEditSettings(&pContext->pEditSettings);
+ pContext->pEditSettings = M4OSA_NULL;
+ }
+ videoEditClasses_getEditSettings(&needToBeLoaded, pEnv, settings,
+ &pContext->pEditSettings,true);
+ }
+
+ // Check if the edit settings could be retrieved.
+ if (needToBeLoaded)
+ {
+ // Log the edit settings.
+ VIDEOEDIT_LOG_API(ANDROID_LOG_INFO, "VIDEO_EDITOR", "inside load settings");
+ VIDEOEDIT_LOG_EDIT_SETTINGS(pContext->pEditSettings);
+ }
+ LOGV("videoEditor_loadSettings END");
+}
+
+
+
+static void
+videoEditor_unloadSettings(
+ JNIEnv* pEnv,
+ jobject thiz)
+{
+ bool needToBeUnLoaded = true;
+ ManualEditContext* pContext = M4OSA_NULL;
+ M4OSA_ERR result = M4NO_ERROR;
+
+ VIDEOEDIT_LOG_API(ANDROID_LOG_INFO, "VIDEO_EDITOR", "videoEditor_unloadSettings()");
+
+ // Get the context.
+ pContext = (ManualEditContext*)videoEditClasses_getContext(&needToBeUnLoaded, pEnv, thiz);
+
+ // Make sure that the context was set.
+ videoEditJava_checkAndThrowIllegalStateException(&needToBeUnLoaded, pEnv,
+ (M4OSA_NULL == pContext),
+ "not initialized");
+
+ // Check if the context is valid (required because the context is dereferenced).
+ if (needToBeUnLoaded)
+ {
+ LOGV("videoEditor_unloadSettings state %d", pContext->state);
+ // Make sure that we are in a correct state.
+ videoEditJava_checkAndThrowIllegalStateException(&needToBeUnLoaded, pEnv,
+ ((pContext->state != ManualEditState_ANALYZING ) &&
+ (pContext->state != ManualEditState_ANALYZING_ERROR) &&
+ (pContext->state != ManualEditState_OPENED ) &&
+ (pContext->state != ManualEditState_SAVING_ERROR ) &&
+ (pContext->state != ManualEditState_SAVED ) &&
+ (pContext->state != ManualEditState_STOPPING ) ),
+ "videoEditor_unloadSettings no load settings in progress");
+ }
+
+ // Check if we are in a correct state.
+ if (needToBeUnLoaded)
+ {
+        // Check if the settings can be unloaded.
+ if (needToBeUnLoaded)
+ {
+ // Close the command.
+ LOGV("videoEditor_unloadSettings Calling M4xVSS_CloseCommand()");
+ result = M4xVSS_CloseCommand(pContext->engineContext);
+ LOGV("videoEditor_unloadSettings M4xVSS_CloseCommand() returned 0x%x",
+ (unsigned int)result);
+
+ // Check if the command could be closed.
+ videoEditJava_checkAndThrowRuntimeException(&needToBeUnLoaded, pEnv,
+ (M4NO_ERROR != result), result);
+ }
+
+ // Check if the command could be closed.
+ if (needToBeUnLoaded)
+ {
+            // The edit settings are not freed here; videoEditor_stopEncoding() and
+            // videoEditor_release() free them.
+            //videoEditClasses_freeEditSettings(&pContext->pEditSettings);
+
+ // Reset the thread result.
+ pContext->threadResult = M4NO_ERROR;
+
+ // Reset the thread progress.
+ pContext->threadProgress = 0;
+
+ // Set the state to initialized.
+ pContext->state = ManualEditState_INITIALIZED;
+ }
+ }
+}
+
+static void
+videoEditor_stopEncoding(
+ JNIEnv* pEnv,
+ jobject thiz)
+{
+ bool stopped = true;
+ ManualEditContext* pContext = M4OSA_NULL;
+ M4OSA_ERR result = M4NO_ERROR;
+
+ LOGV("videoEditor_stopEncoding START");
+
+ // Get the context.
+ pContext = (ManualEditContext*)videoEditClasses_getContext(&stopped, pEnv, thiz);
+
+ // Change state and get Lock
+ // This will ensure the generateClip function exits
+ pContext->state = ManualEditState_STOPPING;
+ Mutex::Autolock autoLock(pContext->mLock);
+
+ // Make sure that the context was set.
+ videoEditJava_checkAndThrowIllegalStateException(&stopped, pEnv,
+ (M4OSA_NULL == pContext),
+ "not initialized");
+
+ if (stopped) {
+
+ // Check if the command should be closed.
+ if (pContext->state != ManualEditState_INITIALIZED)
+ {
+ // Close the command.
+ LOGV("videoEditor_stopEncoding Calling M4xVSS_CloseCommand()");
+ result = M4xVSS_CloseCommand(pContext->engineContext);
+ LOGV("videoEditor_stopEncoding M4xVSS_CloseCommand() returned 0x%x",
+ (unsigned int)result);
+ }
+
+ // Check if the command could be closed.
+ videoEditJava_checkAndThrowRuntimeException(&stopped, pEnv,
+ (M4NO_ERROR != result), result);
+
+ // Free the edit settings.
+ videoEditClasses_freeEditSettings(&pContext->pEditSettings);
+
+ // Set the state to initialized.
+ pContext->state = ManualEditState_INITIALIZED;
+ }
+
+}
+
+static void
+videoEditor_release(
+ JNIEnv* pEnv,
+ jobject thiz)
+{
+ bool released = true;
+ ManualEditContext* pContext = M4OSA_NULL;
+ M4OSA_ERR result = M4NO_ERROR;
+
+ VIDEOEDIT_LOG_API(ANDROID_LOG_INFO, "VIDEO_EDITOR", "videoEditor_release()");
+
+ // Add a text marker (the condition must always be true).
+ ADD_TEXT_MARKER_FUN(NULL != pEnv)
+
+ // Get the context.
+ pContext = (ManualEditContext*)videoEditClasses_getContext(&released, pEnv, thiz);
+
+ // If context is not set, return (we consider release already happened)
+ if (pContext == NULL) {
+ LOGV("videoEditor_release Nothing to do, context is aleady NULL");
+ return;
+ }
+
+
+ // Check if the context is valid (required because the context is dereferenced).
+ if (released)
+ {
+ if (pContext->state != ManualEditState_INITIALIZED)
+ {
+ // Change state and get Lock
+ // This will ensure the generateClip function exits if it is running
+ pContext->state = ManualEditState_STOPPING;
+ Mutex::Autolock autoLock(pContext->mLock);
+ }
+
+ // Reset the context.
+ videoEditClasses_setContext(&released, pEnv, thiz, (void *)M4OSA_NULL);
+
+ // Check if the command should be closed.
+ if (pContext->state != ManualEditState_INITIALIZED)
+ {
+ // Close the command.
+ LOGV("videoEditor_release Calling M4xVSS_CloseCommand() state =%d",
+ pContext->state);
+ result = M4xVSS_CloseCommand(pContext->engineContext);
+ LOGV("videoEditor_release M4xVSS_CloseCommand() returned 0x%x",
+ (unsigned int)result);
+
+ // Check if the command could be closed.
+ videoEditJava_checkAndThrowRuntimeException(&released, pEnv,
+ (M4NO_ERROR != result), result);
+ }
+
+ // Cleanup the engine.
+ LOGV("videoEditor_release Calling M4xVSS_CleanUp()");
+ result = M4xVSS_CleanUp(pContext->engineContext);
+ LOGV("videoEditor_release M4xVSS_CleanUp() returned 0x%x", (unsigned int)result);
+
+ // Check if the cleanup succeeded.
+ videoEditJava_checkAndThrowRuntimeException(&released, pEnv,
+ (M4NO_ERROR != result), result);
+
+ // Free the edit settings.
+ videoEditClasses_freeEditSettings(&pContext->pEditSettings);
+ pContext->pEditSettings = M4OSA_NULL;
+
+
+ if(pContext->mPreviewController != M4OSA_NULL)
+ {
+ delete pContext->mPreviewController;
+ pContext->mPreviewController = M4OSA_NULL;
+ }
+
+        // Free the audio settings.
+        if(pContext->mAudioSettings != M4OSA_NULL)
+        {
+            M4OSA_free((M4OSA_MemAddr32)pContext->mAudioSettings);
+            pContext->mAudioSettings = M4OSA_NULL;
+        }
+
+        // Free the context.
+        videoEditor_freeContext(pEnv, &pContext);
+ }
+}
+
+static int
+videoEditor_registerManualEditMethods(
+ JNIEnv* pEnv)
+{
+ int result = -1;
+
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR",
+ "videoEditor_registerManualEditMethods()");
+
+ // Look up the engine class
+ jclass engineClazz = pEnv->FindClass(MANUAL_EDIT_ENGINE_CLASS_NAME);
+
+ // Clear any resulting exceptions.
+ pEnv->ExceptionClear();
+
+ // Check if the engine class was found.
+ if (NULL != engineClazz)
+ {
+ // Register all the methods.
+ if (pEnv->RegisterNatives(engineClazz, gManualEditMethods,
+ sizeof(gManualEditMethods) / sizeof(gManualEditMethods[0])) == JNI_OK)
+ {
+ // Success.
+ result = 0;
+ }
+ }
+
+ // Return the result.
+ return(result);
+}
+
+/*******Audio Graph*******/
+
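+// Maps a peak 16-bit sample magnitude onto a coarse decibel scale (roughly 6 dB per
+// halving of the magnitude, from 90 dB near full scale down to 0 for silence).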
+static M4OSA_UInt32 getDecibelSound(M4OSA_UInt32 value)
+{
+ int dbSound = 1;
+
+ if (value == 0) return 0;
+
+ if (value > 0x4000 && value <= 0x8000) // 32768
+ dbSound = 90;
+ else if (value > 0x2000 && value <= 0x4000) // 16384
+ dbSound = 84;
+ else if (value > 0x1000 && value <= 0x2000) // 8192
+ dbSound = 78;
+    else if (value > 0x0800 && value <= 0x1000) // 4096
+ dbSound = 72;
+ else if (value > 0x0400 && value <= 0x0800) // 2048
+ dbSound = 66;
+ else if (value > 0x0200 && value <= 0x0400) // 1024
+ dbSound = 60;
+ else if (value > 0x0100 && value <= 0x0200) // 512
+ dbSound = 54;
+ else if (value > 0x0080 && value <= 0x0100) // 256
+ dbSound = 48;
+ else if (value > 0x0040 && value <= 0x0080) // 128
+ dbSound = 42;
+ else if (value > 0x0020 && value <= 0x0040) // 64
+ dbSound = 36;
+ else if (value > 0x0010 && value <= 0x0020) // 32
+ dbSound = 30;
+    else if (value > 0x0008 && value <= 0x0010) // 16
+        dbSound = 24;
+    else if (value > 0x0007 && value <= 0x0008) // 8
+        dbSound = 24;
+    else if (value > 0x0003 && value <= 0x0007) // 4
+        dbSound = 18;
+    else if (value > 0x0001 && value <= 0x0003) // 2
+        dbSound = 12;
+    else if (value == 0x0001) // 1
+        dbSound = 6;
+ else
+ dbSound = 0;
+
+ return dbSound;
+}
+
+typedef struct
+{
+ M4OSA_UInt8 *m_dataAddress;
+ M4OSA_UInt32 m_bufferSize;
+} M4AM_Buffer;
+
+
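+// Logarithmic lookup table: maps the top 8 bits of a peak sample magnitude
+// (peak >> 7) to a 0..255 loudness value used for the waveform graph.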
+M4OSA_UInt8 logLookUp[256] = {
+0,120,137,146,154,159,163,167,171,173,176,178,181,182,184,186,188,189,190,192,193,
+194,195,196,198,199,199,200,201,202,203,204,205,205,206,207,207,208,209,209,210,
+211,211,212,212,213,213,214,215,215,216,216,216,217,217,218,218,219,219,220,220,
+220,221,221,222,222,222,223,223,223,224,224,224,225,225,225,226,226,226,227,227,
+227,228,228,228,229,229,229,229,230,230,230,230,231,231,231,232,232,232,232,233,
+233,233,233,233,234,234,234,234,235,235,235,235,236,236,236,236,236,237,237,237,
+237,237,238,238,238,238,238,239,239,239,239,239,240,240,240,240,240,240,241,241,
+241,241,241,241,242,242,242,242,242,242,243,243,243,243,243,243,244,244,244,244,
+244,244,245,245,245,245,245,245,245,246,246,246,246,246,246,246,247,247,247,247,
+247,247,247,247,248,248,248,248,248,248,248,249,249,249,249,249,249,249,249,250,
+250,250,250,250,250,250,250,250,251,251,251,251,251,251,251,251,252,252,252,252,
+252,252,252,252,252,253,253,253,253,253,253,253,253,253,253,254,254,254,254,254,
+254,254,254,254,255,255,255,255,255,255,255,255,255,255,255};
+
+M4OSA_ERR M4MA_generateAudioGraphFile(JNIEnv* pEnv, M4OSA_Char* pInputFileURL,
+ M4OSA_Char* pOutFileURL,
+ M4OSA_UInt32 samplesPerValue,
+ M4OSA_UInt32 channels,
+ M4OSA_UInt32 frameDuration,
+ ManualEditContext* pContext)
+{
+ M4OSA_ERR err;
+ M4OSA_Context outFileHandle = M4OSA_NULL;
+ M4OSA_Context inputFileHandle = M4OSA_NULL;
+ M4AM_Buffer bufferIn = {0, 0};
+ M4OSA_UInt32 peakVolumeDbValue = 0;
+ M4OSA_UInt32 samplesCountInBytes= 0 , numBytesToRead = 0, index = 0;
+ M4OSA_UInt32 writeCount = 0, samplesCountBigEndian = 0, volumeValuesCount = 0;
+ M4OSA_Int32 seekPos = 0;
+ M4OSA_UInt32 fileSize = 0;
+ M4OSA_UInt32 totalBytesRead = 0;
+ M4OSA_UInt32 prevProgress = 0;
+ bool threadStarted = true;
+
+ int dbValue = 0;
+    M4OSA_Int16 *ptr16;
+
+ jclass engineClass = pEnv->FindClass(MANUAL_EDIT_ENGINE_CLASS_NAME);
+ videoEditJava_checkAndThrowIllegalStateException(&threadStarted, pEnv,
+ (M4OSA_NULL == engineClass),
+ "not initialized");
+
+ /* register the call back function pointer */
+ pContext->onAudioGraphProgressUpdateMethodId =
+ pEnv->GetMethodID(engineClass, "onAudioGraphExtractProgressUpdate", "(IZ)V");
+
+
+ /* ENTER */
+ VIDEOEDIT_LOG_API(ANDROID_LOG_INFO, "VIDEO_EDITOR", "ENTER - M4MA_generateAudioGraphFile");
+ VIDEOEDIT_LOG_API(ANDROID_LOG_INFO, "VIDEO_EDITOR",
+ "Audio Graph samplesPerValue %d channels %d", samplesPerValue, channels);
+
+ /******************************************************************************
+ OPEN INPUT AND OUTPUT FILES
+ *******************************************************************************/
+ err = M4OSA_fileReadOpen (&inputFileHandle, pInputFileURL, M4OSA_kFileRead);
+ if (inputFileHandle == M4OSA_NULL) {
+ VIDEOEDIT_LOG_ERROR(ANDROID_LOG_INFO, "VIDEO_EDITOR",
+ "M4MA_generateAudioGraphFile: Cannot open input file 0x%x", err);
+ return err;
+ }
+
+ /* get the file size for progress */
+ err = M4OSA_fileReadGetOption(inputFileHandle, M4OSA_kFileReadGetFileSize,
+ (M4OSA_Void**)&fileSize);
+ if ( err != M4NO_ERROR) {
+ //LVMEL_LOG_ERROR("M4MA_generateAudioGraphFile : File write failed \n");
+ jniThrowException(pEnv, "java/lang/IOException", "file size get option failed");
+ //return -1;
+ }
+
+ err = M4OSA_fileWriteOpen (&outFileHandle,(M4OSA_Char*) pOutFileURL,
+ M4OSA_kFileCreate | M4OSA_kFileWrite);
+ if (outFileHandle == M4OSA_NULL) {
+ if (inputFileHandle != NULL)
+ {
+ M4OSA_fileReadClose(inputFileHandle);
+ }
+ return err;
+ }
+
+ /******************************************************************************
+ PROCESS THE SAMPLES
+ *******************************************************************************/
+ samplesCountInBytes = (samplesPerValue * sizeof(M4OSA_UInt16) * channels);
+
+ bufferIn.m_dataAddress = (M4OSA_UInt8*)M4OSA_malloc(samplesCountInBytes*sizeof(M4OSA_UInt16), 0,
+ (M4OSA_Char*)"AudioGraph" );
+ if ( bufferIn.m_dataAddress != M4OSA_NULL) {
+ bufferIn.m_bufferSize = samplesCountInBytes*sizeof(M4OSA_UInt16);
+ } else {
+ VIDEOEDIT_LOG_ERROR(ANDROID_LOG_INFO, "VIDEO_EDITOR",
+ "M4MA_generateAudioGraphFile: Malloc failed for bufferIn.m_dataAddress 0x%x",\
+ M4ERR_ALLOC);
+ return M4ERR_ALLOC;
+ }
+    /* convert the frame duration to big endian before storing it in the output file */
+ samplesCountBigEndian = ((frameDuration>>24)&0xff) | // move byte 3 to byte 0
+ ((frameDuration<<8)&0xff0000) | // move byte 1 to byte 2
+ ((frameDuration>>8)&0xff00) | // move byte 2 to byte 1
+ ((frameDuration<<24)&0xff000000); // byte 0 to byte 3
+
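+    /* The graph file layout is: frame duration (big-endian UInt32), number of values
+       (big-endian UInt32, patched in at the end), then one UInt8 dB value per frame. */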
+    /* write the frame duration (big endian) to the output file */
+ err = M4OSA_fileWriteData (outFileHandle, (M4OSA_MemAddr8)&samplesCountBigEndian,
+ sizeof(M4OSA_UInt32) );
+ if (err != M4NO_ERROR) {
+ jniThrowException(pEnv, "java/lang/IOException", "file write failed");
+ }
+
+
+    /* write a UInt32 value of 0 as a placeholder for the number of values */
+ samplesCountBigEndian = 0; /* reusing local var */
+ err = M4OSA_fileWriteData (outFileHandle, (M4OSA_MemAddr8)&samplesCountBigEndian,
+ sizeof(M4OSA_UInt32) );
+ if (err != M4NO_ERROR) {
+ jniThrowException(pEnv, "java/lang/IOException", "file write failed");
+ }
+
+ /* loop until EOF */
+ do
+ {
+ M4OSA_memset((M4OSA_MemAddr8)bufferIn.m_dataAddress,bufferIn.m_bufferSize, 0);
+
+ numBytesToRead = samplesCountInBytes;
+
+ err = M4OSA_fileReadData( inputFileHandle,
+ (M4OSA_MemAddr8)bufferIn.m_dataAddress,
+ &numBytesToRead );
+
+ if (err != M4NO_ERROR) {
+ // if out value of bytes-read is 0, break
+ if ( numBytesToRead == 0) {
+ VIDEOEDIT_LOG_ERROR(ANDROID_LOG_INFO, "VIDEO_EDITOR", "numBytesToRead 0x%x",\
+ numBytesToRead);
+ break; /* stop if file is empty or EOF */
+ }
+ }
+
+ ptr16 = (M4OSA_Int16*)bufferIn.m_dataAddress;
+
+ peakVolumeDbValue = 0;
+ index = 0;
+
+        // Loop through half the number of bytes read, since the samples are 16 bits wide.
+        while (index < (numBytesToRead / 2)) {
+            /* take the absolute value of the 16-bit sample */
+ if (ptr16[index] < 0) {
+ ptr16[index] = -(ptr16[index]);
+ }
+ peakVolumeDbValue = (peakVolumeDbValue > (M4OSA_UInt32)ptr16[index] ?\
+ peakVolumeDbValue : (M4OSA_UInt32)ptr16[index]);
+ index++;
+ }
+
+        // Drop the 7 least significant bits (the sign is already removed) to index the log table.
+ dbValue = (peakVolumeDbValue >> 7);
+ dbValue = logLookUp[(M4OSA_UInt8)dbValue];
+
+ err = M4OSA_fileWriteData (outFileHandle, (M4OSA_MemAddr8)&dbValue, sizeof(M4OSA_UInt8) );
+ if (err != M4NO_ERROR) {
+ VIDEOEDIT_LOG_ERROR(ANDROID_LOG_INFO, "VIDEO_EDITOR",
+ "M4MA_generateAudioGraphFile : File write failed");
+ break;
+ }
+
+ volumeValuesCount ++;
+ totalBytesRead += numBytesToRead;
+
+ if ((((totalBytesRead*100)/fileSize)) != prevProgress) {
+ if ( (pContext->threadProgress != prevProgress) && (prevProgress != 0 )) {
+ //pContext->threadProgress = prevProgress;
+ //onWveformProgressUpdateMethodId(prevProgress, 0);
+ //LVME_callAudioGraphOnProgressUpdate(pContext, 0, prevProgress);
+ pEnv->CallVoidMethod(pContext->engine,
+ pContext->onAudioGraphProgressUpdateMethodId,
+ prevProgress, 0);
+ VIDEOEDIT_LOG_API(ANDROID_LOG_INFO, "VIDEO_EDITOR", "pContext->threadProgress %d",
+ prevProgress);
+ }
+ }
+ prevProgress = (((totalBytesRead*100)/fileSize));
+
+ } while (numBytesToRead != 0);
+
+ VIDEOEDIT_LOG_ERROR(ANDROID_LOG_INFO, "VIDEO_EDITOR", "loop 0x%x", volumeValuesCount);
+
+    /* if some error occurred in the file write, the loop exited early */
+ if (numBytesToRead != 0) {
+ //err = -1;
+ jniThrowException(pEnv, "java/lang/IOException", "numBytesToRead != 0 ; file write failed");
+ }
+
+    /* seek back and write the value count into the placeholder */
+ seekPos = sizeof(M4OSA_UInt32);
+ err = M4OSA_fileWriteSeek(outFileHandle, M4OSA_kFileSeekBeginning,
+ &seekPos /* after samples per value */);
+ if ( err != M4NO_ERROR) {
+ jniThrowException(pEnv, "java/lang/IOException", "file seek failed");
+ } else {
+ volumeValuesCount = ((volumeValuesCount>>24)&0xff) | // move byte 3 to byte 0
+ ((volumeValuesCount<<8)&0xff0000) | // move byte 1 to byte 2
+ ((volumeValuesCount>>8)&0xff00) | // move byte 2 to byte 1
+ ((volumeValuesCount<<24)&0xff000000); // byte 0 to byte 3
+
+ err = M4OSA_fileWriteData (outFileHandle, (M4OSA_MemAddr8)&volumeValuesCount,
+ sizeof(M4OSA_UInt32) );
+ if ( err != M4NO_ERROR) {
+ jniThrowException(pEnv, "java/lang/IOException", "file write failed");
+ }
+ }
+
+ /******************************************************************************
+ CLOSE AND FREE ALLOCATIONS
+ *******************************************************************************/
+ M4OSA_free((M4OSA_MemAddr32)bufferIn.m_dataAddress);
+ M4OSA_fileReadClose(inputFileHandle);
+ M4OSA_fileWriteClose(outFileHandle);
+ /* final finish callback */
+ pEnv->CallVoidMethod(pContext->engine, pContext->onAudioGraphProgressUpdateMethodId, 100, 0);
+
+ /* EXIT */
+ VIDEOEDIT_LOG_API(ANDROID_LOG_INFO, "VIDEO_EDITOR", "EXIT - M4MA_generateAudioGraphFile");
+
+ return err;
+}
+
+static int videoEditor_generateAudioWaveFormSync (JNIEnv* pEnv, jobject thiz,
+ jstring pcmfilePath,
+ jstring outGraphfilePath,
+ jint frameDuration, jint channels,
+ jint samplesCount)
+{
+ M4OSA_ERR result = M4NO_ERROR;
+ ManualEditContext* pContext = M4OSA_NULL;
+ bool needToBeLoaded = true;
+
+ VIDEOEDIT_LOG_API(ANDROID_LOG_INFO, "VIDEO_EDITOR",
+ "videoEditor_generateAudioWaveFormSync() ");
+
+ /* Get the context. */
+ pContext = (ManualEditContext*)videoEditClasses_getContext(&needToBeLoaded, pEnv, thiz);
+ if (pContext == M4OSA_NULL) {
+ VIDEOEDIT_LOG_API(ANDROID_LOG_INFO, "VIDEO_EDITOR",
+ "videoEditor_generateAudioWaveFormSync() - pContext is NULL ");
+ }
+
+ VIDEOEDIT_LOG_API(ANDROID_LOG_INFO, "VIDEO_EDITOR",
+ "videoEditor_generateAudioWaveFormSync Retrieving pStringOutAudioGraphFile");
+
+ const char *pPCMFilePath = pEnv->GetStringUTFChars(pcmfilePath, NULL);
+ if (pPCMFilePath == M4OSA_NULL) {
+ if (pEnv != NULL) {
+ jniThrowException(pEnv, "java/lang/RuntimeException",
+ "Input string PCMFilePath is null");
+ }
+ }
+
+ const char *pStringOutAudioGraphFile = pEnv->GetStringUTFChars(outGraphfilePath, NULL);
+ if (pStringOutAudioGraphFile == M4OSA_NULL) {
+ if (pEnv != NULL) {
+ jniThrowException(pEnv, "java/lang/RuntimeException",
+ "Input string outGraphfilePath is null");
+ }
+ }
+
+ VIDEOEDIT_LOG_API(ANDROID_LOG_INFO, "VIDEO_EDITOR",
+ "videoEditor_generateAudioWaveFormSync Generate the waveform data %s %d %d %d",
+ pStringOutAudioGraphFile, frameDuration, channels, samplesCount);
+
+ /* Generate the waveform */
+ result = M4MA_generateAudioGraphFile(pEnv, (M4OSA_Char*) pPCMFilePath,
+ (M4OSA_Char*) pStringOutAudioGraphFile,
+ (M4OSA_UInt32) samplesCount,
+ (M4OSA_UInt32) channels,
+ (M4OSA_UInt32)frameDuration,
+ pContext);
+
+    if (pPCMFilePath != NULL) {
+        pEnv->ReleaseStringUTFChars(pcmfilePath, pPCMFilePath);
+    }
+
+    if (pStringOutAudioGraphFile != NULL) {
+        pEnv->ReleaseStringUTFChars(outGraphfilePath, pStringOutAudioGraphFile);
+    }
+
+    VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR",
+        "videoEditor_generateAudioWaveFormSync done");
+
+ return result;
+}
+
+/******** End Audio Graph *******/
+jint JNI_OnLoad(
+ JavaVM* pVm,
+ void* pReserved)
+{
+ void* pEnv = NULL;
+ bool needToBeInitialized = true;
+ jint result = -1;
+
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR", "JNI_OnLoad()");
+
+ // Add a text marker (the condition must always be true).
+ ADD_TEXT_MARKER_FUN(NULL != pVm)
+
+ // Check the JNI version.
+ if (pVm->GetEnv(&pEnv, JNI_VERSION_1_4) == JNI_OK)
+ {
+ // Add a code marker (the condition must always be true).
+ ADD_CODE_MARKER_FUN(NULL != pEnv)
+
+ // Register the manual edit JNI methods.
+ if (videoEditor_registerManualEditMethods((JNIEnv*)pEnv) == 0)
+ {
+ // Initialize the classes.
+ videoEditClasses_init(&needToBeInitialized, (JNIEnv*)pEnv);
+ if (needToBeInitialized)
+ {
+ // Success, return valid version number.
+ result = JNI_VERSION_1_4;
+ }
+ }
+ }
+
+ // Return the result.
+ return(result);
+}
+
diff --git a/media/jni/mediaeditor/VideoEditorMain.h b/media/jni/mediaeditor/VideoEditorMain.h
new file mode 100755
index 000000000000..b73913a07f99
--- /dev/null
+++ b/media/jni/mediaeditor/VideoEditorMain.h
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __VIDEO_EDITOR_API_H__
+#define __VIDEO_EDITOR_API_H__
+
+#include "M4OSA_Types.h"
+
+typedef enum
+{
+ MSG_TYPE_PROGRESS_INDICATION, /* Playback progress indication event*/
+ MSG_TYPE_PLAYER_ERROR, /* Playback error*/
+ MSG_TYPE_PREVIEW_END, /* Preview of clips is complete */
+} progress_callback_msg_type;
+
+typedef struct
+{
+ M4OSA_Void *pFile; /** PCM file path */
+ M4OSA_Bool bRemoveOriginal; /** If true, the original audio track
+ is not taken into account */
+ M4OSA_UInt32 uiNbChannels; /** Number of channels (1=mono, 2=stereo) of BGM clip*/
+ M4OSA_UInt32 uiSamplingFrequency; /** Sampling audio frequency (8000 for amr, 16000 or
+ more for aac) of BGM clip*/
+ M4OSA_UInt32 uiExtendedSamplingFrequency; /** Extended frequency for AAC+,
+ eAAC+ streams of BGM clip*/
+ M4OSA_UInt32 uiAddCts; /** Time, in milliseconds, at which the added
+ audio track is inserted */
+ M4OSA_UInt32 uiAddVolume; /** Volume, in percentage, of the added audio track */
+ M4OSA_UInt32 beginCutMs;
+ M4OSA_UInt32 endCutMs;
+ M4OSA_Int32 fileType;
+ M4OSA_Bool bLoop; /** Looping on/off **/
+ /* Audio ducking */
+ M4OSA_UInt32 uiInDucking_threshold; /** Threshold value at which
+ background music shall duck */
+ M4OSA_UInt32 uiInDucking_lowVolume; /** lower the background track to
+ this factor of current level */
+ M4OSA_Bool bInDucking_enable; /** enable ducking */
+ M4OSA_UInt32 uiBTChannelCount; /** channel count for BT */
+ M4OSA_Void *pPCMFilePath;
+} M4xVSS_AudioMixingSettings;
+
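+/* Describes one YUV420 frame to be rendered during preview, together with the
+   target surface dimensions and the clip cut times within the storyboard. */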
+typedef struct
+{
+ M4OSA_Void *pBuffer; /* YUV420 buffer of frame to be rendered*/
+ M4OSA_UInt32 timeMs; /* time stamp of the frame to be rendered*/
+ M4OSA_UInt32 uiSurfaceWidth; /* Surface display width*/
+ M4OSA_UInt32 uiSurfaceHeight; /* Surface display height*/
+ M4OSA_UInt32 uiFrameWidth; /* Frame width*/
+ M4OSA_UInt32 uiFrameHeight; /* Frame height*/
+ M4OSA_Bool bApplyEffect; /* Apply video effects before render*/
+ M4OSA_UInt32 clipBeginCutTime; /* Clip begin cut time relative to storyboard */
+ M4OSA_UInt32 clipEndCutTime; /* Clip end cut time relative to storyboard */
+
+} VideoEditor_renderPreviewFrameStr;
+#endif /*__VIDEO_EDITOR_API_H__*/
diff --git a/media/jni/mediaeditor/VideoEditorOsal.cpp b/media/jni/mediaeditor/VideoEditorOsal.cpp
new file mode 100755
index 000000000000..423e93f4c023
--- /dev/null
+++ b/media/jni/mediaeditor/VideoEditorOsal.cpp
@@ -0,0 +1,359 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <VideoEditorJava.h>
+#include <VideoEditorLogging.h>
+#include <VideoEditorOsal.h>
+
+extern "C" {
+#include <M4OSA_Clock.h>
+#include <M4OSA_CharStar.h>
+#include <M4OSA_FileCommon.h>
+#include <M4OSA_FileReader.h>
+#include <M4OSA_FileWriter.h>
+#include <M4OSA_Memory.h>
+#include <M4OSA_String.h>
+#include <M4OSA_Thread.h>
+#include <M4xVSS_API.h>
+#include <M4VSS3GPP_ErrorCodes.h>
+#include <M4MCS_ErrorCodes.h>
+#include <M4READER_Common.h>
+#include <M4WRITER_common.h>
+#include <M4VSS3GPP_API.h>
+#include <M4DECODER_Common.h>
+};
+
+
+#define VIDEOEDIT_OSAL_RESULT_STRING_MAX (32)
+
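+// Pairs an error/warning code with its stringized name (via the # operator) so that
+// videoEditOsal_getResultString() can report symbolic names for known results.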
+#define VIDEOEDIT_OSAL_RESULT_INIT(m_result) { m_result, #m_result }
+
+
+typedef struct
+{
+ M4OSA_ERR result;
+ const char* pName;
+} VideoEdit_Osal_Result;
+
+static const VideoEdit_Osal_Result gkRESULTS[] =
+{
+ // M4OSA_Clock.h
+ VIDEOEDIT_OSAL_RESULT_INIT(M4WAR_TIMESCALE_TOO_BIG ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4ERR_CLOCK_BAD_REF_YEAR ),
+
+ // M4OSA_Error.h
+ VIDEOEDIT_OSAL_RESULT_INIT(M4NO_ERROR ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4ERR_PARAMETER ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4ERR_STATE ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4ERR_ALLOC ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4ERR_BAD_CONTEXT ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4ERR_CONTEXT_FAILED ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4ERR_BAD_STREAM_ID ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4ERR_BAD_OPTION_ID ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4ERR_WRITE_ONLY ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4ERR_READ_ONLY ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4ERR_NOT_IMPLEMENTED ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4ERR_UNSUPPORTED_MEDIA_TYPE ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4WAR_NO_DATA_YET ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4WAR_NO_MORE_STREAM ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4WAR_INVALID_TIME ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4WAR_NO_MORE_AU ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4WAR_TIME_OUT ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4WAR_BUFFER_FULL ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4WAR_REDIRECT ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4WAR_TOO_MUCH_STREAMS ),
+
+ // M4OSA_FileCommon.h
+ VIDEOEDIT_OSAL_RESULT_INIT(M4ERR_FILE_NOT_FOUND ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4ERR_FILE_LOCKED ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4ERR_FILE_BAD_MODE_ACCESS ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4ERR_FILE_INVALID_POSITION ),
+
+ // M4OSA_String.h
+ VIDEOEDIT_OSAL_RESULT_INIT(M4ERR_STR_BAD_STRING ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4ERR_STR_CONV_FAILED ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4ERR_STR_OVERFLOW ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4ERR_STR_BAD_ARGS ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4WAR_STR_OVERFLOW ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4WAR_STR_NOT_FOUND ),
+
+ // M4OSA_Thread.h
+ VIDEOEDIT_OSAL_RESULT_INIT(M4ERR_THREAD_NOT_STARTED ),
+
+ // M4xVSS_API.h
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_WAR_ANALYZING_DONE ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_WAR_PREVIEW_READY ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_WAR_SAVING_DONE ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_WAR_TRANSCODING_NECESSARY ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_WAR_OUTPUTFILESIZE_EXCEED ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_JPG_TOO_BIG ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4xVSSWAR_BUFFER_OUT_TOO_SMALL ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4xVSSERR_NO_MORE_SPACE ),
+
+ // M4VSS3GPP_ErrorCodes.h
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_INVALID_FILE_TYPE ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_INVALID_EFFECT_KIND ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_INVALID_VIDEO_EFFECT_TYPE ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_INVALID_AUDIO_EFFECT_TYPE ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_INVALID_VIDEO_TRANSITION_TYPE ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_INVALID_AUDIO_TRANSITION_TYPE ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_INVALID_VIDEO_ENCODING_FRAME_RATE ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_EXTERNAL_EFFECT_NULL ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_EXTERNAL_TRANSITION_NULL ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_BEGIN_CUT_LARGER_THAN_DURATION ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_BEGIN_CUT_LARGER_THAN_END_CUT ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_OVERLAPPING_TRANSITIONS ),
+#ifdef M4VSS3GPP_ERR_ANALYSIS_DATA_SIZE_TOO_SMALL
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_ANALYSIS_DATA_SIZE_TOO_SMALL ),
+#endif
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_INVALID_3GPP_FILE ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_UNSUPPORTED_INPUT_VIDEO_FORMAT ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_UNSUPPORTED_INPUT_AUDIO_FORMAT ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_AMR_EDITING_UNSUPPORTED ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_INPUT_VIDEO_AU_TOO_LARGE ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_INPUT_AUDIO_AU_TOO_LARGE ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_INPUT_AUDIO_CORRUPTED_AU ),
+#ifdef M4VSS3GPP_ERR_INPUT_AUDIO_CORRUPTED_AMR_AU
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_INPUT_AUDIO_CORRUPTED_AMR_AU ),
+#endif
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_ENCODER_ACCES_UNIT_ERROR ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_EDITING_UNSUPPORTED_VIDEO_FORMAT ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_EDITING_UNSUPPORTED_H263_PROFILE ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_EDITING_UNSUPPORTED_MPEG4_PROFILE ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_EDITING_UNSUPPORTED_MPEG4_RVLC ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_EDITING_UNSUPPORTED_AUDIO_FORMAT ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_EDITING_NO_SUPPORTED_STREAM_IN_FILE ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_EDITING_NO_SUPPORTED_VIDEO_STREAM_IN_FILE),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_INVALID_CLIP_ANALYSIS_VERSION ),
+#ifdef M4VSS3GPP_ERR_INVALID_CLIP_ANALYSIS_PLATFORM
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_INVALID_CLIP_ANALYSIS_PLATFORM ),
+#endif
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_INCOMPATIBLE_VIDEO_FORMAT ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_INCOMPATIBLE_VIDEO_FRAME_SIZE ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_INCOMPATIBLE_VIDEO_TIME_SCALE ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_INCOMPATIBLE_VIDEO_DATA_PARTITIONING ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_UNSUPPORTED_MP3_ASSEMBLY ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_WAR_INCOMPATIBLE_AUDIO_STREAM_TYPE ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_WAR_INCOMPATIBLE_AUDIO_NB_OF_CHANNELS ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_WAR_INCOMPATIBLE_AUDIO_SAMPLING_FREQUENCY ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_NO_SUPPORTED_STREAM_IN_FILE ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_ADDVOLUME_EQUALS_ZERO ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_ADDCTS_HIGHER_THAN_VIDEO_DURATION ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_UNDEFINED_AUDIO_TRACK_FILE_FORMAT ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_UNSUPPORTED_ADDED_AUDIO_STREAM ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_AUDIO_MIXING_UNSUPPORTED ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_FEATURE_UNSUPPORTED_WITH_AUDIO_TRACK ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_AUDIO_CANNOT_BE_MIXED ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_INPUT_CLIP_IS_NOT_A_3GPP ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_BEGINLOOP_HIGHER_ENDLOOP ),
+#ifdef M4VSS3GPP_ERR_AUDIO_MIXING_MP3_UNSUPPORTED
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_AUDIO_MIXING_MP3_UNSUPPORTED ),
+#endif
+#ifdef M4VSS3GPP_ERR_FEATURE_UNSUPPORTED_WITH_AAC
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_FEATURE_UNSUPPORTED_WITH_AAC ),
+#endif
+#ifdef M4VSS3GPP_ERR_ONLY_AMRNB_INPUT_CAN_BE_MIXED
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_ONLY_AMRNB_INPUT_CAN_BE_MIXED ),
+#endif
+#ifdef M4VSS3GPP_ERR_FEATURE_UNSUPPORTED_WITH_EVRC
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_FEATURE_UNSUPPORTED_WITH_EVRC ),
+#endif
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_H263_PROFILE_NOT_SUPPORTED ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_NO_SUPPORTED_VIDEO_STREAM_IN_FILE ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_INTERNAL_STATE ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_LUMA_FILTER_ERROR ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_CURTAIN_FILTER_ERROR ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_TRANSITION_FILTER_ERROR ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_AUDIO_DECODER_INIT_FAILED ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_AUDIO_DECODED_PCM_SIZE_ISSUE ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4VSS3GPP_ERR_OUTPUT_FILE_TYPE_ERROR ),
+
+ // M4MCS_ErrorCodes.h
+ VIDEOEDIT_OSAL_RESULT_INIT(M4MCS_WAR_TRANSCODING_DONE ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4MCS_WAR_MEDIATYPE_NOT_SUPPORTED ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4MCS_ERR_INPUT_FILE_CONTAINS_NO_SUPPORTED_STREAM ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4MCS_ERR_INVALID_INPUT_FILE ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4MCS_ERR_UNDEFINED_OUTPUT_VIDEO_FORMAT ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4MCS_ERR_UNDEFINED_OUTPUT_VIDEO_FRAME_SIZE ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4MCS_ERR_UNDEFINED_OUTPUT_VIDEO_FRAME_RATE ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4MCS_ERR_UNDEFINED_OUTPUT_AUDIO_FORMAT ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4MCS_ERR_INVALID_VIDEO_FRAME_SIZE_FOR_H263 ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4MCS_ERR_INVALID_VIDEO_FRAME_RATE_FOR_H263 ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4MCS_ERR_DURATION_IS_NULL ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4MCS_ERR_H263_FORBIDDEN_IN_MP4_FILE ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4MCS_ERR_H263_PROFILE_NOT_SUPPORTED ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4MCS_ERR_INVALID_AAC_SAMPLING_FREQUENCY ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4MCS_ERR_AUDIO_CONVERSION_FAILED ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4MCS_ERR_BEGIN_CUT_LARGER_THAN_DURATION ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4MCS_ERR_BEGIN_CUT_EQUALS_END_CUT ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4MCS_ERR_END_CUT_SMALLER_THAN_BEGIN_CUT ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4MCS_ERR_MAXFILESIZE_TOO_SMALL ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4MCS_ERR_VIDEOBITRATE_TOO_LOW ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4MCS_ERR_AUDIOBITRATE_TOO_LOW ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4MCS_ERR_VIDEOBITRATE_TOO_HIGH ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4MCS_ERR_AUDIOBITRATE_TOO_HIGH ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4MCS_ERR_OUTPUT_FILE_SIZE_TOO_SMALL ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4MCS_ERR_NOMORE_SPACE ),
+
+ // M4READER_Common.h
+ VIDEOEDIT_OSAL_RESULT_INIT(M4ERR_READER_UNKNOWN_STREAM_TYPE ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4WAR_READER_NO_METADATA ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4WAR_READER_INFORMATION_NOT_PRESENT ),
+
+ // M4WRITER_Common.h
+ VIDEOEDIT_OSAL_RESULT_INIT(M4WAR_WRITER_STOP_REQ ),
+ // M4DECODER_Common.h
+ VIDEOEDIT_OSAL_RESULT_INIT(M4WAR_VIDEORENDERER_NO_NEW_FRAME ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4WAR_DEBLOCKING_FILTER_NOT_IMPLEMENTED ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4ERR_DECODER_H263_PROFILE_NOT_SUPPORTED ),
+ VIDEOEDIT_OSAL_RESULT_INIT(M4ERR_DECODER_H263_NOT_BASELINE )
+};
+
+static const int gkRESULTS_COUNT = (sizeof(gkRESULTS) / sizeof(VideoEdit_Osal_Result));
+
+#ifdef OSAL_MEM_LEAK_DEBUG
+static int gAllocatedBlockCount = 0;
+#endif
+
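+// Returns the symbolic name of a known result code; unknown codes are formatted as a
+// hexadecimal value into a static buffer (so the fallback string is not reentrant).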
+const char*
+videoEditOsal_getResultString(
+ M4OSA_ERR result)
+{
+ static char string[VIDEOEDIT_OSAL_RESULT_STRING_MAX] = "";
+ const char* pString = M4OSA_NULL;
+ int index = 0;
+
+ // Loop over the list with constants.
+ for (index = 0;
+ ((M4OSA_NULL == pString) && (index < gkRESULTS_COUNT));
+ index++)
+ {
+ // Check if the specified result matches.
+ if (result == gkRESULTS[index].result)
+ {
+ // Set the description.
+ pString = gkRESULTS[index].pName;
+ }
+ }
+
+ // Check if no result was found.
+ if (M4OSA_NULL == pString)
+ {
+ // Set the description to a default value.
+ M4OSA_chrSPrintf((M4OSA_Char *)string, sizeof(string) - 1,
+ (M4OSA_Char*)"<unknown(0x%08X)>", result);
+ pString = string;
+ }
+
+ // Return the result.
+ return(pString);
+}
+
+void *
+videoEditOsal_alloc(
+ bool* pResult,
+ JNIEnv* pEnv,
+ size_t size,
+ const char* pDescription)
+{
+ void *pData = M4OSA_NULL;
+
+ // Check if the previous action succeeded.
+ if (*pResult)
+ {
+        // Allocate the requested block of memory.
+        pData = (void *)M4OSA_malloc(size, 0, (M4OSA_Char*)pDescription);
+ if (M4OSA_NULL != pData)
+ {
+ // Reset the allocated memory.
+ M4OSA_memset((M4OSA_MemAddr8)pData, size, 0);
+#ifdef OSAL_MEM_LEAK_DEBUG
+ // Update the allocated block count.
+ gAllocatedBlockCount++;
+#endif
+ }
+ else
+ {
+ // Reset the result flag.
+ (*pResult) = false;
+
+ // Log the error.
+ VIDEOEDIT_LOG_ERROR(ANDROID_LOG_ERROR, "VIDEO_EDITOR_OSAL", "videoEditOsal_alloc,\
+ error: unable to allocate memory for %s", pDescription);
+
+ // Throw an exception.
+ jniThrowException(pEnv, "java/lang/OutOfMemoryError", "unable to allocate memory");
+ }
+ }
+
+ // Return the allocated memory.
+ return(pData);
+}
+
+void
+videoEditOsal_free(
+ void* pData)
+{
+ // Check if memory was allocated.
+ if (M4OSA_NULL != pData)
+ {
+ VIDEOEDIT_LOG_FUNCTION(ANDROID_LOG_INFO, "VIDEO_EDITOR_OSAL", "videoEditOsal_free()");
+
+ // Log the API call.
+ VIDEOEDIT_LOG_API(ANDROID_LOG_INFO, "VIDEO_EDITOR_OSAL", "M4OSA_free()");
+
+ // Free the memory.
+ M4OSA_free((M4OSA_MemAddr32)pData);
+#ifdef OSAL_MEM_LEAK_DEBUG
+ // Update the allocated block count.
+ gAllocatedBlockCount--;
+
+ // Log the number of allocated blocks.
+ VIDEOEDIT_LOG_ALLOCATION(ANDROID_LOG_ERROR, "VIDEO_EDITOR_OSAL", "allocated, %d blocks",\
+ gAllocatedBlockCount);
+#endif
+ }
+}
+
+
+void
+videoEditOsal_getFilePointers ( M4OSA_FileReadPointer *pOsaFileReadPtr,
+ M4OSA_FileWriterPointer *pOsaFileWritePtr)
+{
+ if (pOsaFileReadPtr != M4OSA_NULL)
+ {
+ // Initialize the filereader function pointers.
+ pOsaFileReadPtr->openRead = M4OSA_fileReadOpen;
+ pOsaFileReadPtr->readData = M4OSA_fileReadData;
+ pOsaFileReadPtr->seek = M4OSA_fileReadSeek;
+ pOsaFileReadPtr->closeRead = M4OSA_fileReadClose;
+ pOsaFileReadPtr->setOption = M4OSA_fileReadSetOption;
+ pOsaFileReadPtr->getOption = M4OSA_fileReadGetOption;
+ }
+
+ if (pOsaFileWritePtr != M4OSA_NULL)
+ {
+ // Initialize the filewriter function pointers.
+ pOsaFileWritePtr->openWrite = M4OSA_fileWriteOpen;
+ pOsaFileWritePtr->writeData = M4OSA_fileWriteData;
+ pOsaFileWritePtr->seek = M4OSA_fileWriteSeek;
+ pOsaFileWritePtr->Flush = M4OSA_fileWriteFlush;
+ pOsaFileWritePtr->closeWrite = M4OSA_fileWriteClose;
+ pOsaFileWritePtr->setOption = M4OSA_fileWriteSetOption;
+ pOsaFileWritePtr->getOption = M4OSA_fileWriteGetOption;
+ }
+}
+
diff --git a/media/jni/mediaeditor/VideoEditorOsal.h b/media/jni/mediaeditor/VideoEditorOsal.h
new file mode 100755
index 000000000000..7a6f5ea59227
--- /dev/null
+++ b/media/jni/mediaeditor/VideoEditorOsal.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef VIDEO_EDITOR_OSAL_H
+#define VIDEO_EDITOR_OSAL_H
+
+#include <jni.h>
+#include <JNIHelp.h>
+
+extern "C" {
+#include <M4OSA_Error.h>
+#include <M4OSA_Thread.h>
+#include <M4OSA_FileReader.h>
+#include <M4OSA_FileWriter.h>
+};
+
+const char*
+videoEditOsal_getResultString(
+ M4OSA_ERR result);
+
+void*
+videoEditOsal_alloc(
+ bool* pResult,
+ JNIEnv* pEnv,
+ size_t size,
+ const char* pDescription);
+
+void
+videoEditOsal_free(
+ void* pData);
+
+void
+videoEditOsal_startThread(
+ bool* pResult,
+ JNIEnv* pEnv,
+ int stackSize,
+ M4OSA_ThreadDoIt callback,
+ M4OSA_Context* pContext,
+ void* pParam);
+
+void
+videoEditOsal_stopThread(
+ bool* pResult,
+ JNIEnv* pEnv,
+ M4OSA_Context* pContext);
+
+void
+videoEditOsal_getFilePointers ( M4OSA_FileReadPointer *pOsaFileReadPtr,
+ M4OSA_FileWriterPointer *pOsaFileWritePtr);
+
+#endif // VIDEO_EDITOR_OSAL_H
+
diff --git a/media/jni/mediaeditor/VideoEditorPropertiesMain.cpp b/media/jni/mediaeditor/VideoEditorPropertiesMain.cpp
new file mode 100755
index 000000000000..7bf76da229f8
--- /dev/null
+++ b/media/jni/mediaeditor/VideoEditorPropertiesMain.cpp
@@ -0,0 +1,502 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <dlfcn.h>
+#include <stdio.h>
+#include <unistd.h>
+#include <utils/Log.h>
+#include <utils/threads.h>
+#include <VideoEditorClasses.h>
+#include <VideoEditorJava.h>
+#include <VideoEditorOsal.h>
+#include <VideoEditorLogging.h>
+#include <VideoEditorOsal.h>
+#include <marker.h>
+
+extern "C" {
+#include <M4OSA_Clock.h>
+#include <M4OSA_CharStar.h>
+#include <M4OSA_Error.h>
+#include <M4OSA_FileCommon.h>
+#include <M4OSA_FileReader.h>
+#include <M4OSA_FileWriter.h>
+#include <M4OSA_Memory.h>
+#include <M4OSA_String.h>
+#include <M4OSA_Thread.h>
+#include <M4VSS3GPP_API.h>
+#include <M4VSS3GPP_ErrorCodes.h>
+#include <M4MCS_API.h>
+#include <M4MCS_ErrorCodes.h>
+#include <M4MDP_API.h>
+#include <M4READER_Common.h>
+#include <M4WRITER_common.h>
+#include <M4DECODER_Common.h>
+#include <M4AD_Common.h>
+};
+
+extern "C" M4OSA_ERR M4MCS_open_normalMode(
+ M4MCS_Context pContext,
+ M4OSA_Void* pFileIn,
+ M4VIDEOEDITING_FileType InputFileType,
+ M4OSA_Void* pFileOut,
+ M4OSA_Void* pTempFile);
+
+jobject videoEditProp_getProperties(
+ JNIEnv* pEnv,
+ jobject thiz,
+ jstring file);
+
+static void
+getFileAndMediaTypeFromExtension (
+ M4OSA_Char* pExtension,
+ VideoEditClasses_FileType *pFileType,
+ M4VIDEOEDITING_FileType *pClipType);
+
+static M4OSA_ERR
+getClipProperties( JNIEnv* pEnv,
+ jobject thiz,
+ M4OSA_Char* pFile,
+ M4VIDEOEDITING_FileType clipType,
+ M4VIDEOEDITING_ClipProperties* pClipProperties);
+
+M4OSA_UInt32
+VideoEdit_chrCompare(M4OSA_Char* pStrIn1,
+ M4OSA_Char* pStrIn2,
+ M4OSA_Int32* pCmpResult);
+
+jobject videoEditProp_getProperties(
+ JNIEnv* pEnv,
+ jobject thiz,
+ jstring file)
+{
+ bool gotten = true;
+ M4OSA_Char* pFile = M4OSA_NULL;
+ M4OSA_Char* pExtension = M4OSA_NULL;
+ M4OSA_UInt32 index = 0;
+ M4OSA_Int32 cmpResult = 0;
+ VideoEditPropClass_Properties* pProperties = M4OSA_NULL;
+ M4VIDEOEDITING_ClipProperties* pClipProperties = M4OSA_NULL;
+ M4OSA_ERR result = M4NO_ERROR;
+ M4MCS_Context context = M4OSA_NULL;
+ M4OSA_FilePosition size = 0;
+ M4OSA_UInt32 width = 0;
+ M4OSA_UInt32 height = 0;
+ jobject properties = NULL;
+ M4OSA_Context pOMXContext = M4OSA_NULL;
+ M4DECODER_VideoInterface* pOMXVidDecoderInterface = M4OSA_NULL;
+ M4AD_Interface* pOMXAudDecoderInterface = M4OSA_NULL;
+
+ bool initialized = true;
+ VideoEditClasses_FileType fileType = VideoEditClasses_kFileType_Unsupported;
+ M4VIDEOEDITING_FileType clipType = M4VIDEOEDITING_kFileType_Unsupported;
+
+ VIDEOEDIT_LOG_API(
+ ANDROID_LOG_INFO, "VIDEO_EDITOR_PROPERTIES",
+ "videoEditProp_getProperties()");
+
+ // Add a text marker (the condition must always be true).
+ ADD_TEXT_MARKER_FUN(NULL != pEnv)
+
+ // Initialize the classes.
+ videoEditPropClass_init(&initialized, (JNIEnv*)pEnv);
+
+    // Validate the file parameter.
+ videoEditJava_checkAndThrowIllegalArgumentException(
+ &gotten, pEnv, (NULL == file), "file is null");
+
+ // Get the file path.
+ pFile = (M4OSA_Char *)videoEditJava_getString(
+ &gotten, pEnv, file, NULL, M4OSA_NULL);
+
+ result = M4OSA_fileReadOpen(&context, (M4OSA_Void*)pFile, M4OSA_kFileRead);
+ videoEditJava_checkAndThrowIllegalArgumentException(&gotten, pEnv,
+ (M4NO_ERROR != result), "file not found");
+ if(M4NO_ERROR != result)
+ return(properties);
+ result = M4OSA_fileReadClose(context);
+ context = M4OSA_NULL;
+
+ // Check if the file path is valid.
+ if (gotten)
+ {
+ // Retrieve the extension.
+ result = M4OSA_chrReverseFindChar(pFile, '.', &pExtension);
+ if ((M4NO_ERROR == result) && (M4OSA_NULL != pExtension))
+ {
+ // Skip the dot.
+ pExtension++;
+
+ // Get the file type and Media type from extension
+ getFileAndMediaTypeFromExtension(
+ pExtension ,&fileType, &clipType);
+ }
+ }
+
+ // Check if the file type could be determined.
+ videoEditJava_checkAndThrowIllegalArgumentException(
+ &gotten, pEnv,
+ (VideoEditClasses_kFileType_Unsupported == fileType),
+ "file type is not supported");
+
+ // Allocate a new properties structure.
+ pProperties = (VideoEditPropClass_Properties*)videoEditOsal_alloc(
+ &gotten, pEnv,
+ sizeof(VideoEditPropClass_Properties), "Properties");
+
+    // Check if the previous steps and the allocation succeeded
+    // (required because pProperties is dereferenced below).
+ if (gotten)
+ {
+ // Check if this type of file needs to be analyzed using MCS.
+ if ((VideoEditClasses_kFileType_MP3 == fileType) ||
+ (VideoEditClasses_kFileType_MP4 == fileType) ||
+ (VideoEditClasses_kFileType_3GPP == fileType) ||
+ (VideoEditClasses_kFileType_AMR == fileType) ||
+ (VideoEditClasses_kFileType_PCM == fileType))
+ {
+ // Allocate a new clip properties structure.
+ pClipProperties =
+ (M4VIDEOEDITING_ClipProperties*)videoEditOsal_alloc(
+ &gotten, pEnv,
+ sizeof(M4VIDEOEDITING_ClipProperties), "ClipProperties");
+
+ // Check if allocation succeeded (required because of
+ // dereferencing of pClipProperties).
+ if (gotten)
+ {
+ // Add a code marker (the condition must always be true).
+ ADD_CODE_MARKER_FUN(NULL != pClipProperties)
+
+ // Log the API call.
+ VIDEOEDIT_LOG_API(
+ ANDROID_LOG_INFO, "VIDEO_EDITOR_PROPERTIES",
+ "getClipProperties");
+
+ // Get Video clip properties
+ result = getClipProperties(
+ pEnv, thiz, pFile, clipType, pClipProperties);
+
+                // Check if retrieving the clip properties succeeded.
+ videoEditJava_checkAndThrowIllegalArgumentException(
+ &gotten, pEnv,(M4NO_ERROR != result),
+ "Invalid File or File not found");
+
+ if (pClipProperties->uiVideoWidth >= 1920)
+ {
+ result = M4MCS_ERR_INPUT_FILE_CONTAINS_NO_SUPPORTED_STREAM;
+ videoEditJava_checkAndThrowIllegalArgumentException(
+ &gotten, pEnv, (M4NO_ERROR != result),
+ "HD Content (1080p) is not supported");
+ }
+ }
+
+ // Check if the properties could be retrieved.
+ if (gotten)
+ {
+ // Set the properties.
+ pProperties->uiClipDuration = pClipProperties->uiClipDuration;
+ if (M4VIDEOEDITING_kFileType_Unsupported == pClipProperties->FileType)
+ {
+ pProperties->FileType = VideoEditClasses_kFileType_Unsupported;
+ }
+ else
+ {
+ pProperties->FileType = fileType;
+ }
+ pProperties->VideoStreamType = pClipProperties->VideoStreamType;
+ pProperties->uiClipVideoDuration = pClipProperties->uiClipVideoDuration;
+ pProperties->uiVideoBitrate = pClipProperties->uiVideoBitrate;
+ pProperties->uiVideoWidth = pClipProperties->uiVideoWidth;
+ pProperties->uiVideoHeight = pClipProperties->uiVideoHeight;
+ pProperties->fAverageFrameRate = pClipProperties->fAverageFrameRate;
+ pProperties->ProfileAndLevel = pClipProperties->ProfileAndLevel;
+ pProperties->AudioStreamType = pClipProperties->AudioStreamType;
+ pProperties->uiClipAudioDuration = pClipProperties->uiClipAudioDuration;
+ pProperties->uiAudioBitrate = pClipProperties->uiAudioBitrate;
+ pProperties->uiNbChannels = pClipProperties->uiNbChannels;
+ pProperties->uiSamplingFrequency = pClipProperties->uiSamplingFrequency;
+ }
+
+ // Free the clip properties.
+ videoEditOsal_free(pClipProperties);
+ pClipProperties = M4OSA_NULL;
+ }
+ else if ((VideoEditClasses_kFileType_JPG == fileType) ||
+ (VideoEditClasses_kFileType_GIF == fileType) ||
+ (VideoEditClasses_kFileType_PNG == fileType))
+ {
+ pProperties->uiClipDuration = 0;
+ pProperties->FileType = fileType;
+ pProperties->VideoStreamType = M4VIDEOEDITING_kNoneVideo;
+ pProperties->uiClipVideoDuration = 0;
+ pProperties->uiVideoBitrate = 0;
+ pProperties->uiVideoWidth = width;
+ pProperties->uiVideoHeight = height;
+ pProperties->fAverageFrameRate = 0.0f;
+ pProperties->ProfileAndLevel = M4VIDEOEDITING_kProfile_and_Level_Out_Of_Range;
+ pProperties->AudioStreamType = M4VIDEOEDITING_kNoneAudio;
+ pProperties->uiClipAudioDuration = 0;
+ pProperties->uiAudioBitrate = 0;
+ pProperties->uiNbChannels = 0;
+ pProperties->uiSamplingFrequency = 0;
+
+            // Added for handling invalid paths and non-existent image files.
+ // Open the file for reading.
+ result = M4OSA_fileReadOpen(&context, (M4OSA_Void*)pFile, M4OSA_kFileRead);
+ if (M4NO_ERROR != result)
+ {
+ pProperties->FileType = VideoEditClasses_kFileType_Unsupported;
+ }
+ result = M4OSA_fileReadClose(context);
+ context = M4OSA_NULL;
+ }
+ }
+
+ // Create a properties object.
+ videoEditPropClass_createProperties(&gotten, pEnv, pProperties, &properties);
+
+ // Log the properties.
+ VIDEOEDIT_PROP_LOG_PROPERTIES(pProperties);
+
+ // Free the properties.
+ videoEditOsal_free(pProperties);
+ pProperties = M4OSA_NULL;
+
+ // Free the file path.
+ videoEditOsal_free(pFile);
+ pFile = M4OSA_NULL;
+
+ // Add a text marker (the condition must always be true).
+ ADD_TEXT_MARKER_FUN(NULL != pEnv)
+
+ // Return the Properties object.
+ return(properties);
+}
+
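+// Maps a file-name extension to the corresponding file type and clip type.
+// Illustrative examples of the mapping implemented below:
+//     "mp3"              -> (kFileType_MP3,  M4VIDEOEDITING_kFileType_MP3)
+//     "3gp"/"3gpp"/"m4a" -> (kFileType_3GPP, M4VIDEOEDITING_kFileType_3GPP)
+//     "jpg"/"jpeg"       -> (kFileType_JPG,  clip type left Unsupported)
+// Unrecognized extensions leave both outputs at their Unsupported defaults.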
+static void getFileAndMediaTypeFromExtension (
+ M4OSA_Char *pExtension,
+ VideoEditClasses_FileType *pFileType,
+ M4VIDEOEDITING_FileType *pClipType)
+{
+ M4OSA_Char extension[5] = {0, 0, 0, 0, 0};
+ VideoEditClasses_FileType fileType =
+ VideoEditClasses_kFileType_Unsupported;
+
+ M4VIDEOEDITING_FileType clipType =
+ M4VIDEOEDITING_kFileType_Unsupported;
+
+ M4OSA_UInt32 index = 0;
+ M4OSA_ERR result = M4NO_ERROR;
+ M4OSA_Int32 cmpResult = 0;
+ M4OSA_UInt32 extLength = M4OSA_chrLength(pExtension);
+
+ // Assign default
+ *pFileType = VideoEditClasses_kFileType_Unsupported;
+ *pClipType = M4VIDEOEDITING_kFileType_Unsupported;
+
+ // Check if the length of the extension is valid.
+ if ((3 == extLength) || (4 == extLength))
+ {
+ // Convert the extension to lowercase.
+ for (index = 0; index < extLength ; index++)
+ {
+ extension[index] = M4OSA_chrToLower(pExtension[index]);
+ }
+
+ // Check if the extension is ".mp3".
+ if (!(VideoEdit_chrCompare(extension, (M4OSA_Char*)"mp3", &cmpResult)))
+ {
+ *pFileType = VideoEditClasses_kFileType_MP3;
+ *pClipType = M4VIDEOEDITING_kFileType_MP3;
+        }
+        // Check if the extension is ".mp4".
+ else if (!(VideoEdit_chrCompare(extension, (M4OSA_Char*)"mp4", &cmpResult)))
+ {
+ *pFileType = VideoEditClasses_kFileType_MP4;
+ *pClipType = M4VIDEOEDITING_kFileType_MP4;
+ }
+ // Check if the extension is ".3gp".
+ else if (!(VideoEdit_chrCompare(extension, (M4OSA_Char*)"3gp", &cmpResult)))
+ {
+ *pFileType = VideoEditClasses_kFileType_3GPP;
+ *pClipType = M4VIDEOEDITING_kFileType_3GPP;
+ }
+ // Check if the extension is ".3gp".
+ else if (!(VideoEdit_chrCompare(extension, (M4OSA_Char*)"m4a", &cmpResult)))
+ {
+ *pFileType = VideoEditClasses_kFileType_3GPP;
+ *pClipType = M4VIDEOEDITING_kFileType_3GPP;
+ }
+ // Check if the extension is ".3gpp".
+ else if (!(VideoEdit_chrCompare(extension, (M4OSA_Char*)"3gpp", &cmpResult)))
+ {
+ *pFileType = VideoEditClasses_kFileType_3GPP;
+ *pClipType = M4VIDEOEDITING_kFileType_3GPP;
+ }
+ // Check if the extension is ".amr".
+ else if (!(VideoEdit_chrCompare(extension, (M4OSA_Char*)"amr", &cmpResult)))
+ {
+
+ *pFileType = VideoEditClasses_kFileType_AMR;
+ *pClipType = M4VIDEOEDITING_kFileType_AMR;
+ }
+ // Check if the extension is ".pcm".
+ else if (!(VideoEdit_chrCompare(extension, (M4OSA_Char*)"pcm", &cmpResult)))
+ {
+ *pFileType = VideoEditClasses_kFileType_PCM;
+ *pClipType = M4VIDEOEDITING_kFileType_PCM;
+ }
+ // Check if the extension is ".jpg".
+ else if (!(VideoEdit_chrCompare(extension, (M4OSA_Char*)"jpg", &cmpResult)))
+ {
+ *pFileType = VideoEditClasses_kFileType_JPG;
+ }
+ // Check if the extension is ".jpeg".
+ else if (!(VideoEdit_chrCompare(extension, (M4OSA_Char*)"jpeg", &cmpResult)))
+ {
+ *pFileType = VideoEditClasses_kFileType_JPG;
+ }
+ // Check if the extension is ".gif".
+ else if (!(VideoEdit_chrCompare(extension, (M4OSA_Char*)"gif", &cmpResult)))
+ {
+ *pFileType = VideoEditClasses_kFileType_GIF;
+ }
+ // Check if the extension is ".png".
+ else if (!(VideoEdit_chrCompare(extension, (M4OSA_Char*)"png", &cmpResult)))
+ {
+ *pFileType = VideoEditClasses_kFileType_PNG;
+ }
+
+ }
+
+}
+
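+// getClipProperties() runs a short-lived MCS session to query the clip:
+// M4MCS_init() -> M4MCS_open_normalMode() -> M4MCS_getInputFileProperties()
+// -> M4MCS_abort(). Normal opening mode is used so that the exact clip
+// duration is available; failures are reported to Java through
+// videoEditJava_checkAndThrowRuntimeException().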
+static M4OSA_ERR getClipProperties(
+ JNIEnv* pEnv,
+ jobject thiz,
+ M4OSA_Char* pFile,
+ M4VIDEOEDITING_FileType clipType,
+ M4VIDEOEDITING_ClipProperties* pClipProperties)
+{
+ bool gotten = true;
+ M4OSA_ERR result = M4NO_ERROR;
+ M4OSA_ERR resultAbort = M4NO_ERROR;
+ M4MCS_Context context = M4OSA_NULL;
+
+ M4OSA_FileReadPointer fileReadPtr =
+ { M4OSA_NULL, M4OSA_NULL, M4OSA_NULL,
+ M4OSA_NULL, M4OSA_NULL, M4OSA_NULL };
+
+ M4OSA_FileWriterPointer fileWritePtr =
+ { M4OSA_NULL, M4OSA_NULL, M4OSA_NULL,
+ M4OSA_NULL, M4OSA_NULL, M4OSA_NULL, M4OSA_NULL };
+
+ // Initialize the OSAL file system function pointers.
+ videoEditOsal_getFilePointers(&fileReadPtr , &fileWritePtr);
+
+ // Log the API call.
+ VIDEOEDIT_LOG_API(
+ ANDROID_LOG_INFO, "VIDEO_EDITOR_PROPERTIES",\
+ "getClipProperties - M4MCS_init()");
+
+ // Initialize the MCS context.
+ result = M4MCS_init(&context, &fileReadPtr, &fileWritePtr);
+
+ // Log the result.
+ VIDEOEDIT_PROP_LOG_RESULT(
+ ANDROID_LOG_INFO, "VIDEO_EDITOR_PROPERTIES", "%s",
+ videoEditOsal_getResultString(result));
+
+    // Check if the initialization succeeded.
+ videoEditJava_checkAndThrowRuntimeException(
+ &gotten, pEnv, (M4NO_ERROR != result), result);
+
+ // Check if opening the MCS context succeeded.
+ if (gotten)
+ {
+ // Log the API call.
+ VIDEOEDIT_LOG_API(
+ ANDROID_LOG_INFO, "VIDEO_EDITOR_PROPERTIES",
+ "getClipProperties - M4MCS_open_normalMode()");
+
+ // Open the MCS in the normal opening mode to
+ // retrieve the exact duration
+ result = M4MCS_open_normalMode(
+ context, pFile, clipType, M4OSA_NULL, M4OSA_NULL);
+
+ // Log the result.
+ VIDEOEDIT_PROP_LOG_RESULT(
+ ANDROID_LOG_INFO, "VIDEO_EDITOR_PROPERTIES", "%s",
+ videoEditOsal_getResultString(result));
+
+        // Check if the open succeeded.
+ videoEditJava_checkAndThrowRuntimeException(
+ &gotten, pEnv, (M4NO_ERROR != result), result);
+
+ // Check if the MCS could be opened.
+ if (gotten)
+ {
+ // Log the API call.
+ VIDEOEDIT_LOG_API(
+ ANDROID_LOG_INFO, "VIDEO_EDITOR_PROPERTIES",
+ "getClipProperties - M4MCS_getInputFileProperties()");
+
+ // Get the properties.
+ result = M4MCS_getInputFileProperties(context, pClipProperties);
+
+ // Log the result.
+ VIDEOEDIT_PROP_LOG_RESULT(
+ ANDROID_LOG_INFO, "VIDEO_EDITOR_PROPERTIES", "%s",
+ videoEditOsal_getResultString(result));
+
+            // Check if the properties could be retrieved.
+ videoEditJava_checkAndThrowRuntimeException(
+ &gotten, pEnv, (M4NO_ERROR != result), result);
+ }
+
+ // Log the API call.
+ VIDEOEDIT_LOG_API(
+ ANDROID_LOG_INFO, "VIDEO_EDITOR_PROPERTIES",
+ "getClipProperties - M4MCS_abort()");
+
+ // Close the MCS session.
+ resultAbort = M4MCS_abort(context);
+
+ if (result == M4NO_ERROR) {
+ // Log the result.
+ VIDEOEDIT_PROP_LOG_RESULT(
+ ANDROID_LOG_INFO, "VIDEO_EDITOR_PROPERTIES", "%s",
+ videoEditOsal_getResultString(resultAbort));
+
+ // Check if the abort succeeded.
+ videoEditJava_checkAndThrowRuntimeException(
+ &gotten, pEnv, (M4NO_ERROR != resultAbort), resultAbort);
+ result = resultAbort;
+ }
+ }
+
+ return result;
+}
+
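+// VideoEdit_chrCompare() is a thin wrapper around M4OSA_chrCompare() that
+// also returns the comparison result, so it can be used directly in a
+// condition. Illustrative example:
+//     M4OSA_Int32 res = 0;
+//     if (!VideoEdit_chrCompare(ext, (M4OSA_Char*)"mp3", &res)) { /* match */ }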
+M4OSA_UInt32
+VideoEdit_chrCompare(M4OSA_Char* pStrIn1,
+ M4OSA_Char* pStrIn2,
+ M4OSA_Int32* pCmpResult)
+{
+ M4OSA_chrCompare(pStrIn1, pStrIn2, pCmpResult);
+ return *pCmpResult;
+}
+
+
diff --git a/media/jni/mediaeditor/VideoEditorThumbnailMain.cpp b/media/jni/mediaeditor/VideoEditorThumbnailMain.cpp
new file mode 100755
index 000000000000..b1f9fe465401
--- /dev/null
+++ b/media/jni/mediaeditor/VideoEditorThumbnailMain.cpp
@@ -0,0 +1,330 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#include <jni.h>
+#include <JNIHelp.h>
+#include <utils/Log.h>
+#include "VideoBrowserMain.h"
+#include "VideoBrowserInternal.h"
+
+#if (M4OSA_TRACE_LEVEL >= 1)
+#undef M4OSA_TRACE1_0
+#undef M4OSA_TRACE1_1
+#undef M4OSA_TRACE1_2
+#undef M4OSA_TRACE1_3
+
+#define M4OSA_TRACE1_0(a) __android_log_print(ANDROID_LOG_INFO, "Thumbnail", a);
+#define M4OSA_TRACE1_1(a,b) __android_log_print(ANDROID_LOG_INFO, "Thumbnail", a,b);
+#define M4OSA_TRACE1_2(a,b,c) __android_log_print(ANDROID_LOG_INFO, "Thumbnail", a,b,c);
+#define M4OSA_TRACE1_3(a,b,c,d) __android_log_print(ANDROID_LOG_INFO, "Thumbnail", a,b,c,d);
+#endif
+
+/*
+ * Skia's 'ARGB8888' config stores bytes as R, G, B, A in memory, i.e. ABGR in
+ * a 32-bit little-endian word; the source bitmap format here is RGB565.
+ */
+// RED GREEN BLUE ALPHA
+#define RGB565toSKCOLOR(c) ( (((c)&0xF800)>>8) | (((c)&0x7E0)<<5) | (((c)&0x1F)<<19) | 0xFF000000)
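+// Worked example (illustrative): pure red in RGB565 is 0xF800, and
+// RGB565toSKCOLOR(0xF800) = 0x000000F8 | 0xFF000000 = 0xFF0000F8, i.e. the
+// little-endian byte sequence R=0xF8, G=0x00, B=0x00, A=0xFF.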
+
+#define GetIntField(env, obj, name) env->GetIntField(obj,\
+env->GetFieldID(env->GetObjectClass(obj), name, "I"))
+
+extern "C" M4OSA_ERR NXPSW_FileReaderOptim_init(M4OSA_Void *lowLevel_functionPointers,
+ M4OSA_Void *optimized_functionPointers);
+
+/*
+ * Video Browser execution context.
+ * Depending on whether RGB565 or RGB888 output is requested, either m_dst16
+ * or m_dst32 is initialized and used.
+ */
+typedef struct
+{
+ M4OSA_Context m_pVideoBrowser;
+ M4OSA_UInt32 m_previousTime;
+ M4OSA_Int32* m_dst32;
+ M4OSA_Int16* m_dst16;
+ unsigned int m_width;
+ unsigned int m_height;
+ M4OSA_Bool m_bRender;
+} ThumbnailContext;
+
+/**
+ ************************************************************************
+ * @brief Interface to retrieve the thumbnail pixels
+ * @param pContext   (IN)     Thumbnail Context.
+ * @param pixelArray (OUT)    Pointer to the array that receives the pixel data.
+ * @param width      (IN)     Width of the thumbnail.
+ * @param height     (IN)     Height of the thumbnail.
+ * @param pTimeMS    (IN/OUT) Time stamp at which the thumbnail is retrieved.
+ ************************************************************************
+*/
+M4OSA_ERR ThumbnailGetPixels(const M4OSA_Context pContext,
+ M4OSA_Int32* pixelArray,
+ M4OSA_UInt32 width, M4OSA_UInt32 height,
+ M4OSA_UInt32* pTimeMS);
+
+
+/**
+ ************************************************************************
+ * @brief Video browser callback, called when a frame must be displayed
+ * @param pInstance         (IN) Thumbnail context.
+ * @param notificationID    (IN) Id of the notification that triggered the callback.
+ * @param errCode           (IN) Error code from the Core.
+ * @param pCbData           (IN) Pointer to the data associated with the callback.
+ * @param pCallbackUserData (IN) Pointer to the application user data passed at init.
+ * @note This callback mechanism is used to request display of an image
+ ************************************************************************
+*/
+M4OSA_Void VBcallback( M4OSA_Context pInstance,
+ VideoBrowser_Notification notificationID,
+ M4OSA_ERR errCode, M4OSA_Void* pCbData,
+ M4OSA_Void* pCallbackUserData)
+{
+ M4OSA_UInt32 i, j;
+ M4OSA_ERR err;
+
+ M4OSA_TRACE3_0("inside VBcallback");
+ M4VIFI_ImagePlane* pPlane=NULL;
+ M4OSA_UInt16* src=NULL;
+ ThumbnailContext* pC = NULL;
+
+ CHECK_PTR(VBcallback, pCbData, err, M4ERR_PARAMETER);
+ CHECK_PTR(VBcallback, pInstance,err, M4ERR_PARAMETER);
+
+ pC = (ThumbnailContext*)pCallbackUserData ;
+ CHECK_PTR(VBcallback, pC->m_pVideoBrowser, err, M4ERR_PARAMETER);
+
+ pPlane = (M4VIFI_ImagePlane*)pCbData;
+ src = (M4OSA_UInt16*)pPlane->pac_data;
+
+ if (pC->m_dst32 != NULL)
+ {
+ M4OSA_Int32* dst = pC->m_dst32;
+
+ for (j = 0; j < pPlane->u_height; j++)
+ {
+ for (i = 0; i < pPlane->u_width; i++)
+ {
+ dst[i] = RGB565toSKCOLOR(src[i]);
+ }
+ for (i = pPlane->u_width; i < pC->m_width; i++)
+ {
+ dst[i] = 0;
+ }
+ src = (M4OSA_UInt16*)((M4OSA_UInt8*)src + pPlane->u_stride);
+ dst += pC->m_width;
+ }
+ }
+ else if (pC->m_dst16 != NULL)
+ {
+ M4OSA_Int16* dst = pC->m_dst16;
+
+ for (j = 0; j < pPlane->u_height; j++)
+ {
+ M4OSA_memcpy((M4OSA_MemAddr8 )dst, (M4OSA_MemAddr8 )src, pPlane->u_stride);
+ for (i = pPlane->u_width; i < pC->m_width; i++)
+ {
+ dst[i] = 0;
+ }
+ src = (M4OSA_UInt16*)((M4OSA_UInt8*)src + pPlane->u_stride);
+ dst += pC->m_width;
+ }
+ }
+ else
+ {
+ CHECK_PTR(VBcallback, NULL, err, M4ERR_PARAMETER);
+ }
+
+VBcallback_cleanUp:
+
+ return;
+}
+
+M4OSA_ERR ThumbnailOpen(M4OSA_Context *pPContext,
+ const M4OSA_Char *pString,
+ M4OSA_Bool bRender)
+{
+
+ M4OSA_ERR err;
+ ThumbnailContext *pContext = M4OSA_NULL;
+ VideoBrowser_VideoColorType vbColorType;
+
+ CHECK_PTR(ThumbnailOpen, pString, err, M4ERR_BAD_CONTEXT);
+
+ /*--- Create context ---*/
+ pContext = (ThumbnailContext*)M4OSA_malloc(sizeof(ThumbnailContext), VIDEOBROWSER,
+ (M4OSA_Char*)"Thumbnail context") ;
+ M4OSA_TRACE3_1("context value is = %d",pContext);
+ CHECK_PTR(ThumbnailOpen, pContext, err, M4ERR_ALLOC);
+
+ M4OSA_memset((M4OSA_MemAddr8)pContext, sizeof(ThumbnailContext), 0);
+
+ M4OSA_FileReadPointer optFP;
+ M4OSA_FileReadPointer llFP;
+
+ NXPSW_FileReaderOptim_init(&llFP, &optFP);
+ M4OSA_TRACE1_2("ThumbnailOpen: entering videoBrowserCreate with 0x%x %s",
+ &pContext->m_pVideoBrowser, pString) ;
+
+ pContext->m_bRender = bRender;
+ if (bRender == M4OSA_TRUE) {
+ //Open is called for rendering the frame.
+ //So set YUV420 as the output color format.
+ vbColorType = VideoBrowser_kYUV420;
+ } else {
+ //Open is called for thumbnail Extraction
+ //So set BGR565 as the output.
+ vbColorType = VideoBrowser_kGB565;
+ }
+
+ err = videoBrowserCreate(&pContext->m_pVideoBrowser, (M4OSA_Char*)pString,
+ VideoBrowser_kVBNormalBliting, &optFP, VBcallback, pContext, vbColorType);
+
+ M4OSA_TRACE1_1("err value is = 0x%x",err);
+ CHECK_ERR(ThumbnailOpen, err);
+ CHECK_PTR(ThumbnailOpen, pContext->m_pVideoBrowser, err, M4ERR_ALLOC);
+
+ *pPContext = pContext;
+ M4OSA_TRACE1_1("context value is = %d",*pPContext);
+
+ return M4NO_ERROR;
+
+ThumbnailOpen_cleanUp:
+
+ M4OSA_TRACE1_0("i am inside cleanUP");
+ if (M4OSA_NULL != pContext)
+ {
+ if (M4OSA_NULL != pContext->m_pVideoBrowser)
+ {
+ videoBrowserCleanUp(pContext->m_pVideoBrowser) ;
+ }
+ M4OSA_free((M4OSA_MemAddr32)pContext) ;
+ }
+ return err;
+}
+
+M4OSA_ERR ThumbnailGetPixels(const M4OSA_Context pContext,
+ M4OSA_Int32* pixelArray,
+ M4OSA_UInt32 width, M4OSA_UInt32 height,
+ M4OSA_UInt32* pTimeMS)
+{
+ M4OSA_ERR err;
+
+ ThumbnailContext* pC = (ThumbnailContext*)pContext;
+
+ if ((pC->m_width != width) || (pC->m_height != height))
+ {
+ err = videoBrowserSetWindow(pC->m_pVideoBrowser, pixelArray,
+ 0, 0, width, height);
+ CHECK_ERR(ThumbnailGetPixels, err);
+ pC->m_width = width;
+ pC->m_height = height;
+ }
+
+    // Adjust pTimeMS to a valid value at which a frame is found;
+    // m_currentCTS holds the time stamp of the actual frame just ahead of
+    // the supplied pTimeMS.
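+    // Example (illustrative): if the previous request was at 1000 ms, the
+    // current frame CTS is 1500 ms and the caller now asks for 1200 ms, the
+    // request is snapped forward to 1500 ms, the time stamp of the frame
+    // just ahead of the requested time.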
+ if ((((VideoBrowserContext*)pC->m_pVideoBrowser)->m_currentCTS != 0) &&
+ (*pTimeMS >= pC->m_previousTime) &&
+ (*pTimeMS < ((VideoBrowserContext*)pC->m_pVideoBrowser)->m_currentCTS))
+ {
+ pC->m_previousTime = *pTimeMS;
+ *pTimeMS = ((VideoBrowserContext*)pC->m_pVideoBrowser)->m_currentCTS;
+ }
+ else
+ {
+ pC->m_previousTime = *pTimeMS;
+ }
+
+ err = videoBrowserPrepareFrame(pC->m_pVideoBrowser, pTimeMS);
+ CHECK_ERR(ThumbnailGetPixels, err);
+
+ if (pC->m_bRender != M4OSA_TRUE) {
+ err = videoBrowserDisplayCurrentFrame(pC->m_pVideoBrowser);
+ CHECK_ERR(ThumbnailGetPixels, err);
+ }
+
+ThumbnailGetPixels_cleanUp:
+
+ return err;
+}
+
+M4OSA_ERR ThumbnailGetPixels32(const M4OSA_Context pContext,
+ M4OSA_Int32* pixelArray, M4OSA_UInt32 width,
+ M4OSA_UInt32 height, M4OSA_UInt32* timeMS)
+{
+
+ M4OSA_ERR err = M4NO_ERROR;
+
+ ThumbnailContext* pC = (ThumbnailContext*)pContext;
+
+ CHECK_PTR(ThumbnailGetPixels32, pC->m_pVideoBrowser, err, M4ERR_ALLOC) ;
+ CHECK_PTR(ThumbnailGetPixels32, pixelArray, err, M4ERR_ALLOC) ;
+
+ pC->m_dst16 = NULL;
+ pC->m_dst32 = pixelArray;
+
+ err = ThumbnailGetPixels(pContext, pixelArray, width, height, timeMS);
+
+ThumbnailGetPixels32_cleanUp:
+
+ return err;
+}
+
+M4OSA_ERR ThumbnailGetPixels16(const M4OSA_Context pContext,
+ M4OSA_Int16* pixelArray, M4OSA_UInt32 width,
+ M4OSA_UInt32 height, M4OSA_UInt32* timeMS)
+{
+ M4OSA_ERR err = M4NO_ERROR;
+
+ ThumbnailContext* pC = (ThumbnailContext*)pContext;
+
+ CHECK_PTR(ThumbnailGetPixels16, pC->m_pVideoBrowser, err, M4ERR_ALLOC);
+ CHECK_PTR(ThumbnailGetPixels16, pixelArray, err, M4ERR_ALLOC);
+
+ pC->m_dst16 = pixelArray;
+ pC->m_dst32 = NULL;
+
+ err = ThumbnailGetPixels(pContext, (M4OSA_Int32*)pixelArray, width, height, timeMS);
+
+ThumbnailGetPixels16_cleanUp:
+
+ return err;
+}
+
+
+void ThumbnailClose(const M4OSA_Context pContext)
+{
+ M4OSA_ERR err;
+
+ ThumbnailContext* pC = (ThumbnailContext*)pContext;
+
+ CHECK_PTR(ThumbnailClose, pC, err, M4ERR_ALLOC);
+
+ if (M4OSA_NULL != pC)
+ {
+ if (M4OSA_NULL != pC->m_pVideoBrowser)
+ {
+ videoBrowserCleanUp(pC->m_pVideoBrowser);
+ }
+ M4OSA_free((M4OSA_MemAddr32)pC);
+ }
+
+ThumbnailClose_cleanUp:
+
+ return;
+}
+
diff --git a/media/jni/mediaeditor/VideoEditorThumbnailMain.h b/media/jni/mediaeditor/VideoEditorThumbnailMain.h
new file mode 100755
index 000000000000..14c60dd6de21
--- /dev/null
+++ b/media/jni/mediaeditor/VideoEditorThumbnailMain.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef VIDEOEDITOR_THUMBNAIL_MAIN_H
+#define VIDEOEDITOR_THUMBNAIL_MAIN_H
+
+/**
+ ************************************************************************
+ * @file VideoEditorThumbnailMain.h
+ * @brief Thumbnail extraction interface.
+ ************************************************************************
+*/
+
+/**
+ ************************************************************************
+ * @brief Interface to open a Thumbnail session.
+ * @param pPContext (OUT) Thumbnail Context.
+ * @param pString   (IN)  File path from which the thumbnail will be
+ *                        retrieved.
+ * @param bRender   (IN)  M4OSA_TRUE if this is for rendering at the native layer.
+ ************************************************************************
+*/
+M4OSA_ERR ThumbnailOpen(M4OSA_Context *pPContext,
+ const M4OSA_Char *pString,
+ M4OSA_Bool bRender);
+
+/**
+ ************************************************************************
+ * @brief Interface to retrieve thumbnail pixels in RGB888 format.
+ * @param pContext   (IN)     Thumbnail Context.
+ * @param pixelArray (OUT)    Pointer to the array in which the pixel data is returned.
+ * @param width      (IN)     Width of the thumbnail.
+ * @param height     (IN)     Height of the thumbnail.
+ * @param pTimeMS    (IN/OUT) Time stamp at which the thumbnail is retrieved.
+ ************************************************************************
+*/
+M4OSA_ERR ThumbnailGetPixels32(const M4OSA_Context pContext,
+ M4OSA_Int32* pixelArray, M4OSA_UInt32 width,
+ M4OSA_UInt32 height, M4OSA_UInt32 *timeMS);
+
+/**
+ ************************************************************************
+ * @brief Interface to retrieve thumbnail pixels in RGB565 format.
+ * @param pContext   (IN)     Thumbnail Context.
+ * @param pixelArray (OUT)    Pointer to the array in which the pixel data is returned.
+ * @param width      (IN)     Width of the thumbnail.
+ * @param height     (IN)     Height of the thumbnail.
+ * @param pTimeMS    (IN/OUT) Time stamp at which the thumbnail is retrieved.
+ ************************************************************************
+*/
+M4OSA_ERR ThumbnailGetPixels16(const M4OSA_Context pContext,
+ M4OSA_Int16* pixelArray, M4OSA_UInt32 width,
+ M4OSA_UInt32 height, M4OSA_UInt32 *timeMS);
+
+/**
+ ************************************************************************
+ * @brief Interface to close the Thumbnail session.
+ * @param pContext (IN) Thumbnail Context.
+ ************************************************************************
+*/
+void ThumbnailClose(const M4OSA_Context pContext);
+
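+/**
+ ************************************************************************
+ * Typical usage (illustrative sketch only; the file path and dimensions
+ * below are placeholders and error handling is omitted):
+ *
+ *     M4OSA_Context ctx = M4OSA_NULL;
+ *     M4OSA_Int32   pixels[80 * 60];
+ *     M4OSA_UInt32  timeMs = 5000;   // request a frame near 5 s
+ *
+ *     ThumbnailOpen(&ctx, (const M4OSA_Char*)"/sdcard/clip.3gp", M4OSA_FALSE);
+ *     ThumbnailGetPixels32(ctx, pixels, 80, 60, &timeMs);
+ *     ThumbnailClose(ctx);
+ ************************************************************************
+*/
+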
+#endif // VIDEOEDITOR_THUMBNAIL_MAIN_H
diff --git a/media/libstagefright/include/AwesomePlayer.h b/media/libstagefright/include/AwesomePlayer.h
index 130ad8228900..fe00856946f0 100644
--- a/media/libstagefright/include/AwesomePlayer.h
+++ b/media/libstagefright/include/AwesomePlayer.h
@@ -100,6 +100,7 @@ struct AwesomePlayer {
private:
friend struct AwesomeEvent;
+ friend struct PreviewPlayer;
enum {
PLAYING = 1,