-rw-r--r--  api/current.txt | 4
-rw-r--r--  core/java/android/app/FragmentManager.java | 1
-rw-r--r--  core/java/android/appwidget/AppWidgetManager.java | 8
-rw-r--r--  core/java/android/appwidget/AppWidgetProviderInfo.java | 4
-rw-r--r--  core/java/android/net/LinkProperties.java | 4
-rw-r--r--  core/java/android/provider/ContactsContract.java | 7
-rw-r--r--  core/java/android/util/JsonReader.java | 6
-rw-r--r--  core/java/android/view/View.java | 13
-rw-r--r--  core/java/android/view/ViewGroup.java | 2
-rw-r--r--  core/java/android/view/animation/AlphaAnimation.java | 8
-rw-r--r--  core/java/android/view/animation/Animation.java | 9
-rw-r--r--  core/java/android/view/animation/AnimationSet.java | 26
-rw-r--r--  core/java/com/android/internal/widget/ActionBarView.java | 18
-rw-r--r--  core/res/res/anim/screen_rotate_minus_90_enter.xml | 5
-rw-r--r--  core/res/res/anim/screen_rotate_plus_90_enter.xml | 5
-rw-r--r--  docs/html/guide/developing/tools/adb.jd | 2
-rw-r--r--  docs/html/guide/topics/intents/intents-filters.jd | 2
-rw-r--r--  docs/html/guide/topics/manifest/activity-element.jd | 4
-rw-r--r--  docs/html/guide/topics/providers/content-providers.jd | 2
-rw-r--r--  graphics/jni/android_renderscript_RenderScript.cpp | 1
-rw-r--r--  include/gui/ISurfaceTexture.h | 8
-rw-r--r--  include/gui/SurfaceTexture.h | 23
-rw-r--r--  include/gui/SurfaceTextureClient.h | 4
-rw-r--r--  include/media/IMediaRecorder.h | 2
-rw-r--r--  include/media/MediaRecorderBase.h | 2
-rw-r--r--  include/media/mediarecorder.h | 10
-rw-r--r--  include/media/stagefright/DataSource.h | 5
-rw-r--r--  include/media/stagefright/HardwareAPI.h | 7
-rw-r--r--  include/media/stagefright/MediaSource.h | 2
-rw-r--r--  include/media/stagefright/SurfaceMediaSource.h | 350
-rw-r--r--  include/ui/PixelFormat.h | 10
-rw-r--r--  libs/gui/ISurfaceTexture.cpp | 28
-rw-r--r--  libs/gui/SurfaceTexture.cpp | 88
-rw-r--r--  libs/gui/SurfaceTextureClient.cpp | 12
-rw-r--r--  libs/gui/tests/SurfaceTexture_test.cpp | 151
-rw-r--r--  media/java/android/media/MediaRecorder.java | 97
-rw-r--r--  media/jni/android_media_MediaRecorder.cpp | 17
-rw-r--r--  media/libmedia/IMediaRecorder.cpp | 30
-rw-r--r--  media/libmedia/mediaplayer.cpp | 2
-rw-r--r--  media/libmedia/mediarecorder.cpp | 33
-rw-r--r--  media/libmediaplayerservice/MediaRecorderClient.cpp | 15
-rw-r--r--  media/libmediaplayerservice/MediaRecorderClient.h | 68
-rw-r--r--  media/libmediaplayerservice/StagefrightRecorder.cpp | 109
-rw-r--r--  media/libmediaplayerservice/StagefrightRecorder.h | 18
-rw-r--r--  media/libstagefright/Android.mk | 1
-rw-r--r--  media/libstagefright/NuCachedSource2.cpp | 61
-rw-r--r--  media/libstagefright/SurfaceMediaSource.cpp | 756
-rw-r--r--  media/libstagefright/chromium_http/ChromiumHTTPDataSource.cpp | 34
-rw-r--r--  media/libstagefright/include/ChromiumHTTPDataSource.h | 2
-rw-r--r--  media/libstagefright/include/NuCachedSource2.h | 6
-rw-r--r--  media/libstagefright/tests/Android.mk | 53
-rw-r--r--  media/libstagefright/tests/DummyRecorder.cpp | 91
-rw-r--r--  media/libstagefright/tests/DummyRecorder.h | 58
-rw-r--r--  media/libstagefright/tests/SurfaceMediaSource_test.cpp | 349
-rw-r--r--  packages/SystemUI/res/values/config.xml | 2
-rw-r--r--  services/audioflinger/AudioFlinger.cpp | 2
-rw-r--r--  services/java/com/android/server/AppWidgetService.java | 6
-rw-r--r--  services/java/com/android/server/ConnectivityService.java | 86
-rw-r--r--  services/java/com/android/server/InputMethodManagerService.java | 14
-rw-r--r--  services/java/com/android/server/connectivity/Vpn.java | 2
-rw-r--r--  services/java/com/android/server/wm/BlackFrame.java | 4
-rw-r--r--  services/surfaceflinger/SurfaceTextureLayer.cpp | 2
-rw-r--r--  tests/RenderScriptTests/ImageProcessing/src/com/android/rs/image/ImageProcessingActivity.java | 2
-rw-r--r--  wifi/java/android/net/wifi/WifiWatchdogStateMachine.java | 2
64 files changed, 2452 insertions, 303 deletions
diff --git a/api/current.txt b/api/current.txt
index 78d5131..2514efd 100644
--- a/api/current.txt
+++ b/api/current.txt
@@ -10406,8 +10406,8 @@ package android.media {
method public void setAudioEncodingBitRate(int);
method public void setAudioSamplingRate(int);
method public void setAudioSource(int) throws java.lang.IllegalStateException;
- method public void setAuxiliaryOutputFile(java.io.FileDescriptor);
- method public void setAuxiliaryOutputFile(java.lang.String);
+ method public deprecated void setAuxiliaryOutputFile(java.io.FileDescriptor);
+ method public deprecated void setAuxiliaryOutputFile(java.lang.String);
method public void setCamera(android.hardware.Camera);
method public void setCaptureRate(double);
method public void setLocation(float, float);
diff --git a/core/java/android/app/FragmentManager.java b/core/java/android/app/FragmentManager.java
index c82c9ec..789d3a6 100644
--- a/core/java/android/app/FragmentManager.java
+++ b/core/java/android/app/FragmentManager.java
@@ -1695,6 +1695,7 @@ final class FragmentManagerImpl extends FragmentManager {
public void dispatchDestroy() {
mDestroyed = true;
+ execPendingActions();
moveToState(Fragment.INITIALIZING, false);
mActivity = null;
}
diff --git a/core/java/android/appwidget/AppWidgetManager.java b/core/java/android/appwidget/AppWidgetManager.java
index 019652c..1ef99a1 100644
--- a/core/java/android/appwidget/AppWidgetManager.java
+++ b/core/java/android/appwidget/AppWidgetManager.java
@@ -388,6 +388,10 @@ public class AppWidgetManager {
TypedValue.complexToDimensionPixelSize(info.minWidth, mDisplayMetrics);
info.minHeight =
TypedValue.complexToDimensionPixelSize(info.minHeight, mDisplayMetrics);
+ info.minResizeWidth =
+ TypedValue.complexToDimensionPixelSize(info.minResizeWidth, mDisplayMetrics);
+ info.minResizeHeight =
+ TypedValue.complexToDimensionPixelSize(info.minResizeHeight, mDisplayMetrics);
}
return providers;
}
@@ -411,6 +415,10 @@ public class AppWidgetManager {
TypedValue.complexToDimensionPixelSize(info.minWidth, mDisplayMetrics);
info.minHeight =
TypedValue.complexToDimensionPixelSize(info.minHeight, mDisplayMetrics);
+ info.minResizeWidth =
+ TypedValue.complexToDimensionPixelSize(info.minResizeWidth, mDisplayMetrics);
+ info.minResizeHeight =
+ TypedValue.complexToDimensionPixelSize(info.minResizeHeight, mDisplayMetrics);
}
return info;
}
diff --git a/core/java/android/appwidget/AppWidgetProviderInfo.java b/core/java/android/appwidget/AppWidgetProviderInfo.java
index b8c5b02..9c352d5 100644
--- a/core/java/android/appwidget/AppWidgetProviderInfo.java
+++ b/core/java/android/appwidget/AppWidgetProviderInfo.java
@@ -187,6 +187,8 @@ public class AppWidgetProviderInfo implements Parcelable {
}
this.minWidth = in.readInt();
this.minHeight = in.readInt();
+ this.minResizeWidth = in.readInt();
+ this.minResizeHeight = in.readInt();
this.updatePeriodMillis = in.readInt();
this.initialLayout = in.readInt();
if (0 != in.readInt()) {
@@ -208,6 +210,8 @@ public class AppWidgetProviderInfo implements Parcelable {
}
out.writeInt(this.minWidth);
out.writeInt(this.minHeight);
+ out.writeInt(this.minResizeWidth);
+ out.writeInt(this.minResizeHeight);
out.writeInt(this.updatePeriodMillis);
out.writeInt(this.initialLayout);
if (this.configure != null) {
diff --git a/core/java/android/net/LinkProperties.java b/core/java/android/net/LinkProperties.java
index 9826bec..132f3ba 100644
--- a/core/java/android/net/LinkProperties.java
+++ b/core/java/android/net/LinkProperties.java
@@ -58,8 +58,8 @@ public class LinkProperties implements Parcelable {
private ProxyProperties mHttpProxy;
public static class CompareResult<T> {
- public ArrayList<T> removed = new ArrayList<T>();
- public ArrayList<T> added = new ArrayList<T>();
+ public Collection<T> removed = new ArrayList<T>();
+ public Collection<T> added = new ArrayList<T>();
@Override
public String toString() {
diff --git a/core/java/android/provider/ContactsContract.java b/core/java/android/provider/ContactsContract.java
index c299891..4a719ec 100644
--- a/core/java/android/provider/ContactsContract.java
+++ b/core/java/android/provider/ContactsContract.java
@@ -1439,6 +1439,13 @@ public final class ContactsContract {
CONTENT_URI, "strequent");
/**
+ * The content:// style URI for showing frequently contacted person listing.
+ * @hide
+ */
+ public static final Uri CONTENT_FREQUENT_URI = Uri.withAppendedPath(
+ CONTENT_URI, "frequent");
+
+ /**
* The content:// style URI used for "type-to-filter" functionality on the
* {@link #CONTENT_STREQUENT_URI} URI. The filter string will be used to match
* various parts of the contact name. The filter argument should be passed
diff --git a/core/java/android/util/JsonReader.java b/core/java/android/util/JsonReader.java
index f139372..f2a86c9 100644
--- a/core/java/android/util/JsonReader.java
+++ b/core/java/android/util/JsonReader.java
@@ -740,8 +740,8 @@ public final class JsonReader implements Closeable {
limit += total;
// if this is the first read, consume an optional byte order mark (BOM) if it exists
- if (bufferStartLine == 1 && bufferStartColumn == 1
- && limit > 0 && buffer[0] == '\ufeff') {
+ if (bufferStartLine == 1 && bufferStartColumn == 1
+ && limit > 0 && buffer[0] == '\ufeff') {
pos++;
bufferStartColumn--;
}
@@ -852,7 +852,7 @@ public final class JsonReader implements Closeable {
private boolean skipTo(String toFind) throws IOException {
outer:
- for (; pos + toFind.length() < limit || fillBuffer(toFind.length()); pos++) {
+ for (; pos + toFind.length() <= limit || fillBuffer(toFind.length()); pos++) {
for (int c = 0; c < toFind.length(); c++) {
if (buffer[pos + c] != toFind.charAt(c)) {
continue outer;
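
The skipTo() fix above is a one-character boundary correction: limit counts the valid characters in the buffer, so a candidate match starting at pos fits only when pos + toFind.length() <= limit; the old strict < rejected a match that ends exactly at the buffer boundary. A minimal standalone sketch of that boundary condition (plain C++, not the Android class):

    #include <cstring>
    #include <iostream>
    #include <string>

    // Returns true when a match of 'needle' starting at 'pos' lies entirely
    // within the first 'limit' valid characters of the buffer.
    static bool matchFits(size_t pos, size_t limit, const std::string& needle) {
        return pos + needle.size() <= limit;   // a strict '<' would reject a
                                               // match ending exactly at 'limit'
    }

    int main() {
        const char buffer[] = "/* comment */";   // 13 valid characters
        const size_t limit = std::strlen(buffer);
        const std::string needle = "*/";

        // The closing "*/" starts at pos 11 and ends at 13 == limit.
        std::cout << std::boolalpha << matchFits(11, limit, needle) << "\n";
        return 0;
    }
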
diff --git a/core/java/android/view/View.java b/core/java/android/view/View.java
index ecb391d..c68b01c 100644
--- a/core/java/android/view/View.java
+++ b/core/java/android/view/View.java
@@ -12028,12 +12028,13 @@ public class View implements Drawable.Callback2, KeyEvent.Callback, Accessibilit
mPrivateFlags |= FORCE_LAYOUT;
mPrivateFlags |= INVALIDATED;
- if (mLayoutParams != null && mParent != null) {
- mLayoutParams.resolveWithDirection(getResolvedLayoutDirection());
- }
-
- if (mParent != null && !mParent.isLayoutRequested()) {
- mParent.requestLayout();
+ if (mParent != null) {
+ if (mLayoutParams != null) {
+ mLayoutParams.resolveWithDirection(getResolvedLayoutDirection());
+ }
+ if (!mParent.isLayoutRequested()) {
+ mParent.requestLayout();
+ }
}
}
diff --git a/core/java/android/view/ViewGroup.java b/core/java/android/view/ViewGroup.java
index 92a8ce7..6f90971 100644
--- a/core/java/android/view/ViewGroup.java
+++ b/core/java/android/view/ViewGroup.java
@@ -2854,7 +2854,7 @@ public abstract class ViewGroup extends View implements ViewParent, ViewManager
// display lists to render, force an invalidate to allow the animation to
// continue drawing another frame
invalidate(true);
- if (a instanceof AlphaAnimation) {
+ if (a.hasAlpha()) {
// alpha animations should cause the child to recreate its display list
child.invalidate(true);
}
diff --git a/core/java/android/view/animation/AlphaAnimation.java b/core/java/android/view/animation/AlphaAnimation.java
index 651fe45..c4d9afc 100644
--- a/core/java/android/view/animation/AlphaAnimation.java
+++ b/core/java/android/view/animation/AlphaAnimation.java
@@ -78,4 +78,12 @@ public class AlphaAnimation extends Animation {
public boolean willChangeBounds() {
return false;
}
+
+ /**
+ * @hide
+ */
+ @Override
+ public boolean hasAlpha() {
+ return true;
+ }
}
diff --git a/core/java/android/view/animation/Animation.java b/core/java/android/view/animation/Animation.java
index 87c759c..b7dfabc 100644
--- a/core/java/android/view/animation/Animation.java
+++ b/core/java/android/view/animation/Animation.java
@@ -1001,6 +1001,15 @@ public abstract class Animation implements Cloneable {
}
/**
+ * Return true if this animation changes the view's alpha property.
+ *
+ * @hide
+ */
+ public boolean hasAlpha() {
+ return false;
+ }
+
+ /**
* Utility class to parse a string description of a size.
*/
protected static class Description {
diff --git a/core/java/android/view/animation/AnimationSet.java b/core/java/android/view/animation/AnimationSet.java
index 873ce53..4f2542b 100644
--- a/core/java/android/view/animation/AnimationSet.java
+++ b/core/java/android/view/animation/AnimationSet.java
@@ -43,6 +43,8 @@ public class AnimationSet extends Animation {
private static final int PROPERTY_CHANGE_BOUNDS_MASK = 0x80;
private int mFlags = 0;
+ private boolean mDirty;
+ private boolean mHasAlpha;
private ArrayList<Animation> mAnimations = new ArrayList<Animation>();
@@ -138,6 +140,28 @@ public class AnimationSet extends Animation {
}
/**
+ * @hide
+ */
+ @Override
+ public boolean hasAlpha() {
+ if (mDirty) {
+ mDirty = mHasAlpha = false;
+
+ final int count = mAnimations.size();
+ final ArrayList<Animation> animations = mAnimations;
+
+ for (int i = 0; i < count; i++) {
+ if (animations.get(i).hasAlpha()) {
+ mHasAlpha = true;
+ break;
+ }
+ }
+ }
+
+ return mHasAlpha;
+ }
+
+ /**
* <p>Sets the duration of every child animation.</p>
*
* @param durationMillis the duration of the animation, in milliseconds, for
@@ -175,6 +199,8 @@ public class AnimationSet extends Animation {
mLastEnd = Math.max(mLastEnd, a.getStartOffset() + a.getDuration());
mDuration = mLastEnd - mStartOffset;
}
+
+ mDirty = true;
}
/**
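
The ViewGroup, Animation, and AnimationSet hunks above replace an instanceof AlphaAnimation check with a polymorphic hasAlpha() query, so a composite AnimationSet that merely contains an alpha animation is detected as well; the set caches the answer and recomputes it only after addAnimation() marks it dirty. A minimal sketch of the same composite-with-cached-flag pattern in C++ (the names are illustrative stand-ins, not the Android classes):

    #include <memory>
    #include <vector>

    struct Animation {
        virtual ~Animation() {}
        virtual bool hasAlpha() const { return false; }
    };

    struct AlphaAnimation : Animation {
        bool hasAlpha() const override { return true; }
    };

    struct AnimationSet : Animation {
        void addAnimation(std::unique_ptr<Animation> a) {
            mAnimations.push_back(std::move(a));
            mDirty = true;                          // invalidate the cached flag
        }
        bool hasAlpha() const override {
            if (mDirty) {                           // recompute lazily
                mDirty = false;
                mHasAlpha = false;
                for (const auto& a : mAnimations) {
                    if (a->hasAlpha()) { mHasAlpha = true; break; }
                }
            }
            return mHasAlpha;
        }
    private:
        std::vector<std::unique_ptr<Animation>> mAnimations;
        mutable bool mDirty = false;
        mutable bool mHasAlpha = false;
    };
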
diff --git a/core/java/com/android/internal/widget/ActionBarView.java b/core/java/com/android/internal/widget/ActionBarView.java
index 8b74f3d..09262e0 100644
--- a/core/java/com/android/internal/widget/ActionBarView.java
+++ b/core/java/com/android/internal/widget/ActionBarView.java
@@ -286,8 +286,11 @@ public class ActionBarView extends AbsActionBarView {
public void setSplitActionBar(boolean splitActionBar) {
if (mSplitActionBar != splitActionBar) {
if (mMenuView != null) {
+ final ViewGroup oldParent = (ViewGroup) mMenuView.getParent();
+ if (oldParent != null) {
+ oldParent.removeView(mMenuView);
+ }
if (splitActionBar) {
- removeView(mMenuView);
if (mSplitView != null) {
mSplitView.addView(mMenuView);
}
@@ -333,7 +336,10 @@ public class ActionBarView extends AbsActionBarView {
MenuBuilder builder = (MenuBuilder) menu;
mOptionsMenu = builder;
if (mMenuView != null) {
- removeView(mMenuView);
+ final ViewGroup oldParent = (ViewGroup) mMenuView.getParent();
+ if (oldParent != null) {
+ oldParent.removeView(mMenuView);
+ }
}
if (mActionMenuPresenter == null) {
mActionMenuPresenter = new ActionMenuPresenter();
@@ -352,6 +358,10 @@ public class ActionBarView extends AbsActionBarView {
builder.addMenuPresenter(mActionMenuPresenter);
builder.addMenuPresenter(mExpandedMenuPresenter);
menuView = (ActionMenuView) mActionMenuPresenter.getMenuView(this);
+ final ViewGroup oldParent = (ViewGroup) menuView.getParent();
+ if (oldParent != null && oldParent != this) {
+ oldParent.removeView(menuView);
+ }
addView(menuView, layoutParams);
} else {
mActionMenuPresenter.setExpandedActionViewsExclusive(false);
@@ -366,6 +376,10 @@ public class ActionBarView extends AbsActionBarView {
builder.addMenuPresenter(mExpandedMenuPresenter);
menuView = (ActionMenuView) mActionMenuPresenter.getMenuView(this);
if (mSplitView != null) {
+ final ViewGroup oldParent = (ViewGroup) menuView.getParent();
+ if (oldParent != null && oldParent != mSplitView) {
+ oldParent.removeView(menuView);
+ }
mSplitView.addView(menuView, layoutParams);
} else {
// We'll add this later if we missed it this time.
diff --git a/core/res/res/anim/screen_rotate_minus_90_enter.xml b/core/res/res/anim/screen_rotate_minus_90_enter.xml
index 30518e0..61aa72a 100644
--- a/core/res/res/anim/screen_rotate_minus_90_enter.xml
+++ b/core/res/res/anim/screen_rotate_minus_90_enter.xml
@@ -19,11 +19,6 @@
<set xmlns:android="http://schemas.android.com/apk/res/android"
android:shareInterpolator="false">
- <scale android:fromXScale="100%p" android:toXScale="100%"
- android:fromYScale="100%p" android:toYScale="100%"
- android:pivotX="50%" android:pivotY="50%"
- android:interpolator="@interpolator/decelerate_quint"
- android:duration="@android:integer/config_mediumAnimTime" />
<rotate android:fromDegrees="-90" android:toDegrees="0"
android:pivotX="50%" android:pivotY="50%"
android:interpolator="@interpolator/decelerate_quint"
diff --git a/core/res/res/anim/screen_rotate_plus_90_enter.xml b/core/res/res/anim/screen_rotate_plus_90_enter.xml
index 20943c8..53b0ccd 100644
--- a/core/res/res/anim/screen_rotate_plus_90_enter.xml
+++ b/core/res/res/anim/screen_rotate_plus_90_enter.xml
@@ -19,11 +19,6 @@
<set xmlns:android="http://schemas.android.com/apk/res/android"
android:shareInterpolator="false">
- <scale android:fromXScale="100%p" android:toXScale="100%"
- android:fromYScale="100%p" android:toYScale="100%"
- android:pivotX="50%" android:pivotY="50%"
- android:interpolator="@interpolator/decelerate_quint"
- android:duration="@android:integer/config_mediumAnimTime" />
<rotate android:fromDegrees="90" android:toDegrees="0"
android:pivotX="50%" android:pivotY="50%"
android:interpolator="@interpolator/decelerate_quint"
diff --git a/docs/html/guide/developing/tools/adb.jd b/docs/html/guide/developing/tools/adb.jd
index 78d12ef..d32cf66 100644
--- a/docs/html/guide/developing/tools/adb.jd
+++ b/docs/html/guide/developing/tools/adb.jd
@@ -280,7 +280,7 @@ instance, see <a href="{@docRoot}guide/developing/building/index.html">Building
<td>Run PPP over USB.
<ul>
<li><code>&lt;tty&gt;</code> &mdash; the tty for PPP stream. For example <code>dev:/dev/omap_csmi_ttyl</code>. </li>
-<li><code>[parm]... </code> &mdash zero or more PPP/PPPD options, such as <code>defaultroute</code>, <code>local</code>, <code>notty</code>, etc.</li></ul>
+<li><code>[parm]... </code> &mdash; zero or more PPP/PPPD options, such as <code>defaultroute</code>, <code>local</code>, <code>notty</code>, etc.</li></ul>
<p>Note that you should not automatically start a PPP connection. </p></td>
<td></td>
diff --git a/docs/html/guide/topics/intents/intents-filters.jd b/docs/html/guide/topics/intents/intents-filters.jd
index 5905214..3f94553 100644
--- a/docs/html/guide/topics/intents/intents-filters.jd
+++ b/docs/html/guide/topics/intents/intents-filters.jd
@@ -927,7 +927,7 @@ as described by its two intent filters:
<p>
The first, primary, purpose of this activity is to enable the user to
-interact with a single note &mdash to either {@code VIEW} the note or
+interact with a single note &mdash; to either {@code VIEW} the note or
{@code EDIT} it. (The {@code EDIT_NOTE} category is a synonym for
{@code EDIT}.) The intent would contain the URI for data matching the
MIME type <code>vnd.android.cursor.item/vnd.google.note</code> &mdash;
diff --git a/docs/html/guide/topics/manifest/activity-element.jd b/docs/html/guide/topics/manifest/activity-element.jd
index 3486212..743832c 100644
--- a/docs/html/guide/topics/manifest/activity-element.jd
+++ b/docs/html/guide/topics/manifest/activity-element.jd
@@ -710,7 +710,7 @@ the soft keyboard.</li>
The setting must be one of the values listed in the following table, or a
combination of one "{@code state...}" value plus one "{@code adjust...}"
value. Setting multiple values in either group &mdash; multiple
-"{@code state...}" values, for example &mdash has undefined results.
+"{@code state...}" values, for example &mdash; has undefined results.
Individual values are separated by a vertical bar ({@code |}). For example:
</p>
@@ -801,4 +801,4 @@ Level 3.</dd>
<dt>see also:</dt>
<dd><code><a href="{@docRoot}guide/topics/manifest/application-element.html">&lt;application&gt;</a></code>
<br/><code><a href="{@docRoot}guide/topics/manifest/activity-alias-element.html">&lt;activity-alias&gt;</a></code></dd>
-</dl> \ No newline at end of file
+</dl>
diff --git a/docs/html/guide/topics/providers/content-providers.jd b/docs/html/guide/topics/providers/content-providers.jd
index 2a84c26..513886a 100644
--- a/docs/html/guide/topics/providers/content-providers.jd
+++ b/docs/html/guide/topics/providers/content-providers.jd
@@ -277,7 +277,7 @@ are returned. All the content providers that come with the platform define
constants for their columns. For example, the
{@link android.provider.Contacts.Phones android.provider.Contacts.Phones} class
defines constants for the names of the columns in the phone table illustrated
-earlier &mdash {@code _ID}, {@code NUMBER}, {@code NUMBER_KEY}, {@code NAME},
+earlier &mdash; {@code _ID}, {@code NUMBER}, {@code NUMBER_KEY}, {@code NAME},
and so on.</li>
<li><p>A filter detailing which rows to return, formatted as an SQL {@code WHERE}
diff --git a/graphics/jni/android_renderscript_RenderScript.cpp b/graphics/jni/android_renderscript_RenderScript.cpp
index 4a85faf..3476bd5 100644
--- a/graphics/jni/android_renderscript_RenderScript.cpp
+++ b/graphics/jni/android_renderscript_RenderScript.cpp
@@ -504,6 +504,7 @@ nAllocationCopyToBitmap(JNIEnv *_env, jobject _this, RsContext con, jint alloc,
void* ptr = bitmap.getPixels();
rsAllocationCopyToBitmap(con, (RsAllocation)alloc, ptr, bitmap.getSize());
bitmap.unlockPixels();
+ bitmap.notifyPixelsChanged();
}
static void ReleaseBitmapCallback(void *bmp)
diff --git a/include/gui/ISurfaceTexture.h b/include/gui/ISurfaceTexture.h
index bc630ae..1eda646 100644
--- a/include/gui/ISurfaceTexture.h
+++ b/include/gui/ISurfaceTexture.h
@@ -51,7 +51,7 @@ protected:
// the given slot index, and the client is expected to mirror the
// slot->buffer mapping so that it's not necessary to transfer a
// GraphicBuffer for every dequeue operation.
- virtual sp<GraphicBuffer> requestBuffer(int slot) = 0;
+ virtual status_t requestBuffer(int slot, sp<GraphicBuffer>* buf) = 0;
// setBufferCount sets the number of buffer slots available. Calling this
// will also cause all buffer slots to be emptied. The caller should empty
@@ -94,12 +94,6 @@ protected:
virtual status_t setTransform(uint32_t transform) = 0;
virtual status_t setScalingMode(int mode) = 0;
- // getAllocator retrieves the binder object that must be referenced as long
- // as the GraphicBuffers dequeued from this ISurfaceTexture are referenced.
- // Holding this binder reference prevents SurfaceFlinger from freeing the
- // buffers before the client is done with them.
- virtual sp<IBinder> getAllocator() = 0;
-
// query retrieves some information for this surface
// 'what' tokens allowed are that of android_natives.h
virtual int query(int what, int* value) = 0;
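
The requestBuffer() change above moves from returning an sp<GraphicBuffer> (where a null return conflated "slot has no buffer" with "call failed") to returning a status_t and filling an out-parameter, which is exactly how SurfaceTextureClient consumes it later in this change. A small self-contained sketch of the new calling convention (stand-in types so it compiles on its own; not the AOSP classes):

    #include <cstdio>
    #include <memory>

    // Stand-ins; the real code uses android::status_t, sp<GraphicBuffer>
    // and a binder proxy.
    using status_t = int;
    enum { NO_ERROR = 0, BAD_VALUE = -22 };
    struct GraphicBuffer { int slot = -1; };

    // New-style requestBuffer: the status code and the buffer travel
    // separately, so the caller can propagate the real error instead of
    // guessing NO_MEMORY from a null pointer.
    static status_t requestBuffer(int slot, std::shared_ptr<GraphicBuffer>* buf) {
        if (slot < 0 || slot >= 32) {
            return BAD_VALUE;
        }
        *buf = std::make_shared<GraphicBuffer>();
        (*buf)->slot = slot;
        return NO_ERROR;
    }

    int main() {
        std::shared_ptr<GraphicBuffer> gbuf;
        status_t err = requestBuffer(5, &gbuf);
        if (err != NO_ERROR) {
            std::fprintf(stderr, "requestBuffer failed: %d\n", err);
            return 1;
        }
        std::printf("got buffer for slot %d\n", gbuf->slot);
        return 0;
    }
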
diff --git a/include/gui/SurfaceTexture.h b/include/gui/SurfaceTexture.h
index 945f4bc..134c208 100644
--- a/include/gui/SurfaceTexture.h
+++ b/include/gui/SurfaceTexture.h
@@ -69,7 +69,7 @@ public:
// SurfaceTexture object (i.e. they are not owned by the client).
virtual status_t setBufferCount(int bufferCount);
- virtual sp<GraphicBuffer> requestBuffer(int buf);
+ virtual status_t requestBuffer(int slot, sp<GraphicBuffer>* buf);
// dequeueBuffer gets the next buffer slot index for the client to use. If a
// buffer slot is available then that slot index is written to the location
@@ -190,6 +190,17 @@ public:
// getCurrentScalingMode returns the scaling mode of the current buffer
uint32_t getCurrentScalingMode() const;
+ // abandon frees all the buffers and puts the SurfaceTexture into the
+ // 'abandoned' state. Once put in this state the SurfaceTexture can never
+ // leave it. When in the 'abandoned' state, all methods of the
+ // ISurfaceTexture interface will fail with the NO_INIT error.
+ //
+ // Note that while calling this method causes all the buffers to be freed
+ // from the perspective of the SurfaceTexture, if there are additional
+ // references on the buffers (e.g. if a buffer is referenced by a client or
+ // by OpenGL ES as a texture) then those buffers will remain allocated.
+ void abandon();
+
// dump our state in a String
void dump(String8& result) const;
void dump(String8& result, const char* prefix, char* buffer, size_t SIZE) const;
@@ -343,8 +354,7 @@ private:
// mCurrentTextureBuf is the graphic buffer of the current texture. It's
// possible that this buffer is not associated with any buffer slot, so we
- // must track it separately in order to properly use
- // IGraphicBufferAlloc::freeAllGraphicBuffersExcept.
+ // must track it separately in order to support the getCurrentBuffer method.
sp<GraphicBuffer> mCurrentTextureBuf;
// mCurrentCrop is the crop rectangle that applies to the current texture.
@@ -412,6 +422,13 @@ private:
typedef Vector<int> Fifo;
Fifo mQueue;
+ // mAbandoned indicates that the SurfaceTexture will no longer be used to
+ // consume image buffers pushed to it using the ISurfaceTexture interface.
+ // It is initialized to false, and set to true in the abandon method. A
+ // SurfaceTexture that has been abandoned will return the NO_INIT error from
+ // all ISurfaceTexture methods capable of returning an error.
+ bool mAbandoned;
+
// mMutex is the mutex used to prevent concurrent access to the member
// variables of SurfaceTexture objects. It must be locked whenever the
// member variables are accessed.
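
abandon(), added above, frees all buffers, sets mAbandoned, and signals mDequeueCondition so that a producer blocked inside dequeueBuffer() wakes up and fails with NO_INIT; the AbandonUnblocksDequeueBuffer test later in this change exercises exactly that path. A condensed self-contained sketch of the wait/abandon interaction (stand-in types, not the AOSP implementation):

    #include <condition_variable>
    #include <cstdio>
    #include <mutex>
    #include <thread>

    // The consumer flips an 'abandoned' flag and signals the condition so a
    // producer waiting for the FIFO to drain returns an error instead of
    // blocking forever (NO_INIT in the real code).
    struct FakeSurfaceTexture {
        std::mutex mutex;
        std::condition_variable dequeueCondition;
        bool abandoned = false;
        int queuedFrames = 2;                    // pretend the FIFO is full

        int dequeueBuffer() {
            std::unique_lock<std::mutex> lock(mutex);
            while (queuedFrames > 0) {           // wait for the FIFO to drain
                if (abandoned) {
                    return -19;                  // stand-in for NO_INIT
                }
                dequeueCondition.wait(lock);
            }
            return 0;                            // stand-in for NO_ERROR
        }

        void abandon() {
            std::lock_guard<std::mutex> lock(mutex);
            abandoned = true;                    // the real code also frees buffers
            dequeueCondition.notify_all();
        }
    };

    int main() {
        FakeSurfaceTexture st;
        std::thread producer([&st] {
            std::printf("dequeueBuffer returned %d\n", st.dequeueBuffer());
        });
        st.abandon();                            // unblocks (or preempts) the producer
        producer.join();
        return 0;
    }
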
diff --git a/include/gui/SurfaceTextureClient.h b/include/gui/SurfaceTextureClient.h
index 829d8ab..56f029f 100644
--- a/include/gui/SurfaceTextureClient.h
+++ b/include/gui/SurfaceTextureClient.h
@@ -106,10 +106,6 @@ private:
// interactions with the server using this interface.
sp<ISurfaceTexture> mSurfaceTexture;
- // mAllocator is the binder object that is referenced to prevent the
- // dequeued buffers from being freed prematurely.
- sp<IBinder> mAllocator;
-
// mSlots stores the buffers that have been allocated for each buffer slot.
// It is initialized to null pointers, and gets filled in with the result of
// ISurfaceTexture::requestBuffer when the client dequeues a buffer from a
diff --git a/include/media/IMediaRecorder.h b/include/media/IMediaRecorder.h
index a73267d..007aea6 100644
--- a/include/media/IMediaRecorder.h
+++ b/include/media/IMediaRecorder.h
@@ -26,6 +26,7 @@ class Surface;
class ICamera;
class ICameraRecordingProxy;
class IMediaRecorderClient;
+class ISurfaceTexture;
class IMediaRecorder: public IInterface
{
@@ -55,6 +56,7 @@ public:
virtual status_t init() = 0;
virtual status_t close() = 0;
virtual status_t release() = 0;
+ virtual sp<ISurfaceTexture> querySurfaceMediaSource() = 0;
};
// ----------------------------------------------------------------------------
diff --git a/include/media/MediaRecorderBase.h b/include/media/MediaRecorderBase.h
index 1c08969..ef799f5 100644
--- a/include/media/MediaRecorderBase.h
+++ b/include/media/MediaRecorderBase.h
@@ -26,6 +26,7 @@ namespace android {
class ICameraRecordingProxy;
class Surface;
+class ISurfaceTexture;
struct MediaRecorderBase {
MediaRecorderBase() {}
@@ -54,6 +55,7 @@ struct MediaRecorderBase {
virtual status_t reset() = 0;
virtual status_t getMaxAmplitude(int *max) = 0;
virtual status_t dump(int fd, const Vector<String16>& args) const = 0;
+ virtual sp<ISurfaceTexture> querySurfaceMediaSource() const = 0;
private:
MediaRecorderBase(const MediaRecorderBase &);
diff --git a/include/media/mediarecorder.h b/include/media/mediarecorder.h
index af12d3c..72d3736 100644
--- a/include/media/mediarecorder.h
+++ b/include/media/mediarecorder.h
@@ -31,12 +31,15 @@ class Surface;
class IMediaRecorder;
class ICamera;
class ICameraRecordingProxy;
+class ISurfaceTexture;
+class SurfaceTextureClient;
typedef void (*media_completion_f)(status_t status, void *cookie);
enum video_source {
VIDEO_SOURCE_DEFAULT = 0,
VIDEO_SOURCE_CAMERA = 1,
+ VIDEO_SOURCE_GRALLOC_BUFFER = 2,
VIDEO_SOURCE_LIST_END // must be last - used to validate audio source type
};
@@ -226,6 +229,7 @@ public:
status_t close();
status_t release();
void notify(int msg, int ext1, int ext2);
+ sp<ISurfaceTexture> querySurfaceMediaSourceFromMediaServer();
private:
void doCleanUp();
@@ -233,6 +237,12 @@ private:
sp<IMediaRecorder> mMediaRecorder;
sp<MediaRecorderListener> mListener;
+
+ // Reference to the ISurfaceTexture
+ // used for encoding GL frames. It is useful only when the
+ // video source is set to VIDEO_SOURCE_GRALLOC_BUFFER
+ sp<ISurfaceTexture> mSurfaceMediaSource;
+
media_recorder_states mCurrentState;
bool mIsAudioSourceSet;
bool mIsVideoSourceSet;
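
mSurfaceMediaSource caches the ISurfaceTexture that querySurfaceMediaSourceFromMediaServer() returns when the video source is VIDEO_SOURCE_GRALLOC_BUFFER: instead of camera frames, the encoder consumes gralloc buffers queued by the application (for example from GL rendering). A hedged sketch of the intended client-side call order, inferred from these headers rather than copied from AOSP (the SurfaceTextureClient constructor and the exact sequencing are assumptions):

    #include <media/mediarecorder.h>
    #include <gui/SurfaceTextureClient.h>

    using namespace android;

    // Sketch only: configure a recorder for gralloc-buffer input, then wrap
    // the media-server-side SurfaceMediaSource in an ANativeWindow so a
    // producer (EGL/GL or any gralloc client) can queue frames into it.
    static status_t startGrallocRecording(const sp<MediaRecorder>& recorder, int fd) {
        recorder->setVideoSource(VIDEO_SOURCE_GRALLOC_BUFFER);
        recorder->setOutputFormat(OUTPUT_FORMAT_MPEG_4);
        recorder->setVideoEncoder(VIDEO_ENCODER_H264);
        recorder->setOutputFile(fd, 0, 0);
        status_t err = recorder->prepare();
        if (err != OK) return err;

        // The ISurfaceTexture lives in the media server process.
        sp<ISurfaceTexture> st = recorder->querySurfaceMediaSourceFromMediaServer();
        if (st == NULL) return UNKNOWN_ERROR;

        // Assumed constructor: SurfaceTextureClient(const sp<ISurfaceTexture>&).
        sp<ANativeWindow> window = new SurfaceTextureClient(st);

        return recorder->start();   // frames queued to 'window' get encoded
    }
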
diff --git a/include/media/stagefright/DataSource.h b/include/media/stagefright/DataSource.h
index 48d1464..713af92 100644
--- a/include/media/stagefright/DataSource.h
+++ b/include/media/stagefright/DataSource.h
@@ -20,6 +20,7 @@
#include <sys/types.h>
+#include <media/stagefright/MediaErrors.h>
#include <utils/Errors.h>
#include <utils/KeyedVector.h>
#include <utils/List.h>
@@ -61,6 +62,10 @@ public:
return 0;
}
+ virtual status_t reconnectAtOffset(off64_t offset) {
+ return ERROR_UNSUPPORTED;
+ }
+
////////////////////////////////////////////////////////////////////////////
bool sniff(String8 *mimeType, float *confidence, sp<AMessage> *meta);
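
reconnectAtOffset() gives callers a way to ask a data source to re-issue its underlying request at a new offset, presumably so the caching layer (NuCachedSource2 is touched in this change) can restart an HTTP fetch after a seek; the base class declines by default, so sources that cannot reconnect are unaffected. A small stand-in sketch of that default-unsupported virtual pattern (types and names are illustrative, not the stagefright classes):

    #include <cstdint>
    #include <cstdio>

    using status_t = int;
    enum { OK = 0, ERROR_UNSUPPORTED = -1010 };

    struct DataSource {
        virtual ~DataSource() {}
        virtual status_t reconnectAtOffset(int64_t /*offset*/) {
            return ERROR_UNSUPPORTED;        // most sources cannot reconnect
        }
    };

    struct HttpSource : DataSource {
        status_t reconnectAtOffset(int64_t offset) override {
            // A network-backed source would re-issue its request here.
            std::printf("re-issuing request with Range: bytes=%lld-\n",
                        (long long)offset);
            return OK;
        }
    };

    int main() {
        HttpSource http;
        DataSource plain;
        std::printf("%d %d\n", http.reconnectAtOffset(1 << 20),
                    plain.reconnectAtOffset(0));
        return 0;
    }
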
diff --git a/include/media/stagefright/HardwareAPI.h b/include/media/stagefright/HardwareAPI.h
index 946a0aa..32eed3f 100644
--- a/include/media/stagefright/HardwareAPI.h
+++ b/include/media/stagefright/HardwareAPI.h
@@ -99,6 +99,13 @@ struct GetAndroidNativeBufferUsageParams {
OMX_U32 nUsage; // OUT
};
+// An enum value, OMX_COLOR_FormatAndroidOpaque, indicating an opaque color
+// format is declared in media/stagefright/openmax/OMX_IVCommon.h.
+// It informs the encoder that the actual
+// color format will be relayed by the gralloc buffers.
+// OMX_COLOR_FormatAndroidOpaque = 0x7F000001,
+
+
} // namespace android
extern android::OMXPluginBase *createOMXPlugin();
diff --git a/include/media/stagefright/MediaSource.h b/include/media/stagefright/MediaSource.h
index 37dbcd8..3818e63 100644
--- a/include/media/stagefright/MediaSource.h
+++ b/include/media/stagefright/MediaSource.h
@@ -29,7 +29,7 @@ namespace android {
class MediaBuffer;
class MetaData;
-struct MediaSource : public RefBase {
+struct MediaSource : public virtual RefBase {
MediaSource();
// To be called before any other methods on this object, except
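
MediaSource now derives virtually from RefBase because SurfaceMediaSource (added below) reaches RefBase along two paths: through BnSurfaceTexture (via IInterface, which already uses a virtual base) and through MediaSource. The virtual base keeps a single RefBase subobject, so sp<> reference counting and the conversion to RefBase* stay unambiguous. A minimal diamond sketch with stand-in names:

    #include <cstdio>

    // With two non-virtual paths to RefBase there would be two subobjects,
    // two refcounts, and an ambiguous conversion; the virtual base keeps
    // exactly one. Names are stand-ins, not the AOSP classes.
    struct RefBase {
        int strongCount = 0;
        virtual ~RefBase() {}
    };

    struct BnSurfaceTexture : public virtual RefBase {};   // binder side
    struct MediaSource      : public virtual RefBase {};   // stagefright side

    struct SurfaceMediaSource : public BnSurfaceTexture, public MediaSource {};

    int main() {
        SurfaceMediaSource s;
        RefBase* base = &s;      // unambiguous only because the base is virtual
        base->strongCount++;     // one shared refcount, not two
        std::printf("count=%d\n", s.strongCount);
        return 0;
    }
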
diff --git a/include/media/stagefright/SurfaceMediaSource.h b/include/media/stagefright/SurfaceMediaSource.h
new file mode 100644
index 0000000..56bd9c3
--- /dev/null
+++ b/include/media/stagefright/SurfaceMediaSource.h
@@ -0,0 +1,350 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_GUI_SURFACEMEDIASOURCE_H
+#define ANDROID_GUI_SURFACEMEDIASOURCE_H
+
+#include <gui/ISurfaceTexture.h>
+
+#include <utils/threads.h>
+#include <utils/Vector.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MediaBuffer.h>
+
+namespace android {
+// ----------------------------------------------------------------------------
+
+class IGraphicBufferAlloc;
+class String8;
+class GraphicBuffer;
+
+class SurfaceMediaSource : public BnSurfaceTexture, public MediaSource,
+ public MediaBufferObserver {
+public:
+ enum { MIN_UNDEQUEUED_BUFFERS = 3 };
+ enum {
+ MIN_ASYNC_BUFFER_SLOTS = MIN_UNDEQUEUED_BUFFERS + 1,
+ MIN_SYNC_BUFFER_SLOTS = MIN_UNDEQUEUED_BUFFERS
+ };
+ enum { NUM_BUFFER_SLOTS = 32 };
+ enum { NO_CONNECTED_API = 0 };
+
+ struct FrameAvailableListener : public virtual RefBase {
+ // onFrameAvailable() is called from queueBuffer() if the FIFO is
+ // empty. You can use SurfaceMediaSource::getQueuedCount() to
+ // figure out if there are more frames waiting.
+ // This is called without any lock held and can be called concurrently
+ // by multiple threads.
+ virtual void onFrameAvailable() = 0;
+ };
+
+ SurfaceMediaSource(uint32_t bufW, uint32_t bufH);
+
+ virtual ~SurfaceMediaSource();
+
+
+ // For the MediaSource interface for use by StageFrightRecorder:
+ virtual status_t start(MetaData *params = NULL);
+ virtual status_t stop();
+ virtual status_t read(
+ MediaBuffer **buffer, const ReadOptions *options = NULL);
+ virtual sp<MetaData> getFormat();
+
+ // Get / Set the frame rate used for encoding. Default fps = 30
+ status_t setFrameRate(int32_t fps);
+ int32_t getFrameRate() const;
+
+ // The call for the StageFrightRecorder to tell us that
+ // it is done using the MediaBuffer data so that its state
+ // can be set to FREE for dequeuing
+ virtual void signalBufferReturned(MediaBuffer* buffer);
+ // end of MediaSource interface
+
+ uint32_t getBufferCount() const { return mBufferCount; }
+
+
+ // setBufferCount updates the number of available buffer slots. After
+ // calling this all buffer slots are both unallocated and owned by the
+ // SurfaceMediaSource object (i.e. they are not owned by the client).
+ virtual status_t setBufferCount(int bufferCount);
+
+ virtual status_t requestBuffer(int slot, sp<GraphicBuffer>* buf);
+
+ // dequeueBuffer gets the next buffer slot index for the client to use. If a
+ // buffer slot is available then that slot index is written to the location
+ // pointed to by the buf argument and a status of OK is returned. If no
+ // slot is available then a status of -EBUSY is returned and buf is
+ // unmodified.
+ virtual status_t dequeueBuffer(int *buf, uint32_t w, uint32_t h,
+ uint32_t format, uint32_t usage);
+
+ // queueBuffer returns a filled buffer to the SurfaceMediaSource. In addition, a
+ // timestamp must be provided for the buffer. The timestamp is in
+ // nanoseconds, and must be monotonically increasing. Its other semantics
+ // (zero point, etc) are client-dependent and should be documented by the
+ // client.
+ virtual status_t queueBuffer(int buf, int64_t timestamp,
+ uint32_t* outWidth, uint32_t* outHeight, uint32_t* outTransform);
+ virtual void cancelBuffer(int buf);
+
+ // onFrameReceivedLocked informs the buffer consumers (StageFrightRecorder)
+ // or listeners that a frame has been received.
+ // The buffer is not made available for dequeueing immediately. We need to
+ // wait to hear from StageFrightRecorder to set the buffer FREE.
+ // This must be called with the mutex locked.
+ virtual status_t onFrameReceivedLocked();
+
+ virtual status_t setScalingMode(int mode) { return OK; } // no op for encoding
+ virtual int query(int what, int* value);
+
+ // Just conforming to the ISurfaceTexture interface for now
+ virtual status_t setCrop(const Rect& reg) { return OK; }
+ virtual status_t setTransform(uint32_t transform) { return OK; }
+
+ // setSynchronousMode set whether dequeueBuffer is synchronous or
+ // asynchronous. In synchronous mode, dequeueBuffer blocks until
+ // a buffer is available, the currently bound buffer can be dequeued and
+ // queued buffers will be retired in order.
+ // The default mode is synchronous.
+ // TODO: Clarify the minute differences between sync/async
+ // modes (S.Encoder vis-a-vis SurfaceTexture)
+ virtual status_t setSynchronousMode(bool enabled);
+
+ // connect attempts to connect a client API to the SurfaceMediaSource. This
+ // must be called before any other ISurfaceTexture methods are called except
+ // for getAllocator.
+ //
+ // This method will fail if the connect was previously called on the
+ // SurfaceMediaSource and no corresponding disconnect call was made.
+ virtual status_t connect(int api);
+
+ // disconnect attempts to disconnect a client API from the SurfaceMediaSource.
+ // Calling this method will cause any subsequent calls to other
+ // ISurfaceTexture methods to fail except for getAllocator and connect.
+ // Successfully calling connect after this will allow the other methods to
+ // succeed again.
+ //
+ // This method will fail if the the SurfaceMediaSource is not currently
+ // connected to the specified client API.
+ virtual status_t disconnect(int api);
+
+ // getQueuedCount returns the number of queued frames waiting in the
+ // FIFO. In asynchronous mode, this always returns 0 or 1 since
+ // frames are not accumulating in the FIFO.
+ size_t getQueuedCount() const;
+
+ // setBufferCountServer sets the buffer count. If the client has requested
+ // a buffer count using setBufferCount, the server-buffer count will
+ // take effect once the client sets the count back to zero.
+ status_t setBufferCountServer(int bufferCount);
+
+ // getTimestamp retrieves the timestamp associated with the image
+ // set by the most recent call to updateFrameInfoLocked().
+ //
+ // The timestamp is in nanoseconds, and is monotonically increasing. Its
+ // other semantics (zero point, etc) are source-dependent and should be
+ // documented by the source.
+ int64_t getTimestamp();
+
+ // setFrameAvailableListener sets the listener object that will be notified
+ // when a new frame becomes available.
+ void setFrameAvailableListener(const sp<FrameAvailableListener>& listener);
+
+ // getCurrentBuffer returns the buffer associated with the current image.
+ sp<GraphicBuffer> getCurrentBuffer() const;
+
+ // dump our state in a String
+ void dump(String8& result) const;
+ void dump(String8& result, const char* prefix, char* buffer,
+ size_t SIZE) const;
+
+ // isMetaDataStoredInVideoBuffers tells the encoder whether we will
+ // pass metadata through the buffers. Currently, it is hard-coded to true.
+ bool isMetaDataStoredInVideoBuffers() const;
+
+protected:
+
+ // freeAllBuffers frees the resources (both GraphicBuffer and EGLImage) for
+ // all slots.
+ void freeAllBuffers();
+ static bool isExternalFormat(uint32_t format);
+
+private:
+
+ status_t setBufferCountServerLocked(int bufferCount);
+
+ enum { INVALID_BUFFER_SLOT = -1 };
+
+ struct BufferSlot {
+
+ BufferSlot()
+ : mBufferState(BufferSlot::FREE),
+ mRequestBufferCalled(false),
+ mTimestamp(0) {
+ }
+
+ // mGraphicBuffer points to the buffer allocated for this slot or is
+ // NULL if no buffer has been allocated.
+ sp<GraphicBuffer> mGraphicBuffer;
+
+ // BufferState represents the different states in which a buffer slot
+ // can be.
+ enum BufferState {
+ // FREE indicates that the buffer is not currently being used and
+ // will not be used in the future until it gets dequeued and
+ // subsequently queued by the client.
+ FREE = 0,
+
+ // DEQUEUED indicates that the buffer has been dequeued by the
+ // client, but has not yet been queued or canceled. The buffer is
+ // considered 'owned' by the client, and the server should not use
+ // it for anything.
+ //
+ // Note that when in synchronous-mode (mSynchronousMode == true),
+ // the buffer that's currently attached to the texture may be
+ // dequeued by the client. That means that the current buffer can
+ // be in either the DEQUEUED or QUEUED state. In asynchronous mode,
+ // however, the current buffer is always in the QUEUED state.
+ DEQUEUED = 1,
+
+ // QUEUED indicates that the buffer has been queued by the client,
+ // and has not since been made available for the client to dequeue.
+ // Attaching the buffer to the texture does NOT transition the
+ // buffer away from the QUEUED state. However, in Synchronous mode
+ // the current buffer may be dequeued by the client under some
+ // circumstances. See the note about the current buffer in the
+ // documentation for DEQUEUED.
+ QUEUED = 2,
+ };
+
+ // mBufferState is the current state of this buffer slot.
+ BufferState mBufferState;
+
+ // mRequestBufferCalled is used for validating that the client did
+ // call requestBuffer() when told to do so. Technically this is not
+ // needed but useful for debugging and catching client bugs.
+ bool mRequestBufferCalled;
+
+ // mTimestamp is the current timestamp for this buffer slot. This gets
+ // set by queueBuffer each time this slot is queued.
+ int64_t mTimestamp;
+ };
+
+ // mSlots is the array of buffer slots that must be mirrored on the client
+ // side. This allows buffer ownership to be transferred between the client
+ // and server without sending a GraphicBuffer over binder. The entire array
+ // is initialized to NULL at construction time, and buffers are allocated
+ // for a slot when requestBuffer is called with that slot's index.
+ BufferSlot mSlots[NUM_BUFFER_SLOTS];
+
+ // mDefaultWidth holds the default width of allocated buffers. It is used
+ // in requestBuffers() if a width and height of zero is specified.
+ uint32_t mDefaultWidth;
+
+ // mDefaultHeight holds the default height of allocated buffers. It is used
+ // in requestBuffers() if a width and height of zero is specified.
+ uint32_t mDefaultHeight;
+
+ // mPixelFormat holds the pixel format of allocated buffers. It is used
+ // in requestBuffers() if a format of zero is specified.
+ uint32_t mPixelFormat;
+
+ // mBufferCount is the number of buffer slots that the client and server
+ // must maintain. It defaults to MIN_ASYNC_BUFFER_SLOTS and can be changed
+ // by calling setBufferCount or setBufferCountServer
+ int mBufferCount;
+
+ // mClientBufferCount is the number of buffer slots requested by the
+ // client. The default is zero, which means the client doesn't care how
+ // many buffers there are
+ int mClientBufferCount;
+
+ // mServerBufferCount buffer count requested by the server-side
+ int mServerBufferCount;
+
+ // mCurrentSlot is the buffer slot index of the buffer that is currently
+ // being used by the buffer consumer
+ // (e.g. StageFrightRecorder in the case of SurfaceMediaSource or GLTexture
+ // in the case of SurfaceTexture).
+ // It is initialized to INVALID_BUFFER_SLOT,
+ // indicating that no buffer slot is currently bound to the texture. Note,
+ // however, that a value of INVALID_BUFFER_SLOT does not necessarily mean
+ // that no buffer is bound to the texture. A call to setBufferCount will
+ // reset mCurrentSlot to INVALID_BUFFER_SLOT.
+ int mCurrentSlot;
+
+
+ // mCurrentBuf is the graphic buffer of the current slot to be used by
+ // buffer consumer. It's possible that this buffer is not associated
+ // with any buffer slot, so we must track it separately in order to
+ // properly use IGraphicBufferAlloc::freeAllGraphicBuffersExcept.
+ sp<GraphicBuffer> mCurrentBuf;
+
+
+ // mCurrentTimestamp is the timestamp for the current texture. It
+ // gets set to mLastQueuedTimestamp each time updateTexImage is called.
+ int64_t mCurrentTimestamp;
+
+ // mGraphicBufferAlloc is the connection to SurfaceFlinger that is used to
+ // allocate new GraphicBuffer objects.
+ sp<IGraphicBufferAlloc> mGraphicBufferAlloc;
+
+ // mFrameAvailableListener is the listener object that will be called when a
+ // new frame becomes available. If it is not NULL it will be called from
+ // queueBuffer.
+ sp<FrameAvailableListener> mFrameAvailableListener;
+
+ // mSynchronousMode indicates whether we're in synchronous mode or not
+ bool mSynchronousMode;
+
+ // mConnectedApi indicates the API that is currently connected to this
+ // SurfaceTexture. It defaults to NO_CONNECTED_API (= 0), and gets updated
+ // by the connect and disconnect methods.
+ int mConnectedApi;
+
+ // mDequeueCondition condition used for dequeueBuffer in synchronous mode
+ mutable Condition mDequeueCondition;
+
+
+ // mQueue is a FIFO of queued buffers used in synchronous mode
+ typedef Vector<int> Fifo;
+ Fifo mQueue;
+
+ // mMutex is the mutex used to prevent concurrent access to the member
+ // variables of SurfaceMediaSource objects. It must be locked whenever the
+ // member variables are accessed.
+ mutable Mutex mMutex;
+
+ ////////////////////////// For MediaSource
+ // Set to a default of 30 fps if not specified by the client side
+ int32_t mFrameRate;
+
+ // mStarted is a flag to check if the recording has started
+ bool mStarted;
+
+ // mFrameAvailableCondition condition used to indicate whether there
+ // is a frame available for dequeuing
+ Condition mFrameAvailableCondition;
+ Condition mFrameCompleteCondition;
+
+ // Disallow copying, assignment and the default constructor
+ DISALLOW_IMPLICIT_CONSTRUCTORS(SurfaceMediaSource);
+};
+
+// ----------------------------------------------------------------------------
+}; // namespace android
+
+#endif // ANDROID_GUI_SURFACEMEDIASOURCE_H
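
On the consumer side, the header above is driven through the MediaSource interface: start(), then repeated read() calls that block until a queued frame is available, and each returned MediaBuffer must be release()d so signalBufferReturned() can mark the slot FREE for the next dequeue; the DummyRecorder helper added under media/libstagefright/tests presumably drives it the same way. A hedged sketch of that consumer loop (error handling simplified; the encoder hand-off is only indicated in a comment):

    #include <media/stagefright/MediaSource.h>
    #include <media/stagefright/MediaBuffer.h>

    using namespace android;

    // Sketch only: pull encodable frames from a SurfaceMediaSource through
    // its MediaSource interface and hand them back when done.
    static void drainFrames(const sp<MediaSource>& source, int maxFrames) {
        if (source->start() != OK) {
            return;
        }
        for (int i = 0; i < maxFrames; ++i) {
            MediaBuffer* buffer = NULL;
            status_t err = source->read(&buffer);  // blocks until a frame is queued
            if (err != OK || buffer == NULL) {
                break;
            }
            // ... hand buffer->data()/buffer->range_length() to an encoder here ...
            buffer->release();   // triggers signalBufferReturned(), freeing the slot
        }
        source->stop();
    }
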
diff --git a/include/ui/PixelFormat.h b/include/ui/PixelFormat.h
index f46f25c..848c5a1 100644
--- a/include/ui/PixelFormat.h
+++ b/include/ui/PixelFormat.h
@@ -55,7 +55,7 @@ enum {
PIXEL_FORMAT_OPAQUE = -1,
// System chooses an opaque format (no alpha bits required)
-
+
// real pixel formats supported for rendering -----------------------------
PIXEL_FORMAT_RGBA_8888 = HAL_PIXEL_FORMAT_RGBA_8888, // 4x8-bit RGBA
@@ -84,7 +84,7 @@ struct PixelFormatInfo
INDEX_GREEN = 2,
INDEX_BLUE = 3
};
-
+
enum { // components
ALPHA = 1,
RGB = 2,
@@ -98,10 +98,10 @@ struct PixelFormatInfo
uint8_t h;
uint8_t l;
};
-
+
inline PixelFormatInfo() : version(sizeof(PixelFormatInfo)) { }
size_t getScanlineSize(unsigned int width) const;
- size_t getSize(size_t ci) const {
+ size_t getSize(size_t ci) const {
return (ci <= 3) ? (cinfo[ci].h - cinfo[ci].l) : 0;
}
size_t version;
@@ -112,7 +112,7 @@ struct PixelFormatInfo
szinfo cinfo[4];
struct {
uint8_t h_alpha;
- uint8_t l_alpha;
+ uint8_t l_alpha;
uint8_t h_red;
uint8_t l_red;
uint8_t h_green;
diff --git a/libs/gui/ISurfaceTexture.cpp b/libs/gui/ISurfaceTexture.cpp
index be90e2e..55246dc 100644
--- a/libs/gui/ISurfaceTexture.cpp
+++ b/libs/gui/ISurfaceTexture.cpp
@@ -38,7 +38,6 @@ enum {
CANCEL_BUFFER,
SET_CROP,
SET_TRANSFORM,
- GET_ALLOCATOR,
QUERY,
SET_SYNCHRONOUS_MODE,
CONNECT,
@@ -55,18 +54,18 @@ public:
{
}
- virtual sp<GraphicBuffer> requestBuffer(int bufferIdx) {
+ virtual status_t requestBuffer(int bufferIdx, sp<GraphicBuffer>* buf) {
Parcel data, reply;
data.writeInterfaceToken(ISurfaceTexture::getInterfaceDescriptor());
data.writeInt32(bufferIdx);
remote()->transact(REQUEST_BUFFER, data, &reply);
- sp<GraphicBuffer> buffer;
bool nonNull = reply.readInt32();
if (nonNull) {
- buffer = new GraphicBuffer();
- reply.read(*buffer);
+ *buf = new GraphicBuffer();
+ reply.read(**buf);
}
- return buffer;
+ status_t result = reply.readInt32();
+ return result;
}
virtual status_t setBufferCount(int bufferCount)
@@ -144,13 +143,6 @@ public:
return result;
}
- virtual sp<IBinder> getAllocator() {
- Parcel data, reply;
- data.writeInterfaceToken(ISurfaceTexture::getInterfaceDescriptor());
- remote()->transact(GET_ALLOCATOR, data, &reply);
- return reply.readStrongBinder();
- }
-
virtual int query(int what, int* value) {
Parcel data, reply;
data.writeInterfaceToken(ISurfaceTexture::getInterfaceDescriptor());
@@ -200,11 +192,13 @@ status_t BnSurfaceTexture::onTransact(
case REQUEST_BUFFER: {
CHECK_INTERFACE(ISurfaceTexture, data, reply);
int bufferIdx = data.readInt32();
- sp<GraphicBuffer> buffer(requestBuffer(bufferIdx));
+ sp<GraphicBuffer> buffer;
+ int result = requestBuffer(bufferIdx, &buffer);
reply->writeInt32(buffer != 0);
if (buffer != 0) {
reply->write(*buffer);
}
+ reply->writeInt32(result);
return NO_ERROR;
} break;
case SET_BUFFER_COUNT: {
@@ -270,12 +264,6 @@ status_t BnSurfaceTexture::onTransact(
reply->writeInt32(result);
return NO_ERROR;
} break;
- case GET_ALLOCATOR: {
- CHECK_INTERFACE(ISurfaceTexture, data, reply);
- sp<IBinder> result = getAllocator();
- reply->writeStrongBinder(result);
- return NO_ERROR;
- } break;
case QUERY: {
CHECK_INTERFACE(ISurfaceTexture, data, reply);
int value;
diff --git a/libs/gui/SurfaceTexture.cpp b/libs/gui/SurfaceTexture.cpp
index 0f08570..c190195 100644
--- a/libs/gui/SurfaceTexture.cpp
+++ b/libs/gui/SurfaceTexture.cpp
@@ -94,7 +94,8 @@ SurfaceTexture::SurfaceTexture(GLuint tex, bool allowSynchronousMode) :
mTexName(tex),
mSynchronousMode(false),
mAllowSynchronousMode(allowSynchronousMode),
- mConnectedApi(NO_CONNECTED_API) {
+ mConnectedApi(NO_CONNECTED_API),
+ mAbandoned(false) {
LOGV("SurfaceTexture::SurfaceTexture");
sp<ISurfaceComposer> composer(ComposerService::getComposerService());
mGraphicBufferAlloc = composer->createGraphicBufferAlloc();
@@ -150,6 +151,11 @@ status_t SurfaceTexture::setBufferCount(int bufferCount) {
LOGV("SurfaceTexture::setBufferCount");
Mutex::Autolock lock(mMutex);
+ if (mAbandoned) {
+ LOGE("setBufferCount: SurfaceTexture has been abandoned!");
+ return NO_INIT;
+ }
+
if (bufferCount > NUM_BUFFER_SLOTS) {
LOGE("setBufferCount: bufferCount larger than slots available");
return BAD_VALUE;
@@ -199,22 +205,32 @@ status_t SurfaceTexture::setDefaultBufferSize(uint32_t w, uint32_t h)
return OK;
}
-sp<GraphicBuffer> SurfaceTexture::requestBuffer(int buf) {
+status_t SurfaceTexture::requestBuffer(int slot, sp<GraphicBuffer>* buf) {
LOGV("SurfaceTexture::requestBuffer");
Mutex::Autolock lock(mMutex);
- if (buf < 0 || mBufferCount <= buf) {
+ if (mAbandoned) {
+ LOGE("requestBuffer: SurfaceTexture has been abandoned!");
+ return NO_INIT;
+ }
+ if (slot < 0 || mBufferCount <= slot) {
LOGE("requestBuffer: slot index out of range [0, %d]: %d",
- mBufferCount, buf);
- return 0;
+ mBufferCount, slot);
+ return BAD_VALUE;
}
- mSlots[buf].mRequestBufferCalled = true;
- return mSlots[buf].mGraphicBuffer;
+ mSlots[slot].mRequestBufferCalled = true;
+ *buf = mSlots[slot].mGraphicBuffer;
+ return NO_ERROR;
}
status_t SurfaceTexture::dequeueBuffer(int *outBuf, uint32_t w, uint32_t h,
uint32_t format, uint32_t usage) {
LOGV("SurfaceTexture::dequeueBuffer");
+ if (mAbandoned) {
+ LOGE("dequeueBuffer: SurfaceTexture has been abandoned!");
+ return NO_INIT;
+ }
+
if ((w && !h) || (!w && h)) {
LOGE("dequeueBuffer: invalid size: w=%u, h=%u", w, h);
return BAD_VALUE;
@@ -252,6 +268,11 @@ status_t SurfaceTexture::dequeueBuffer(int *outBuf, uint32_t w, uint32_t h,
// wait for the FIFO to drain
while (!mQueue.isEmpty()) {
mDequeueCondition.wait(mMutex);
+ if (mAbandoned) {
+ LOGE("dequeueBuffer: SurfaceTexture was abandoned while "
+ "blocked!");
+ return NO_INIT;
+ }
}
minBufferCountNeeded = mSynchronousMode ?
MIN_SYNC_BUFFER_SLOTS : MIN_ASYNC_BUFFER_SLOTS;
@@ -380,6 +401,11 @@ status_t SurfaceTexture::dequeueBuffer(int *outBuf, uint32_t w, uint32_t h,
status_t SurfaceTexture::setSynchronousMode(bool enabled) {
Mutex::Autolock lock(mMutex);
+ if (mAbandoned) {
+ LOGE("setSynchronousMode: SurfaceTexture has been abandoned!");
+ return NO_INIT;
+ }
+
status_t err = OK;
if (!mAllowSynchronousMode && enabled)
return err;
@@ -410,6 +436,10 @@ status_t SurfaceTexture::queueBuffer(int buf, int64_t timestamp,
{ // scope for the lock
Mutex::Autolock lock(mMutex);
+ if (mAbandoned) {
+ LOGE("queueBuffer: SurfaceTexture has been abandoned!");
+ return NO_INIT;
+ }
if (buf < 0 || buf >= mBufferCount) {
LOGE("queueBuffer: slot index out of range [0, %d]: %d",
mBufferCount, buf);
@@ -475,6 +505,12 @@ status_t SurfaceTexture::queueBuffer(int buf, int64_t timestamp,
void SurfaceTexture::cancelBuffer(int buf) {
LOGV("SurfaceTexture::cancelBuffer");
Mutex::Autolock lock(mMutex);
+
+ if (mAbandoned) {
+ LOGW("cancelBuffer: SurfaceTexture has been abandoned!");
+ return;
+ }
+
if (buf < 0 || buf >= mBufferCount) {
LOGE("cancelBuffer: slot index out of range [0, %d]: %d",
mBufferCount, buf);
@@ -491,6 +527,10 @@ void SurfaceTexture::cancelBuffer(int buf) {
status_t SurfaceTexture::setCrop(const Rect& crop) {
LOGV("SurfaceTexture::setCrop");
Mutex::Autolock lock(mMutex);
+ if (mAbandoned) {
+ LOGE("setCrop: SurfaceTexture has been abandoned!");
+ return NO_INIT;
+ }
mNextCrop = crop;
return OK;
}
@@ -498,6 +538,10 @@ status_t SurfaceTexture::setCrop(const Rect& crop) {
status_t SurfaceTexture::setTransform(uint32_t transform) {
LOGV("SurfaceTexture::setTransform");
Mutex::Autolock lock(mMutex);
+ if (mAbandoned) {
+ LOGE("setTransform: SurfaceTexture has been abandoned!");
+ return NO_INIT;
+ }
mNextTransform = transform;
return OK;
}
@@ -505,6 +549,12 @@ status_t SurfaceTexture::setTransform(uint32_t transform) {
status_t SurfaceTexture::connect(int api) {
LOGV("SurfaceTexture::connect(this=%p, %d)", this, api);
Mutex::Autolock lock(mMutex);
+
+ if (mAbandoned) {
+ LOGE("connect: SurfaceTexture has been abandoned!");
+ return NO_INIT;
+ }
+
int err = NO_ERROR;
switch (api) {
case NATIVE_WINDOW_API_EGL:
@@ -529,6 +579,12 @@ status_t SurfaceTexture::connect(int api) {
status_t SurfaceTexture::disconnect(int api) {
LOGV("SurfaceTexture::disconnect(this=%p, %d)", this, api);
Mutex::Autolock lock(mMutex);
+
+ if (mAbandoned) {
+ LOGE("connect: SurfaceTexture has been abandoned!");
+ return NO_INIT;
+ }
+
int err = NO_ERROR;
switch (api) {
case NATIVE_WINDOW_API_EGL:
@@ -786,11 +842,6 @@ void SurfaceTexture::setFrameAvailableListener(
mFrameAvailableListener = listener;
}
-sp<IBinder> SurfaceTexture::getAllocator() {
- LOGV("SurfaceTexture::getAllocator");
- return mGraphicBufferAlloc->asBinder();
-}
-
void SurfaceTexture::freeAllBuffers() {
for (int i = 0; i < NUM_BUFFER_SLOTS; i++) {
mSlots[i].mGraphicBuffer = 0;
@@ -842,6 +893,12 @@ uint32_t SurfaceTexture::getCurrentScalingMode() const {
int SurfaceTexture::query(int what, int* outValue)
{
Mutex::Autolock lock(mMutex);
+
+ if (mAbandoned) {
+ LOGE("query: SurfaceTexture has been abandoned!");
+ return NO_INIT;
+ }
+
int value;
switch (what) {
case NATIVE_WINDOW_WIDTH:
@@ -868,6 +925,13 @@ int SurfaceTexture::query(int what, int* outValue)
return NO_ERROR;
}
+void SurfaceTexture::abandon() {
+ Mutex::Autolock lock(mMutex);
+ freeAllBuffers();
+ mAbandoned = true;
+ mDequeueCondition.signal();
+}
+
void SurfaceTexture::dump(String8& result) const
{
char buffer[1024];
diff --git a/libs/gui/SurfaceTextureClient.cpp b/libs/gui/SurfaceTextureClient.cpp
index 986fc7e..df0ad5a 100644
--- a/libs/gui/SurfaceTextureClient.cpp
+++ b/libs/gui/SurfaceTextureClient.cpp
@@ -65,9 +65,6 @@ void SurfaceTextureClient::setISurfaceTexture(
const sp<ISurfaceTexture>& surfaceTexture)
{
mSurfaceTexture = surfaceTexture;
-
- // Get a reference to the allocator.
- mAllocator = mSurfaceTexture->getAllocator();
}
sp<ISurfaceTexture> SurfaceTextureClient::getISurfaceTexture() const {
@@ -151,10 +148,11 @@ int SurfaceTextureClient::dequeueBuffer(android_native_buffer_t** buffer) {
}
if ((result & ISurfaceTexture::BUFFER_NEEDS_REALLOCATION) || gbuf == 0) {
- gbuf = mSurfaceTexture->requestBuffer(buf);
- if (gbuf == 0) {
- LOGE("dequeueBuffer: ISurfaceTexture::requestBuffer failed");
- return NO_MEMORY;
+ result = mSurfaceTexture->requestBuffer(buf, &gbuf);
+ if (result != NO_ERROR) {
+ LOGE("dequeueBuffer: ISurfaceTexture::requestBuffer failed: %d",
+ result);
+ return result;
}
mQueryWidth = gbuf->width;
mQueryHeight = gbuf->height;
diff --git a/libs/gui/tests/SurfaceTexture_test.cpp b/libs/gui/tests/SurfaceTexture_test.cpp
index 9abe89d..0fac6cd 100644
--- a/libs/gui/tests/SurfaceTexture_test.cpp
+++ b/libs/gui/tests/SurfaceTexture_test.cpp
@@ -1018,6 +1018,83 @@ TEST_F(SurfaceTextureGLTest, DISABLED_TexturingFromGLFilledRGBABufferPow2) {
EXPECT_TRUE(checkPixel( 3, 52, 153, 153, 153, 153));
}
+TEST_F(SurfaceTextureGLTest, AbandonUnblocksDequeueBuffer) {
+ class ProducerThread : public Thread {
+ public:
+ ProducerThread(const sp<ANativeWindow>& anw):
+ mANW(anw),
+ mDequeueError(NO_ERROR) {
+ }
+
+ virtual ~ProducerThread() {
+ }
+
+ virtual bool threadLoop() {
+ Mutex::Autolock lock(mMutex);
+ ANativeWindowBuffer* anb;
+
+ // Frame 1
+ if (mANW->dequeueBuffer(mANW.get(), &anb) != NO_ERROR) {
+ return false;
+ }
+ if (anb == NULL) {
+ return false;
+ }
+ if (mANW->queueBuffer(mANW.get(), anb)
+ != NO_ERROR) {
+ return false;
+ }
+
+ // Frame 2
+ if (mANW->dequeueBuffer(mANW.get(), &anb) != NO_ERROR) {
+ return false;
+ }
+ if (anb == NULL) {
+ return false;
+ }
+ if (mANW->queueBuffer(mANW.get(), anb)
+ != NO_ERROR) {
+ return false;
+ }
+
+ // Frame 3 - error expected
+ mDequeueError = mANW->dequeueBuffer(mANW.get(), &anb);
+ return false;
+ }
+
+ status_t getDequeueError() {
+ Mutex::Autolock lock(mMutex);
+ return mDequeueError;
+ }
+
+ private:
+ sp<ANativeWindow> mANW;
+ status_t mDequeueError;
+ Mutex mMutex;
+ };
+
+ sp<FrameWaiter> fw(new FrameWaiter);
+ mST->setFrameAvailableListener(fw);
+ ASSERT_EQ(OK, mST->setSynchronousMode(true));
+ ASSERT_EQ(OK, mST->setBufferCountServer(2));
+
+ sp<Thread> pt(new ProducerThread(mANW));
+ pt->run();
+
+ fw->waitForFrame();
+ fw->waitForFrame();
+
+ // Sleep for 100ms to allow the producer thread's dequeueBuffer call to
+ // block waiting for a buffer to become available.
+ usleep(100000);
+
+ mST->abandon();
+
+ pt->requestExitAndWait();
+ ASSERT_EQ(NO_INIT,
+ reinterpret_cast<ProducerThread*>(pt.get())->getDequeueError());
+}
+
/*
* This test is for testing GL -> GL texture streaming via SurfaceTexture. It
* contains functionality to create a producer thread that will perform GL
@@ -1205,7 +1282,7 @@ protected:
sp<FrameCondition> mFC;
};
-TEST_F(SurfaceTextureGLToGLTest, UpdateTexImageBeforeFrameFinishedWorks) {
+TEST_F(SurfaceTextureGLToGLTest, UpdateTexImageBeforeFrameFinishedCompletes) {
class PT : public ProducerThread {
virtual void render() {
glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
@@ -1223,7 +1300,7 @@ TEST_F(SurfaceTextureGLToGLTest, UpdateTexImageBeforeFrameFinishedWorks) {
// TODO: Add frame verification once RGB TEX_EXTERNAL_OES is supported!
}
-TEST_F(SurfaceTextureGLToGLTest, UpdateTexImageAfterFrameFinishedWorks) {
+TEST_F(SurfaceTextureGLToGLTest, UpdateTexImageAfterFrameFinishedCompletes) {
class PT : public ProducerThread {
virtual void render() {
glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
@@ -1241,7 +1318,7 @@ TEST_F(SurfaceTextureGLToGLTest, UpdateTexImageAfterFrameFinishedWorks) {
// TODO: Add frame verification once RGB TEX_EXTERNAL_OES is supported!
}
-TEST_F(SurfaceTextureGLToGLTest, RepeatedUpdateTexImageBeforeFrameFinishedWorks) {
+TEST_F(SurfaceTextureGLToGLTest, RepeatedUpdateTexImageBeforeFrameFinishedCompletes) {
enum { NUM_ITERATIONS = 1024 };
class PT : public ProducerThread {
@@ -1269,7 +1346,7 @@ TEST_F(SurfaceTextureGLToGLTest, RepeatedUpdateTexImageBeforeFrameFinishedWorks)
}
}
-TEST_F(SurfaceTextureGLToGLTest, RepeatedUpdateTexImageAfterFrameFinishedWorks) {
+TEST_F(SurfaceTextureGLToGLTest, RepeatedUpdateTexImageAfterFrameFinishedCompletes) {
enum { NUM_ITERATIONS = 1024 };
class PT : public ProducerThread {
@@ -1297,4 +1374,70 @@ TEST_F(SurfaceTextureGLToGLTest, RepeatedUpdateTexImageAfterFrameFinishedWorks)
}
}
+// XXX: This test is disabled because it is currently hanging on some devices.
+TEST_F(SurfaceTextureGLToGLTest, DISABLED_RepeatedSwapBuffersWhileDequeueStalledCompletes) {
+ enum { NUM_ITERATIONS = 64 };
+
+ class PT : public ProducerThread {
+ virtual void render() {
+ for (int i = 0; i < NUM_ITERATIONS; i++) {
+ glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
+ glClear(GL_COLOR_BUFFER_BIT);
+ LOGV("+swapBuffers");
+ swapBuffers();
+ LOGV("-swapBuffers");
+ }
+ }
+ };
+
+ ASSERT_EQ(OK, mST->setSynchronousMode(true));
+ ASSERT_EQ(OK, mST->setBufferCountServer(2));
+
+ runProducerThread(new PT());
+
+ // Allow three frames to be rendered and queued before starting the
+ // rendering in this thread. For the latter two frames we don't call
+ // updateTexImage so the next dequeue from the producer thread will block
+ // waiting for a frame to become available.
+ mFC->waitForFrame();
+ mFC->finishFrame();
+
+ // We must call updateTexImage to consume the first frame so that the
+ // SurfaceTexture is able to reduce the buffer count to 2. This is because
+ // the GL driver may dequeue a buffer when the EGLSurface is created, and
+ // that happens before we call setBufferCountServer. It's possible that the
+ // driver does not dequeue a buffer at EGLSurface creation time, so we
+ // cannot rely on this to cause the second dequeueBuffer call to block.
+ mST->updateTexImage();
+
+ mFC->waitForFrame();
+ mFC->finishFrame();
+ mFC->waitForFrame();
+ mFC->finishFrame();
+
+ // Sleep for 100ms to allow the producer thread's dequeueBuffer call to
+ // block waiting for a buffer to become available.
+ usleep(100000);
+
+ // Render and present a number of images. This thread should not be blocked
+ // by the fact that the producer thread is blocking in dequeue.
+ for (int i = 0; i < NUM_ITERATIONS; i++) {
+ glClear(GL_COLOR_BUFFER_BIT);
+ eglSwapBuffers(mEglDisplay, mEglSurface);
+ }
+
+ // Consume the two pending buffers to unblock the producer thread.
+ mST->updateTexImage();
+ mST->updateTexImage();
+
+ // Consume the remaining buffers from the producer thread.
+ for (int i = 0; i < NUM_ITERATIONS-3; i++) {
+ mFC->waitForFrame();
+ mFC->finishFrame();
+ LOGV("+updateTexImage");
+ mST->updateTexImage();
+ LOGV("-updateTexImage");
+ }
+}
+
} // namespace android
diff --git a/media/java/android/media/MediaRecorder.java b/media/java/android/media/MediaRecorder.java
index e3cbd57..72069ac 100644
--- a/media/java/android/media/MediaRecorder.java
+++ b/media/java/android/media/MediaRecorder.java
@@ -81,9 +81,6 @@ public class MediaRecorder
private String mPath;
private FileDescriptor mFd;
- private boolean mPrepareAuxiliaryFile = false;
- private String mPathAux;
- private FileDescriptor mFdAux;
private EventHandler mEventHandler;
private OnErrorListener mOnErrorListener;
private OnInfoListener mOnInfoListener;
@@ -557,84 +554,23 @@ public class MediaRecorder
}
/**
- * Sets the auxiliary time lapse video's resolution and bitrate.
- *
- * The auxiliary video's resolution and bitrate are determined by the CamcorderProfile
- * quality level {@link android.media.CamcorderProfile#QUALITY_HIGH}.
- */
- private void setAuxVideoParameters() {
- CamcorderProfile profile = CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH);
- setParameter(String.format("video-aux-param-width=%d", profile.videoFrameWidth));
- setParameter(String.format("video-aux-param-height=%d", profile.videoFrameHeight));
- setParameter(String.format("video-aux-param-encoding-bitrate=%d", profile.videoBitRate));
- }
-
- /**
- * Pass in the file descriptor for the auxiliary time lapse video. Call this before
- * prepare().
- *
- * Sets file descriptor and parameters for auxiliary time lapse video. Time lapse mode
- * can capture video (using the still camera) at resolutions higher than that can be
- * played back on the device. This function or
- * {@link #setAuxiliaryOutputFile(String)} enable capture of a smaller video in
- * parallel with the main time lapse video, which can be used to play back on the
- * device. The smaller video is created by downsampling the main video. This call is
- * optional and does not have to be called if parallel capture of a downsampled video
- * is not desired.
- *
- * Note that while the main video resolution and bitrate is determined from the
- * CamcorderProfile in {@link #setProfile(CamcorderProfile)}, the auxiliary video's
- * resolution and bitrate are determined by the CamcorderProfile quality level
- * {@link android.media.CamcorderProfile#QUALITY_HIGH}. All other encoding parameters
- * remain the same for the main video and the auxiliary video.
- *
- * E.g. if the device supports the time lapse profile quality level
- * {@link android.media.CamcorderProfile#QUALITY_TIME_LAPSE_1080P} but can playback at
- * most 480p, the application might want to capture an auxiliary video of resolution
- * 480p using this call.
- *
- * @param fd an open file descriptor to be written into.
+ * Currently not implemented. It does nothing.
+ * @deprecated Time lapse mode video recording using camera still image capture
+ * is not desirable, and will not be supported.
*/
public void setAuxiliaryOutputFile(FileDescriptor fd)
{
- mPrepareAuxiliaryFile = true;
- mPathAux = null;
- mFdAux = fd;
- setAuxVideoParameters();
+ Log.w(TAG, "setAuxiliaryOutputFile(FileDescriptor) is no longer supported.");
}
/**
- * Pass in the file path for the auxiliary time lapse video. Call this before
- * prepare().
- *
- * Sets file path and parameters for auxiliary time lapse video. Time lapse mode can
- * capture video (using the still camera) at resolutions higher than that can be
- * played back on the device. This function or
- * {@link #setAuxiliaryOutputFile(FileDescriptor)} enable capture of a smaller
- * video in parallel with the main time lapse video, which can be used to play back on
- * the device. The smaller video is created by downsampling the main video. This call
- * is optional and does not have to be called if parallel capture of a downsampled
- * video is not desired.
- *
- * Note that while the main video resolution and bitrate is determined from the
- * CamcorderProfile in {@link #setProfile(CamcorderProfile)}, the auxiliary video's
- * resolution and bitrate are determined by the CamcorderProfile quality level
- * {@link android.media.CamcorderProfile#QUALITY_HIGH}. All other encoding parameters
- * remain the same for the main video and the auxiliary video.
- *
- * E.g. if the device supports the time lapse profile quality level
- * {@link android.media.CamcorderProfile#QUALITY_TIME_LAPSE_1080P} but can playback at
- * most 480p, the application might want to capture an auxiliary video of resolution
- * 480p using this call.
- *
- * @param path The pathname to use.
+ * Currently not implemented. It does nothing.
+ * @deprecated Time lapse mode video recording using camera still image capture
+ * is not desirable, and will not be supported.
*/
public void setAuxiliaryOutputFile(String path)
{
- mPrepareAuxiliaryFile = true;
- mFdAux = null;
- mPathAux = path;
- setAuxVideoParameters();
+ Log.w(TAG, "setAuxiliaryOutputFile(String) is no longer supported.");
}
/**
@@ -668,8 +604,6 @@ public class MediaRecorder
// native implementation
private native void _setOutputFile(FileDescriptor fd, long offset, long length)
throws IllegalStateException, IOException;
- private native void _setOutputFileAux(FileDescriptor fd)
- throws IllegalStateException, IOException;
private native void _prepare() throws IllegalStateException, IOException;
/**
@@ -696,21 +630,6 @@ public class MediaRecorder
throw new IOException("No valid output file");
}
- if (mPrepareAuxiliaryFile) {
- if (mPathAux != null) {
- FileOutputStream fos = new FileOutputStream(mPathAux);
- try {
- _setOutputFileAux(fos.getFD());
- } finally {
- fos.close();
- }
- } else if (mFdAux != null) {
- _setOutputFileAux(mFdAux);
- } else {
- throw new IOException("No valid output file");
- }
- }
-
_prepare();
}
diff --git a/media/jni/android_media_MediaRecorder.cpp b/media/jni/android_media_MediaRecorder.cpp
index 12391c8..922f7ed 100644
--- a/media/jni/android_media_MediaRecorder.cpp
+++ b/media/jni/android_media_MediaRecorder.cpp
@@ -127,7 +127,7 @@ static bool process_media_recorder_call(JNIEnv *env, status_t opStatus, const ch
return false;
}
-static sp<MediaRecorder> getMediaRecorder(JNIEnv* env, jobject thiz)
+sp<MediaRecorder> getMediaRecorder(JNIEnv* env, jobject thiz)
{
Mutex::Autolock l(sLock);
MediaRecorder* const p = (MediaRecorder*)env->GetIntField(thiz, fields.context);
@@ -261,20 +261,6 @@ android_media_MediaRecorder_setOutputFileFD(JNIEnv *env, jobject thiz, jobject f
}
static void
-android_media_MediaRecorder_setOutputFileAuxFD(JNIEnv *env, jobject thiz, jobject fileDescriptor)
-{
- LOGV("setOutputFile");
- if (fileDescriptor == NULL) {
- jniThrowException(env, "java/lang/IllegalArgumentException", NULL);
- return;
- }
- int fd = jniGetFDFromFileDescriptor(env, fileDescriptor);
- sp<MediaRecorder> mr = getMediaRecorder(env, thiz);
- status_t opStatus = mr->setOutputFileAuxiliary(fd);
- process_media_recorder_call(env, opStatus, "java/io/IOException", "setOutputFile failed.");
-}
-
-static void
android_media_MediaRecorder_setVideoSize(JNIEnv *env, jobject thiz, jint width, jint height)
{
LOGV("setVideoSize(%d, %d)", width, height);
@@ -475,7 +461,6 @@ static JNINativeMethod gMethods[] = {
{"setAudioEncoder", "(I)V", (void *)android_media_MediaRecorder_setAudioEncoder},
{"setParameter", "(Ljava/lang/String;)V", (void *)android_media_MediaRecorder_setParameter},
{"_setOutputFile", "(Ljava/io/FileDescriptor;JJ)V", (void *)android_media_MediaRecorder_setOutputFileFD},
- {"_setOutputFileAux", "(Ljava/io/FileDescriptor;)V", (void *)android_media_MediaRecorder_setOutputFileAuxFD},
{"setVideoSize", "(II)V", (void *)android_media_MediaRecorder_setVideoSize},
{"setVideoFrameRate", "(I)V", (void *)android_media_MediaRecorder_setVideoFrameRate},
{"setMaxDuration", "(I)V", (void *)android_media_MediaRecorder_setMaxDuration},
diff --git a/media/libmedia/IMediaRecorder.cpp b/media/libmedia/IMediaRecorder.cpp
index a44ef5a..7e44c29 100644
--- a/media/libmedia/IMediaRecorder.cpp
+++ b/media/libmedia/IMediaRecorder.cpp
@@ -23,14 +23,17 @@
#include <camera/ICamera.h>
#include <media/IMediaRecorderClient.h>
#include <media/IMediaRecorder.h>
+#include <gui/ISurfaceTexture.h>
#include <unistd.h>
+
namespace android {
enum {
RELEASE = IBinder::FIRST_CALL_TRANSACTION,
INIT,
CLOSE,
+ QUERY_SURFACE_MEDIASOURCE,
RESET,
STOP,
START,
@@ -71,6 +74,19 @@ public:
return reply.readInt32();
}
+ sp<ISurfaceTexture> querySurfaceMediaSource()
+ {
+ LOGV("Query SurfaceMediaSource");
+ Parcel data, reply;
+ data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor());
+ remote()->transact(QUERY_SURFACE_MEDIASOURCE, data, &reply);
+ int returnedNull = reply.readInt32();
+ if (returnedNull) {
+ return NULL;
+ }
+ return interface_cast<ISurfaceTexture>(reply.readStrongBinder());
+ }
+
status_t setPreviewSurface(const sp<Surface>& surface)
{
LOGV("setPreviewSurface(%p)", surface.get());
@@ -440,6 +456,20 @@ status_t BnMediaRecorder::onTransact(
reply->writeInt32(setCamera(camera, proxy));
return NO_ERROR;
} break;
+ case QUERY_SURFACE_MEDIASOURCE: {
+ LOGV("QUERY_SURFACE_MEDIASOURCE");
+ CHECK_INTERFACE(IMediaRecorder, data, reply);
+            // call the mediaserver side to create a SurfaceMediaSource
+ sp<ISurfaceTexture> surfaceMediaSource = querySurfaceMediaSource();
+ // The mediaserver might have failed to create a source
+            int returnedNull = (surfaceMediaSource == NULL) ? 1 : 0;
+ reply->writeInt32(returnedNull);
+ if (!returnedNull) {
+ reply->writeStrongBinder(surfaceMediaSource->asBinder());
+ }
+ return NO_ERROR;
+ } break;
default:
return BBinder::onTransact(code, data, reply, flags);
}
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index ed6e3c7..a11fb80 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -228,6 +228,7 @@ status_t MediaPlayer::setVideoSurface(const sp<Surface>& surface)
NATIVE_WINDOW_API_MEDIA);
if (err != OK) {
+ LOGE("setVideoSurface failed: %d", err);
// Note that we must do the reset before disconnecting from the ANW.
// Otherwise queue/dequeue calls could be made on the disconnected
// ANW, which may result in errors.
@@ -277,6 +278,7 @@ status_t MediaPlayer::setVideoSurfaceTexture(
NATIVE_WINDOW_API_MEDIA);
if (err != OK) {
+ LOGE("setVideoSurfaceTexture failed: %d", err);
// Note that we must do the reset before disconnecting from the ANW.
// Otherwise queue/dequeue calls could be made on the disconnected
// ANW, which may result in errors.
diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp
index 9e4edd0..fab674c 100644
--- a/media/libmedia/mediarecorder.cpp
+++ b/media/libmedia/mediarecorder.cpp
@@ -25,6 +25,7 @@
#include <media/IMediaPlayerService.h>
#include <media/IMediaRecorder.h>
#include <media/mediaplayer.h> // for MEDIA_ERROR_SERVER_DIED
+#include <gui/ISurfaceTexture.h>
namespace android {
@@ -127,7 +128,9 @@ status_t MediaRecorder::setVideoSource(int vs)
return INVALID_OPERATION;
}
+    // The following call is made over the Binder interface.
status_t ret = mMediaRecorder->setVideoSource(vs);
+
if (OK != ret) {
LOGV("setVideoSource failed: %d", ret);
mCurrentState = MEDIA_RECORDER_ERROR;
@@ -357,7 +360,7 @@ status_t MediaRecorder::setVideoSize(int width, int height)
return INVALID_OPERATION;
}
if (!mIsVideoSourceSet) {
- LOGE("try to set video size without setting video source first");
+ LOGE("Cannot set video size without setting video source first");
return INVALID_OPERATION;
}
@@ -367,9 +370,27 @@ status_t MediaRecorder::setVideoSize(int width, int height)
mCurrentState = MEDIA_RECORDER_ERROR;
return ret;
}
+
return ret;
}
+// Query a SurfaceMediaSource through the mediaserver, over the
+// Binder interface. This is used by the filter framework (MediaEncoder)
+// to get an <ISurfaceTexture> object to hook up to ANativeWindow.
+sp<ISurfaceTexture> MediaRecorder::
+ querySurfaceMediaSourceFromMediaServer()
+{
+ Mutex::Autolock _l(mLock);
+ mSurfaceMediaSource =
+ mMediaRecorder->querySurfaceMediaSource();
+ if (mSurfaceMediaSource == NULL) {
+ LOGE("SurfaceMediaSource could not be initialized!");
+ }
+ return mSurfaceMediaSource;
+}
+
status_t MediaRecorder::setVideoFrameRate(int frames_per_second)
{
LOGV("setVideoFrameRate(%d)", frames_per_second);
@@ -382,7 +403,7 @@ status_t MediaRecorder::setVideoFrameRate(int frames_per_second)
return INVALID_OPERATION;
}
if (!mIsVideoSourceSet) {
- LOGE("try to set video frame rate without setting video source first");
+ LOGE("Cannot set video frame rate without setting video source first");
return INVALID_OPERATION;
}
@@ -621,7 +642,7 @@ status_t MediaRecorder::release()
return INVALID_OPERATION;
}
-MediaRecorder::MediaRecorder()
+MediaRecorder::MediaRecorder() : mSurfaceMediaSource(NULL)
{
LOGV("constructor");
@@ -632,6 +653,8 @@ MediaRecorder::MediaRecorder()
if (mMediaRecorder != NULL) {
mCurrentState = MEDIA_RECORDER_IDLE;
}
+
+
doCleanUp();
}
@@ -646,6 +669,10 @@ MediaRecorder::~MediaRecorder()
if (mMediaRecorder != NULL) {
mMediaRecorder.clear();
}
+
+ if (mSurfaceMediaSource != NULL) {
+ mSurfaceMediaSource.clear();
+ }
}
status_t MediaRecorder::setListener(const sp<MediaRecorderListener>& listener)
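Note: the sketch below is an illustration only, not code from this change. It shows how a client such as the filter framework's MediaEncoder might use the new query path, wrapping the ISurfaceTexture returned by the mediaserver in a SurfaceTextureClient to obtain an ANativeWindow that GL can render into. The VIDEO_SOURCE_GRALLOC_BUFFER constant and the remaining recorder setup are assumed from elsewhere in this patch, and the exact point at which the mediaserver instantiates its SurfaceMediaSource is not asserted here.

    sp<MediaRecorder> recorder = new MediaRecorder();
    recorder->setVideoSource(VIDEO_SOURCE_GRALLOC_BUFFER);
    // ... remaining recorder setup (output format, encoder, output file) ...

    sp<ISurfaceTexture> st =
            recorder->querySurfaceMediaSourceFromMediaServer();
    if (st != NULL) {
        // SurfaceTextureClient exposes an ISurfaceTexture as an
        // ANativeWindow; GL frames queued to this window are consumed
        // by the encoder through SurfaceMediaSource::read().
        sp<ANativeWindow> window = new SurfaceTextureClient(st);
    }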
diff --git a/media/libmediaplayerservice/MediaRecorderClient.cpp b/media/libmediaplayerservice/MediaRecorderClient.cpp
index 115db1a..905b885 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.cpp
+++ b/media/libmediaplayerservice/MediaRecorderClient.cpp
@@ -41,6 +41,7 @@
#include "MediaPlayerService.h"
#include "StagefrightRecorder.h"
+#include <gui/ISurfaceTexture.h>
namespace android {
@@ -57,6 +58,20 @@ static bool checkPermission(const char* permissionString) {
return ok;
}
+
+sp<ISurfaceTexture> MediaRecorderClient::querySurfaceMediaSource()
+{
+ LOGV("Query SurfaceMediaSource");
+ Mutex::Autolock lock(mLock);
+ if (mRecorder == NULL) {
+ LOGE("recorder is not initialized");
+ return NULL;
+ }
+ return mRecorder->querySurfaceMediaSource();
+}
+
status_t MediaRecorderClient::setCamera(const sp<ICamera>& camera,
const sp<ICameraRecordingProxy>& proxy)
{
diff --git a/media/libmediaplayerservice/MediaRecorderClient.h b/media/libmediaplayerservice/MediaRecorderClient.h
index bbca529..c87a3c0 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.h
+++ b/media/libmediaplayerservice/MediaRecorderClient.h
@@ -25,45 +25,51 @@ namespace android {
class MediaRecorderBase;
class MediaPlayerService;
class ICameraRecordingProxy;
+class ISurfaceTexture;
class MediaRecorderClient : public BnMediaRecorder
{
public:
- virtual status_t setCamera(const sp<ICamera>& camera,
- const sp<ICameraRecordingProxy>& proxy);
- virtual status_t setPreviewSurface(const sp<Surface>& surface);
- virtual status_t setVideoSource(int vs);
- virtual status_t setAudioSource(int as);
- virtual status_t setOutputFormat(int of);
- virtual status_t setVideoEncoder(int ve);
- virtual status_t setAudioEncoder(int ae);
- virtual status_t setOutputFile(const char* path);
- virtual status_t setOutputFile(int fd, int64_t offset, int64_t length);
- virtual status_t setOutputFileAuxiliary(int fd);
- virtual status_t setVideoSize(int width, int height);
- virtual status_t setVideoFrameRate(int frames_per_second);
- virtual status_t setParameters(const String8& params);
- virtual status_t setListener(const sp<IMediaRecorderClient>& listener);
- virtual status_t prepare();
- virtual status_t getMaxAmplitude(int* max);
- virtual status_t start();
- virtual status_t stop();
- virtual status_t reset();
- virtual status_t init();
- virtual status_t close();
- virtual status_t release();
+ virtual status_t setCamera(const sp<ICamera>& camera,
+ const sp<ICameraRecordingProxy>& proxy);
+ virtual status_t setPreviewSurface(const sp<Surface>& surface);
+ virtual status_t setVideoSource(int vs);
+ virtual status_t setAudioSource(int as);
+ virtual status_t setOutputFormat(int of);
+ virtual status_t setVideoEncoder(int ve);
+ virtual status_t setAudioEncoder(int ae);
+ virtual status_t setOutputFile(const char* path);
+ virtual status_t setOutputFile(int fd, int64_t offset,
+ int64_t length);
+ virtual status_t setOutputFileAuxiliary(int fd);
+ virtual status_t setVideoSize(int width, int height);
+ virtual status_t setVideoFrameRate(int frames_per_second);
+ virtual status_t setParameters(const String8& params);
+ virtual status_t setListener(
+ const sp<IMediaRecorderClient>& listener);
+ virtual status_t prepare();
+ virtual status_t getMaxAmplitude(int* max);
+ virtual status_t start();
+ virtual status_t stop();
+ virtual status_t reset();
+ virtual status_t init();
+ virtual status_t close();
+ virtual status_t release();
+ virtual status_t dump(int fd, const Vector<String16>& args) const;
+ virtual sp<ISurfaceTexture> querySurfaceMediaSource();
- virtual status_t dump(int fd, const Vector<String16>& args) const;
private:
- friend class MediaPlayerService; // for accessing private constructor
+ friend class MediaPlayerService; // for accessing private constructor
- MediaRecorderClient(const sp<MediaPlayerService>& service, pid_t pid);
- virtual ~MediaRecorderClient();
+ MediaRecorderClient(
+ const sp<MediaPlayerService>& service,
+ pid_t pid);
+ virtual ~MediaRecorderClient();
- pid_t mPid;
- Mutex mLock;
- MediaRecorderBase *mRecorder;
- sp<MediaPlayerService> mMediaPlayerService;
+ pid_t mPid;
+ Mutex mLock;
+ MediaRecorderBase *mRecorder;
+ sp<MediaPlayerService> mMediaPlayerService;
};
}; // namespace android
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 223e0be..6427bb7 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -38,10 +38,12 @@
#include <media/stagefright/MetaData.h>
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/OMXCodec.h>
+#include <media/stagefright/SurfaceMediaSource.h>
#include <media/MediaProfiles.h>
#include <camera/ICamera.h>
#include <camera/CameraParameters.h>
#include <surfaceflinger/Surface.h>
+
#include <utils/Errors.h>
#include <sys/types.h>
#include <ctype.h>
@@ -69,7 +71,7 @@ StagefrightRecorder::StagefrightRecorder()
mOutputFd(-1), mOutputFdAux(-1),
mAudioSource(AUDIO_SOURCE_CNT),
mVideoSource(VIDEO_SOURCE_LIST_END),
- mStarted(false) {
+ mStarted(false), mSurfaceMediaSource(NULL) {
LOGV("Constructor");
reset();
@@ -85,6 +87,14 @@ status_t StagefrightRecorder::init() {
return OK;
}
+// The client side asks the mediaserver to create a SurfaceMediaSource
+// and return an interface reference. The client side will use that
+// reference while encoding GL frames.
+sp<ISurfaceTexture> StagefrightRecorder::querySurfaceMediaSource() const {
+ LOGV("Get SurfaceMediaSource");
+ return mSurfaceMediaSource;
+}
+
status_t StagefrightRecorder::setAudioSource(audio_source_t as) {
LOGV("setAudioSource: %d", as);
if (as < AUDIO_SOURCE_DEFAULT ||
@@ -1006,13 +1016,13 @@ status_t StagefrightRecorder::startRTPRecording() {
source = createAudioSource();
} else {
- sp<CameraSource> cameraSource;
- status_t err = setupCameraSource(&cameraSource);
+ sp<MediaSource> mediaSource;
+ status_t err = setupMediaSource(&mediaSource);
if (err != OK) {
return err;
}
- err = setupVideoEncoder(cameraSource, mVideoBitRate, &source);
+ err = setupVideoEncoder(mediaSource, mVideoBitRate, &source);
if (err != OK) {
return err;
}
@@ -1042,20 +1052,19 @@ status_t StagefrightRecorder::startMPEG2TSRecording() {
}
}
- if (mVideoSource == VIDEO_SOURCE_DEFAULT
- || mVideoSource == VIDEO_SOURCE_CAMERA) {
+ if (mVideoSource < VIDEO_SOURCE_LIST_END) {
if (mVideoEncoder != VIDEO_ENCODER_H264) {
return ERROR_UNSUPPORTED;
}
- sp<CameraSource> cameraSource;
- status_t err = setupCameraSource(&cameraSource);
+ sp<MediaSource> mediaSource;
+ status_t err = setupMediaSource(&mediaSource);
if (err != OK) {
return err;
}
sp<MediaSource> encoder;
- err = setupVideoEncoder(cameraSource, mVideoBitRate, &encoder);
+ err = setupVideoEncoder(mediaSource, mVideoBitRate, &encoder);
if (err != OK) {
return err;
@@ -1289,6 +1298,60 @@ void StagefrightRecorder::clipVideoFrameHeight() {
}
}
+// Set up the appropriate MediaSource depending on the chosen option
+status_t StagefrightRecorder::setupMediaSource(
+ sp<MediaSource> *mediaSource) {
+ if (mVideoSource == VIDEO_SOURCE_DEFAULT
+ || mVideoSource == VIDEO_SOURCE_CAMERA) {
+ sp<CameraSource> cameraSource;
+ status_t err = setupCameraSource(&cameraSource);
+ if (err != OK) {
+ return err;
+ }
+ *mediaSource = cameraSource;
+ } else if (mVideoSource == VIDEO_SOURCE_GRALLOC_BUFFER) {
+        // If using gralloc buffers, set up a SurfaceMediaSource.
+        // A handle to it will later be passed to the client side when queried.
+ status_t err = setupSurfaceMediaSource();
+ if (err != OK) {
+ return err;
+ }
+ *mediaSource = mSurfaceMediaSource;
+ } else {
+ return INVALID_OPERATION;
+ }
+ return OK;
+}
+
+// setupSurfaceMediaSource creates a source with the given
+// width, height, and frame rate.
+// TODO: This could go in a static function inside SurfaceMediaSource
+// similar to that in CameraSource
+status_t StagefrightRecorder::setupSurfaceMediaSource() {
+ status_t err = OK;
+ mSurfaceMediaSource = new SurfaceMediaSource(mVideoWidth, mVideoHeight);
+ if (mSurfaceMediaSource == NULL) {
+ return NO_INIT;
+ }
+
+ if (mFrameRate == -1) {
+ int32_t frameRate = 0;
+ CHECK (mSurfaceMediaSource->getFormat()->findInt32(
+ kKeyFrameRate, &frameRate));
+ LOGI("Frame rate is not explicitly set. Use the current frame "
+ "rate (%d fps)", frameRate);
+ mFrameRate = frameRate;
+ } else {
+ err = mSurfaceMediaSource->setFrameRate(mFrameRate);
+ }
+ CHECK(mFrameRate != -1);
+
+ mIsMetaDataStoredInVideoBuffers =
+ mSurfaceMediaSource->isMetaDataStoredInVideoBuffers();
+ return err;
+}
+
status_t StagefrightRecorder::setupCameraSource(
sp<CameraSource> *cameraSource) {
status_t err = OK;
@@ -1465,29 +1528,37 @@ status_t StagefrightRecorder::setupMPEG4Recording(
status_t err = OK;
sp<MediaWriter> writer = new MPEG4Writer(outputFd);
- if (mVideoSource == VIDEO_SOURCE_DEFAULT
- || mVideoSource == VIDEO_SOURCE_CAMERA) {
+ if (mVideoSource < VIDEO_SOURCE_LIST_END) {
- sp<MediaSource> cameraMediaSource;
+ sp<MediaSource> mediaSource;
if (useSplitCameraSource) {
+ // TODO: Check if there is a better way to handle this
+ if (mVideoSource == VIDEO_SOURCE_GRALLOC_BUFFER) {
+ LOGE("Cannot use split camera when encoding frames");
+ return INVALID_OPERATION;
+ }
LOGV("Using Split camera source");
- cameraMediaSource = mCameraSourceSplitter->createClient();
+ mediaSource = mCameraSourceSplitter->createClient();
} else {
- sp<CameraSource> cameraSource;
- err = setupCameraSource(&cameraSource);
- cameraMediaSource = cameraSource;
+ err = setupMediaSource(&mediaSource);
}
+
if ((videoWidth != mVideoWidth) || (videoHeight != mVideoHeight)) {
+            // TODO: Might be able to handle downsampling even when using gralloc buffers
+ if (mVideoSource == VIDEO_SOURCE_GRALLOC_BUFFER) {
+ LOGE("Cannot change size or Downsample when encoding frames");
+ return INVALID_OPERATION;
+ }
// Use downsampling from the original source.
- cameraMediaSource =
- new VideoSourceDownSampler(cameraMediaSource, videoWidth, videoHeight);
+ mediaSource =
+ new VideoSourceDownSampler(mediaSource, videoWidth, videoHeight);
}
if (err != OK) {
return err;
}
sp<MediaSource> encoder;
- err = setupVideoEncoder(cameraMediaSource, videoBitRate, &encoder);
+ err = setupVideoEncoder(mediaSource, videoBitRate, &encoder);
if (err != OK) {
return err;
}
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index 034b373..1618b92 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -36,6 +36,8 @@ struct MediaWriter;
class MetaData;
struct AudioSource;
class MediaProfiles;
+class ISurfaceTexture;
+class SurfaceMediaSource;
struct StagefrightRecorder : public MediaRecorderBase {
StagefrightRecorder();
@@ -64,6 +66,8 @@ struct StagefrightRecorder : public MediaRecorderBase {
virtual status_t reset();
virtual status_t getMaxAmplitude(int *max);
virtual status_t dump(int fd, const Vector<String16>& args) const;
+    // Querying a SurfaceMediaSource
+ virtual sp<ISurfaceTexture> querySurfaceMediaSource() const;
private:
sp<ICamera> mCamera;
@@ -109,12 +113,18 @@ private:
sp<MediaSourceSplitter> mCameraSourceSplitter;
sp<CameraSourceTimeLapse> mCameraSourceTimeLapse;
+
String8 mParams;
bool mIsMetaDataStoredInVideoBuffers;
MediaProfiles *mEncoderProfiles;
bool mStarted;
+    // Needed when GL frames are encoded.
+    // An <ISurfaceTexture> pointer is sent to the client side,
+    // which uses it to queue and dequeue frame buffers.
+ sp<SurfaceMediaSource> mSurfaceMediaSource;
status_t setupMPEG4Recording(
bool useSplitCameraSource,
@@ -134,7 +144,14 @@ private:
sp<MediaSource> createAudioSource();
status_t checkVideoEncoderCapabilities();
status_t checkAudioEncoderCapabilities();
+    // Generic MediaSource set-up. Returns the appropriate
+    // source (CameraSource or SurfaceMediaSource)
+    // depending on the video source type.
+ status_t setupMediaSource(sp<MediaSource> *mediaSource);
status_t setupCameraSource(sp<CameraSource> *cameraSource);
+    // Set up the SurfaceMediaSource for the encoder.
+ status_t setupSurfaceMediaSource();
+
status_t setupAudioEncoder(const sp<MediaWriter>& writer);
status_t setupVideoEncoder(
sp<MediaSource> cameraSource,
@@ -176,6 +193,7 @@ private:
void clipNumberOfAudioChannels();
void setDefaultProfileIfNecessary();
+
StagefrightRecorder(const StagefrightRecorder &);
StagefrightRecorder &operator=(const StagefrightRecorder &);
};
diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk
index e17e1e8..3a3c082 100644
--- a/media/libstagefright/Android.mk
+++ b/media/libstagefright/Android.mk
@@ -42,6 +42,7 @@ LOCAL_SRC_FILES:= \
SampleTable.cpp \
StagefrightMediaScanner.cpp \
StagefrightMetadataRetriever.cpp \
+ SurfaceMediaSource.cpp \
ThrottledSource.cpp \
TimeSource.cpp \
TimedEventQueue.cpp \
diff --git a/media/libstagefright/NuCachedSource2.cpp b/media/libstagefright/NuCachedSource2.cpp
index 77a6602..4edb613 100644
--- a/media/libstagefright/NuCachedSource2.cpp
+++ b/media/libstagefright/NuCachedSource2.cpp
@@ -185,7 +185,8 @@ NuCachedSource2::NuCachedSource2(const sp<DataSource> &source)
mFinalStatus(OK),
mLastAccessPos(0),
mFetching(true),
- mLastFetchTimeUs(-1) {
+ mLastFetchTimeUs(-1),
+ mNumRetriesLeft(kMaxNumRetries) {
mLooper->setName("NuCachedSource2");
mLooper->registerHandler(mReflector);
mLooper->start();
@@ -254,7 +255,27 @@ void NuCachedSource2::onMessageReceived(const sp<AMessage> &msg) {
void NuCachedSource2::fetchInternal() {
LOGV("fetchInternal");
- CHECK_EQ(mFinalStatus, (status_t)OK);
+ {
+ Mutex::Autolock autoLock(mLock);
+ CHECK(mFinalStatus == OK || mNumRetriesLeft > 0);
+
+ if (mFinalStatus != OK) {
+ --mNumRetriesLeft;
+
+ status_t err =
+ mSource->reconnectAtOffset(mCacheOffset + mCache->totalSize());
+
+ if (err == ERROR_UNSUPPORTED) {
+ mNumRetriesLeft = 0;
+ return;
+ } else if (err != OK) {
+ LOGI("The attempt to reconnect failed, %d retries remaining",
+ mNumRetriesLeft);
+
+ return;
+ }
+ }
+ }
PageCache::Page *page = mCache->acquirePage();
@@ -264,14 +285,23 @@ void NuCachedSource2::fetchInternal() {
Mutex::Autolock autoLock(mLock);
if (n < 0) {
- LOGE("source returned error %ld", n);
+ LOGE("source returned error %ld, %d retries left", n, mNumRetriesLeft);
mFinalStatus = n;
mCache->releasePage(page);
} else if (n == 0) {
LOGI("ERROR_END_OF_STREAM");
+
+ mNumRetriesLeft = 0;
mFinalStatus = ERROR_END_OF_STREAM;
+
mCache->releasePage(page);
} else {
+ if (mFinalStatus != OK) {
+ LOGI("retrying a previously failed read succeeded.");
+ }
+ mNumRetriesLeft = kMaxNumRetries;
+ mFinalStatus = OK;
+
page->mSize = n;
mCache->appendPage(page);
}
@@ -280,7 +310,7 @@ void NuCachedSource2::fetchInternal() {
void NuCachedSource2::onFetch() {
LOGV("onFetch");
- if (mFinalStatus != OK) {
+ if (mFinalStatus != OK && mNumRetriesLeft == 0) {
LOGV("EOS reached, done prefetching for now");
mFetching = false;
}
@@ -308,8 +338,19 @@ void NuCachedSource2::onFetch() {
restartPrefetcherIfNecessary_l();
}
- (new AMessage(kWhatFetchMore, mReflector->id()))->post(
- mFetching ? 0 : 100000ll);
+ int64_t delayUs;
+ if (mFetching) {
+ if (mFinalStatus != OK && mNumRetriesLeft > 0) {
+ // We failed this time and will try again in 3 seconds.
+ delayUs = 3000000ll;
+ } else {
+ delayUs = 0;
+ }
+ } else {
+ delayUs = 100000ll;
+ }
+
+ (new AMessage(kWhatFetchMore, mReflector->id()))->post(delayUs);
}
void NuCachedSource2::onRead(const sp<AMessage> &msg) {
@@ -345,7 +386,7 @@ void NuCachedSource2::restartPrefetcherIfNecessary_l(
bool ignoreLowWaterThreshold, bool force) {
static const size_t kGrayArea = 1024 * 1024;
- if (mFetching || mFinalStatus != OK) {
+ if (mFetching || (mFinalStatus != OK && mNumRetriesLeft == 0)) {
return;
}
@@ -427,6 +468,12 @@ size_t NuCachedSource2::approxDataRemaining(status_t *finalStatus) {
size_t NuCachedSource2::approxDataRemaining_l(status_t *finalStatus) {
*finalStatus = mFinalStatus;
+
+ if (mFinalStatus != OK && mNumRetriesLeft > 0) {
+ // Pretend that everything is fine until we're out of retries.
+ *finalStatus = OK;
+ }
+
off64_t lastBytePosCached = mCacheOffset + mCache->totalSize();
if (mLastAccessPos < lastBytePosCached) {
return lastBytePosCached - mLastAccessPos;
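Note: the NuCachedSource2 changes above add a bounded retry-with-reconnect policy whose pieces are spread across fetchInternal(), onFetch() and approxDataRemaining_l(). The standalone sketch below is a simplified model of that policy, not the actual control flow, and uses illustrative names: a failed read consumes one retry and schedules the next fetch 3 seconds out, end-of-stream stops retrying, and any successful read restores the full budget of kMaxNumRetries; while retries remain, callers are still told that the final status is OK.

    // Simplified model of the retry policy (illustrative only).
    enum { kMaxNumRetries = 10 };

    struct RetryState {
        int  retriesLeft;   // starts at kMaxNumRetries
        bool failed;        // mirrors mFinalStatus != OK
    };

    // Returns the delay in microseconds before the next fetch attempt,
    // or -1 when prefetching should stop for good.
    long long nextFetchDelayUs(RetryState* s, long long bytesRead, bool eos) {
        if (eos) {                        // ERROR_END_OF_STREAM is never retried
            s->retriesLeft = 0;
            return -1;
        }
        if (bytesRead < 0) {              // read error: burn one retry, back off
            s->failed = true;
            return (--s->retriesLeft > 0) ? 3000000LL : -1;
        }
        s->failed = false;                // success restores the full budget
        s->retriesLeft = kMaxNumRetries;
        return 0;
    }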
diff --git a/media/libstagefright/SurfaceMediaSource.cpp b/media/libstagefright/SurfaceMediaSource.cpp
new file mode 100644
index 0000000..ff4b08f
--- /dev/null
+++ b/media/libstagefright/SurfaceMediaSource.cpp
@@ -0,0 +1,756 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "SurfaceMediaSource"
+
+#include <media/stagefright/SurfaceMediaSource.h>
+#include <ui/GraphicBuffer.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/openmax/OMX_IVCommon.h>
+
+#include <surfaceflinger/ISurfaceComposer.h>
+#include <surfaceflinger/SurfaceComposerClient.h>
+#include <surfaceflinger/IGraphicBufferAlloc.h>
+#include <OMX_Component.h>
+
+#include <utils/Log.h>
+#include <utils/String8.h>
+
+namespace android {
+
+SurfaceMediaSource::SurfaceMediaSource(uint32_t bufW, uint32_t bufH) :
+ mDefaultWidth(bufW),
+ mDefaultHeight(bufH),
+ mPixelFormat(0),
+ mBufferCount(MIN_ASYNC_BUFFER_SLOTS),
+ mClientBufferCount(0),
+ mServerBufferCount(MIN_ASYNC_BUFFER_SLOTS),
+ mCurrentSlot(INVALID_BUFFER_SLOT),
+ mCurrentTimestamp(0),
+ mSynchronousMode(true),
+ mConnectedApi(NO_CONNECTED_API),
+ mFrameRate(30),
+ mStarted(false) {
+ LOGV("SurfaceMediaSource::SurfaceMediaSource");
+ sp<ISurfaceComposer> composer(ComposerService::getComposerService());
+ mGraphicBufferAlloc = composer->createGraphicBufferAlloc();
+}
+
+SurfaceMediaSource::~SurfaceMediaSource() {
+ LOGV("SurfaceMediaSource::~SurfaceMediaSource");
+ if (mStarted) {
+ stop();
+ }
+ freeAllBuffers();
+}
+
+size_t SurfaceMediaSource::getQueuedCount() const {
+ Mutex::Autolock lock(mMutex);
+ return mQueue.size();
+}
+
+status_t SurfaceMediaSource::setBufferCountServerLocked(int bufferCount) {
+ if (bufferCount > NUM_BUFFER_SLOTS)
+ return BAD_VALUE;
+
+ // special-case, nothing to do
+ if (bufferCount == mBufferCount)
+ return OK;
+
+ if (!mClientBufferCount &&
+ bufferCount >= mBufferCount) {
+ // easy, we just have more buffers
+ mBufferCount = bufferCount;
+ mServerBufferCount = bufferCount;
+ mDequeueCondition.signal();
+ } else {
+ // we're here because we're either
+ // - reducing the number of available buffers
+ // - or there is a client-buffer-count in effect
+
+ // less than 2 buffers is never allowed
+ if (bufferCount < 2)
+ return BAD_VALUE;
+
+        // when there is no client-buffer-count in effect, the client is not
+        // allowed to dequeue more than one buffer at a time,
+        // so the next time they dequeue a buffer, we know that they don't
+        // own one. The actual resizing will happen during the next
+        // dequeueBuffer.
+
+ mServerBufferCount = bufferCount;
+ }
+ return OK;
+}
+
+// Called from the consumer side
+status_t SurfaceMediaSource::setBufferCountServer(int bufferCount) {
+ Mutex::Autolock lock(mMutex);
+ return setBufferCountServerLocked(bufferCount);
+}
+
+status_t SurfaceMediaSource::setBufferCount(int bufferCount) {
+ LOGV("SurfaceMediaSource::setBufferCount");
+ if (bufferCount > NUM_BUFFER_SLOTS) {
+ LOGE("setBufferCount: bufferCount is larger than the number of buffer slots");
+ return BAD_VALUE;
+ }
+
+ Mutex::Autolock lock(mMutex);
+ // Error out if the user has dequeued buffers
+ for (int i = 0 ; i < mBufferCount ; i++) {
+ if (mSlots[i].mBufferState == BufferSlot::DEQUEUED) {
+ LOGE("setBufferCount: client owns some buffers");
+ return INVALID_OPERATION;
+ }
+ }
+
+ if (bufferCount == 0) {
+ const int minBufferSlots = mSynchronousMode ?
+ MIN_SYNC_BUFFER_SLOTS : MIN_ASYNC_BUFFER_SLOTS;
+ mClientBufferCount = 0;
+ bufferCount = (mServerBufferCount >= minBufferSlots) ?
+ mServerBufferCount : minBufferSlots;
+ return setBufferCountServerLocked(bufferCount);
+ }
+
+ // We don't allow the client to set a buffer-count less than
+ // MIN_ASYNC_BUFFER_SLOTS (3), there is no reason for it.
+ if (bufferCount < MIN_ASYNC_BUFFER_SLOTS) {
+ return BAD_VALUE;
+ }
+
+ // here we're guaranteed that the client doesn't have dequeued buffers
+ // and will release all of its buffer references.
+ freeAllBuffers();
+ mBufferCount = bufferCount;
+ mClientBufferCount = bufferCount;
+ mCurrentSlot = INVALID_BUFFER_SLOT;
+ mQueue.clear();
+ mDequeueCondition.signal();
+ return OK;
+}
+
+status_t SurfaceMediaSource::requestBuffer(int slot, sp<GraphicBuffer>* buf) {
+ LOGV("SurfaceMediaSource::requestBuffer");
+ Mutex::Autolock lock(mMutex);
+ if (slot < 0 || mBufferCount <= slot) {
+ LOGE("requestBuffer: slot index out of range [0, %d]: %d",
+ mBufferCount, slot);
+ return BAD_VALUE;
+ }
+ mSlots[slot].mRequestBufferCalled = true;
+ *buf = mSlots[slot].mGraphicBuffer;
+ return NO_ERROR;
+}
+
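+// dequeueBuffer hands a buffer slot to the producer: *outBuf receives the
+// slot index, and the return value may carry the flags
+// BUFFER_NEEDS_REALLOCATION (the caller must then call requestBuffer())
+// and RELEASE_ALL_BUFFERS. In synchronous mode the call blocks on
+// mDequeueCondition until a FREE slot becomes available.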
+status_t SurfaceMediaSource::dequeueBuffer(int *outBuf, uint32_t w, uint32_t h,
+ uint32_t format, uint32_t usage) {
+ LOGV("dequeueBuffer");
+
+
+    // Check the requested buffer size: the client should just use the
+    // default width and height and not try to set them. This is needed
+    // because getFormat() returns mDefaultWidth/mDefaultHeight to the OMX
+    // component, which queries them once at the beginning rather than for
+    // every frame. It is not clear whether there is a way to update the
+    // frame size while recording, so for now the client sets the default
+    // values via the constructor and the encoder is set up to encode
+    // frames of that size. The design might need to change in the future.
+ // TODO: Currently just uses mDefaultWidth/Height. In the future
+ // we might declare mHeight and mWidth and check against those here.
+ if ((w != 0) || (h != 0)) {
+ LOGE("dequeuebuffer: invalid buffer size! Req: %dx%d, Found: %dx%d",
+ mDefaultWidth, mDefaultHeight, w, h);
+ return BAD_VALUE;
+ }
+
+ Mutex::Autolock lock(mMutex);
+
+ status_t returnFlags(OK);
+
+ int found, foundSync;
+ int dequeuedCount = 0;
+ bool tryAgain = true;
+ while (tryAgain) {
+        // We need to wait for the FIFO to drain if the number of buffers
+        // needs to change.
+        //
+        // The condition "number of buffers needs to change" is true if
+        // - the client doesn't care about how many buffers there are
+        // - AND the actual number of buffers is different from what was
+        //   set in the last setBufferCountServer()
+ // - OR -
+ // setBufferCountServer() was set to a value incompatible with
+ // the synchronization mode (for instance because the sync mode
+ // changed since)
+ //
+ // As long as this condition is true AND the FIFO is not empty, we
+ // wait on mDequeueCondition.
+
+ int minBufferCountNeeded = mSynchronousMode ?
+ MIN_SYNC_BUFFER_SLOTS : MIN_ASYNC_BUFFER_SLOTS;
+
+ if (!mClientBufferCount &&
+ ((mServerBufferCount != mBufferCount) ||
+ (mServerBufferCount < minBufferCountNeeded))) {
+ // wait for the FIFO to drain
+ while (!mQueue.isEmpty()) {
+ LOGV("Waiting for the FIFO to drain");
+ mDequeueCondition.wait(mMutex);
+ }
+ // need to check again since the mode could have changed
+ // while we were waiting
+ minBufferCountNeeded = mSynchronousMode ?
+ MIN_SYNC_BUFFER_SLOTS : MIN_ASYNC_BUFFER_SLOTS;
+ }
+
+ if (!mClientBufferCount &&
+ ((mServerBufferCount != mBufferCount) ||
+ (mServerBufferCount < minBufferCountNeeded))) {
+ // here we're guaranteed that mQueue is empty
+ freeAllBuffers();
+ mBufferCount = mServerBufferCount;
+ if (mBufferCount < minBufferCountNeeded)
+ mBufferCount = minBufferCountNeeded;
+ mCurrentSlot = INVALID_BUFFER_SLOT;
+ returnFlags |= ISurfaceTexture::RELEASE_ALL_BUFFERS;
+ }
+
+ // look for a free buffer to give to the client
+ found = INVALID_BUFFER_SLOT;
+ foundSync = INVALID_BUFFER_SLOT;
+ dequeuedCount = 0;
+ for (int i = 0; i < mBufferCount; i++) {
+ const int state = mSlots[i].mBufferState;
+ if (state == BufferSlot::DEQUEUED) {
+ dequeuedCount++;
+                continue;  // unlike SurfaceTexture, we cannot hand out
+                           // a non-'FREE' current slot, so keep looking
+ }
+            // In the encoding case we do not dequeue the mCurrentSlot buffer,
+            // since we follow synchronous mode (unlike SurfaceTexture, which
+            // may use asynchronous mode or rely on GL to wait until the
+            // current slot is done with the data).
+            // Here we have to wait for the MPEG4Writer (or equivalent) to
+            // tell us when it is done using the current buffer.
+ if (state == BufferSlot::FREE) {
+ foundSync = i;
+                // Unlike SurfaceTexture, we don't need to care whether
+                // this is the current slot, since it is in state FREE.
+ found = i;
+ break;
+ }
+ }
+
+ // clients are not allowed to dequeue more than one buffer
+ // if they didn't set a buffer count.
+ if (!mClientBufferCount && dequeuedCount) {
+ return -EINVAL;
+ }
+
+ // See whether a buffer has been queued since the last setBufferCount so
+ // we know whether to perform the MIN_UNDEQUEUED_BUFFERS check below.
+ bool bufferHasBeenQueued = mCurrentSlot != INVALID_BUFFER_SLOT;
+ if (bufferHasBeenQueued) {
+ // make sure the client is not trying to dequeue more buffers
+ // than allowed.
+ const int avail = mBufferCount - (dequeuedCount+1);
+ if (avail < (MIN_UNDEQUEUED_BUFFERS-int(mSynchronousMode))) {
+ LOGE("dequeueBuffer: MIN_UNDEQUEUED_BUFFERS=%d exceeded (dequeued=%d)",
+ MIN_UNDEQUEUED_BUFFERS-int(mSynchronousMode),
+ dequeuedCount);
+ return -EBUSY;
+ }
+ }
+
+        // we're in synchronous mode and didn't find a buffer; we need to wait
+        // for some buffers to be consumed
+ tryAgain = mSynchronousMode && (foundSync == INVALID_BUFFER_SLOT);
+ if (tryAgain) {
+ LOGW("Waiting..In synchronous mode and no buffer to dQ");
+ mDequeueCondition.wait(mMutex);
+ }
+ }
+
+ if (mSynchronousMode && found == INVALID_BUFFER_SLOT) {
+ // foundSync guaranteed to be != INVALID_BUFFER_SLOT
+ found = foundSync;
+ }
+
+ if (found == INVALID_BUFFER_SLOT) {
+ return -EBUSY;
+ }
+
+ const int buf = found;
+ *outBuf = found;
+
+ const bool useDefaultSize = !w && !h;
+ if (useDefaultSize) {
+ // use the default size
+ w = mDefaultWidth;
+ h = mDefaultHeight;
+ }
+
+ const bool updateFormat = (format != 0);
+ if (!updateFormat) {
+ // keep the current (or default) format
+ format = mPixelFormat;
+ }
+
+ // buffer is now in DEQUEUED (but can also be current at the same time,
+ // if we're in synchronous mode)
+ mSlots[buf].mBufferState = BufferSlot::DEQUEUED;
+
+ const sp<GraphicBuffer>& buffer(mSlots[buf].mGraphicBuffer);
+ if ((buffer == NULL) ||
+ (uint32_t(buffer->width) != w) ||
+ (uint32_t(buffer->height) != h) ||
+ (uint32_t(buffer->format) != format) ||
+ ((uint32_t(buffer->usage) & usage) != usage)) {
+ usage |= GraphicBuffer::USAGE_HW_TEXTURE;
+ status_t error;
+ sp<GraphicBuffer> graphicBuffer(
+ mGraphicBufferAlloc->createGraphicBuffer(
+ w, h, format, usage, &error));
+ if (graphicBuffer == 0) {
+ LOGE("dequeueBuffer: SurfaceComposer::createGraphicBuffer failed");
+ return error;
+ }
+ if (updateFormat) {
+ mPixelFormat = format;
+ }
+ mSlots[buf].mGraphicBuffer = graphicBuffer;
+ mSlots[buf].mRequestBufferCalled = false;
+ returnFlags |= ISurfaceTexture::BUFFER_NEEDS_REALLOCATION;
+ }
+ return returnFlags;
+}
+
+status_t SurfaceMediaSource::setSynchronousMode(bool enabled) {
+ Mutex::Autolock lock(mMutex);
+
+ status_t err = OK;
+ if (!enabled) {
+ // going to asynchronous mode, drain the queue
+ while (mSynchronousMode != enabled && !mQueue.isEmpty()) {
+ mDequeueCondition.wait(mMutex);
+ }
+ }
+
+ if (mSynchronousMode != enabled) {
+ // - if we're going to asynchronous mode, the queue is guaranteed to be
+ // empty here
+ // - if the client set the number of buffers, we're guaranteed that
+ // we have at least 3 (because we don't allow less)
+ mSynchronousMode = enabled;
+ mDequeueCondition.signal();
+ }
+ return err;
+}
+
+status_t SurfaceMediaSource::connect(int api) {
+ LOGV("SurfaceMediaSource::connect");
+ Mutex::Autolock lock(mMutex);
+ status_t err = NO_ERROR;
+ switch (api) {
+ case NATIVE_WINDOW_API_EGL:
+ case NATIVE_WINDOW_API_CPU:
+ case NATIVE_WINDOW_API_MEDIA:
+ case NATIVE_WINDOW_API_CAMERA:
+ if (mConnectedApi != NO_CONNECTED_API) {
+ err = -EINVAL;
+ } else {
+ mConnectedApi = api;
+ }
+ break;
+ default:
+ err = -EINVAL;
+ break;
+ }
+ return err;
+}
+
+status_t SurfaceMediaSource::disconnect(int api) {
+ LOGV("SurfaceMediaSource::disconnect");
+ Mutex::Autolock lock(mMutex);
+ status_t err = NO_ERROR;
+ switch (api) {
+ case NATIVE_WINDOW_API_EGL:
+ case NATIVE_WINDOW_API_CPU:
+ case NATIVE_WINDOW_API_MEDIA:
+ case NATIVE_WINDOW_API_CAMERA:
+ if (mConnectedApi == api) {
+ mConnectedApi = NO_CONNECTED_API;
+ } else {
+ err = -EINVAL;
+ }
+ break;
+ default:
+ err = -EINVAL;
+ break;
+ }
+ return err;
+}
+
+status_t SurfaceMediaSource::queueBuffer(int buf, int64_t timestamp,
+ uint32_t* outWidth, uint32_t* outHeight, uint32_t* outTransform) {
+ LOGV("queueBuffer");
+
+ Mutex::Autolock lock(mMutex);
+ if (buf < 0 || buf >= mBufferCount) {
+ LOGE("queueBuffer: slot index out of range [0, %d]: %d",
+ mBufferCount, buf);
+ return -EINVAL;
+ } else if (mSlots[buf].mBufferState != BufferSlot::DEQUEUED) {
+ LOGE("queueBuffer: slot %d is not owned by the client (state=%d)",
+ buf, mSlots[buf].mBufferState);
+ return -EINVAL;
+ } else if (!mSlots[buf].mRequestBufferCalled) {
+ LOGE("queueBuffer: slot %d was enqueued without requesting a "
+ "buffer", buf);
+ return -EINVAL;
+ }
+
+ if (mSynchronousMode) {
+ // in synchronous mode we queue all buffers in a FIFO
+ mQueue.push_back(buf);
+ LOGV("Client queued buffer on slot: %d, Q size = %d",
+ buf, mQueue.size());
+ } else {
+ // in asynchronous mode we only keep the most recent buffer
+ if (mQueue.empty()) {
+ mQueue.push_back(buf);
+ } else {
+ Fifo::iterator front(mQueue.begin());
+ // buffer currently queued is freed
+ mSlots[*front].mBufferState = BufferSlot::FREE;
+ // and we record the new buffer index in the queued list
+ *front = buf;
+ }
+ }
+
+ mSlots[buf].mBufferState = BufferSlot::QUEUED;
+ mSlots[buf].mTimestamp = timestamp;
+    // TODO: (Confirm) We don't want to signal dequeue here.
+    // Maybe only in asynchronous mode?
+ // mDequeueCondition.signal();
+
+    // Once the queuing is done, we notify the listener and signal
+    // the buffer consumer (encoder) that a buffer is available.
+ onFrameReceivedLocked();
+
+ *outWidth = mDefaultWidth;
+ *outHeight = mDefaultHeight;
+ *outTransform = 0;
+
+ return OK;
+}
+
+
+// onFrameReceivedLocked informs the buffer consumer (StagefrightRecorder)
+// or listeners that a frame has been received.
+// It is supposed to be called only from queueBuffer().
+// The buffer is NOT made available for dequeueing immediately; we need to
+// wait for StagefrightRecorder to mark the buffer FREE.
+// This must be called with the mutex locked.
+status_t SurfaceMediaSource::onFrameReceivedLocked() {
+ LOGV("On Frame Received");
+ // Signal the encoder that a new frame has arrived
+ mFrameAvailableCondition.signal();
+
+ // call back the listener
+ // TODO: The listener may not be needed in SurfaceMediaSource at all.
+ // This can be made a SurfaceTexture specific thing
+ sp<FrameAvailableListener> listener;
+ if (mSynchronousMode || mQueue.empty()) {
+ listener = mFrameAvailableListener;
+ }
+
+ if (listener != 0) {
+ listener->onFrameAvailable();
+ }
+ return OK;
+}
+
+
+void SurfaceMediaSource::cancelBuffer(int buf) {
+ LOGV("SurfaceMediaSource::cancelBuffer");
+ Mutex::Autolock lock(mMutex);
+ if (buf < 0 || buf >= mBufferCount) {
+ LOGE("cancelBuffer: slot index out of range [0, %d]: %d",
+ mBufferCount, buf);
+ return;
+ } else if (mSlots[buf].mBufferState != BufferSlot::DEQUEUED) {
+ LOGE("cancelBuffer: slot %d is not owned by the client (state=%d)",
+ buf, mSlots[buf].mBufferState);
+ return;
+ }
+ mSlots[buf].mBufferState = BufferSlot::FREE;
+ mDequeueCondition.signal();
+}
+
+nsecs_t SurfaceMediaSource::getTimestamp() {
+ LOGV("SurfaceMediaSource::getTimestamp");
+ Mutex::Autolock lock(mMutex);
+ return mCurrentTimestamp;
+}
+
+
+void SurfaceMediaSource::setFrameAvailableListener(
+ const sp<FrameAvailableListener>& listener) {
+ LOGV("SurfaceMediaSource::setFrameAvailableListener");
+ Mutex::Autolock lock(mMutex);
+ mFrameAvailableListener = listener;
+}
+
+void SurfaceMediaSource::freeAllBuffers() {
+ LOGV("freeAllBuffers");
+ for (int i = 0; i < NUM_BUFFER_SLOTS; i++) {
+ mSlots[i].mGraphicBuffer = 0;
+ mSlots[i].mBufferState = BufferSlot::FREE;
+ }
+}
+
+sp<GraphicBuffer> SurfaceMediaSource::getCurrentBuffer() const {
+ Mutex::Autolock lock(mMutex);
+ return mCurrentBuf;
+}
+
+int SurfaceMediaSource::query(int what, int* outValue)
+{
+ LOGV("query");
+ Mutex::Autolock lock(mMutex);
+ int value;
+ switch (what) {
+ case NATIVE_WINDOW_WIDTH:
+ value = mDefaultWidth;
+ if (!mDefaultWidth && !mDefaultHeight && mCurrentBuf != 0)
+ value = mCurrentBuf->width;
+ break;
+ case NATIVE_WINDOW_HEIGHT:
+ value = mDefaultHeight;
+ if (!mDefaultWidth && !mDefaultHeight && mCurrentBuf != 0)
+ value = mCurrentBuf->height;
+ break;
+ case NATIVE_WINDOW_FORMAT:
+ value = mPixelFormat;
+ break;
+ case NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS:
+ value = mSynchronousMode ?
+ (MIN_UNDEQUEUED_BUFFERS-1) : MIN_UNDEQUEUED_BUFFERS;
+ break;
+ default:
+ return BAD_VALUE;
+ }
+ outValue[0] = value;
+ return NO_ERROR;
+}
+
+void SurfaceMediaSource::dump(String8& result) const
+{
+ char buffer[1024];
+ dump(result, "", buffer, 1024);
+}
+
+void SurfaceMediaSource::dump(String8& result, const char* prefix,
+ char* buffer, size_t SIZE) const
+{
+ Mutex::Autolock _l(mMutex);
+ snprintf(buffer, SIZE,
+ "%smBufferCount=%d, mSynchronousMode=%d, default-size=[%dx%d], "
+ "mPixelFormat=%d, \n",
+ prefix, mBufferCount, mSynchronousMode, mDefaultWidth, mDefaultHeight,
+ mPixelFormat);
+ result.append(buffer);
+
+ String8 fifo;
+ int fifoSize = 0;
+ Fifo::const_iterator i(mQueue.begin());
+ while (i != mQueue.end()) {
+ snprintf(buffer, SIZE, "%02d ", *i++);
+ fifoSize++;
+ fifo.append(buffer);
+ }
+
+    result.append(fifo);
+
+ struct {
+ const char * operator()(int state) const {
+ switch (state) {
+ case BufferSlot::DEQUEUED: return "DEQUEUED";
+ case BufferSlot::QUEUED: return "QUEUED";
+ case BufferSlot::FREE: return "FREE";
+ default: return "Unknown";
+ }
+ }
+ } stateName;
+
+ for (int i = 0; i < mBufferCount; i++) {
+ const BufferSlot& slot(mSlots[i]);
+ snprintf(buffer, SIZE,
+ "%s%s[%02d] state=%-8s, "
+ "timestamp=%lld\n",
+ prefix, (i==mCurrentSlot)?">":" ", i, stateName(slot.mBufferState),
+ slot.mTimestamp
+ );
+ result.append(buffer);
+ }
+}
+
+status_t SurfaceMediaSource::setFrameRate(int32_t fps)
+{
+ Mutex::Autolock lock(mMutex);
+ const int MAX_FRAME_RATE = 60;
+ if (fps < 0 || fps > MAX_FRAME_RATE) {
+ return BAD_VALUE;
+ }
+ mFrameRate = fps;
+ return OK;
+}
+
+bool SurfaceMediaSource::isMetaDataStoredInVideoBuffers() const {
+ LOGV("isMetaDataStoredInVideoBuffers");
+ return true;
+}
+
+int32_t SurfaceMediaSource::getFrameRate( ) const {
+ Mutex::Autolock lock(mMutex);
+ return mFrameRate;
+}
+
+status_t SurfaceMediaSource::start(MetaData *params)
+{
+ LOGV("start");
+ Mutex::Autolock lock(mMutex);
+ CHECK(!mStarted);
+ mStarted = true;
+ return OK;
+}
+
+
+status_t SurfaceMediaSource::stop()
+{
+ LOGV("Stop");
+
+ Mutex::Autolock lock(mMutex);
+    // TODO: Add waiting on mFrameCompleteCondition here?
+ mStarted = false;
+ mFrameAvailableCondition.signal();
+
+ return OK;
+}
+
+sp<MetaData> SurfaceMediaSource::getFormat()
+{
+ LOGV("getFormat");
+ Mutex::Autolock autoLock(mMutex);
+ sp<MetaData> meta = new MetaData;
+
+ meta->setInt32(kKeyWidth, mDefaultWidth);
+ meta->setInt32(kKeyHeight, mDefaultHeight);
+    // The encoder format is set to an opaque color format;
+    // the encoder will later find out the actual color format
+    // from the GL frames themselves.
+ meta->setInt32(kKeyColorFormat, OMX_COLOR_FormatAndroidOpaque);
+ meta->setInt32(kKeyStride, mDefaultWidth);
+ meta->setInt32(kKeySliceHeight, mDefaultHeight);
+ meta->setInt32(kKeyFrameRate, mFrameRate);
+ meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
+ return meta;
+}
+
+status_t SurfaceMediaSource::read( MediaBuffer **buffer,
+ const ReadOptions *options)
+{
+ LOGV("Read. Size of queued buffer: %d", mQueue.size());
+ *buffer = NULL;
+
+    Mutex::Autolock autoLock(mMutex);
+ // If the recording has started and the queue is empty, then just
+ // wait here till the frames come in from the client side
+ while (mStarted && mQueue.empty()) {
+ LOGV("NO FRAMES! Recorder waiting for FrameAvailableCondition");
+ mFrameAvailableCondition.wait(mMutex);
+ }
+
+ // If the loop was exited as a result of stopping the recording,
+ // it is OK
+ if (!mStarted) {
+ return OK;
+ }
+
+ // Update the current buffer info
+    // TODO: mCurrentSlot could be made part of the buffer state, since
+    // there can be more than one "current" slot.
+ Fifo::iterator front(mQueue.begin());
+ mCurrentSlot = *front;
+ mCurrentBuf = mSlots[mCurrentSlot].mGraphicBuffer;
+ mCurrentTimestamp = mSlots[mCurrentSlot].mTimestamp;
+
+ // Pass the data to the MediaBuffer
+ // TODO: Change later to pass in only the metadata
+ *buffer = new MediaBuffer(mCurrentBuf);
+ (*buffer)->setObserver(this);
+ (*buffer)->add_ref();
+ (*buffer)->meta_data()->setInt64(kKeyTime, mCurrentTimestamp);
+
+ return OK;
+}
+
+void SurfaceMediaSource::signalBufferReturned(MediaBuffer *buffer) {
+ LOGV("signalBufferReturned");
+
+ bool foundBuffer = false;
+ Mutex::Autolock autoLock(mMutex);
+
+ if (!mStarted) {
+ LOGV("started = false. Nothing to do");
+ return;
+ }
+
+ for (Fifo::iterator it = mQueue.begin(); it != mQueue.end(); ++it) {
+ if (mSlots[*it].mGraphicBuffer == buffer->graphicBuffer()) {
+ LOGV("Buffer %d returned. Setting it 'FREE'. New Queue size = %d",
+ *it, mQueue.size()-1);
+ mSlots[*it].mBufferState = BufferSlot::FREE;
+ mQueue.erase(it);
+ buffer->setObserver(0);
+ buffer->release();
+ mDequeueCondition.signal();
+ mFrameCompleteCondition.signal();
+ foundBuffer = true;
+ break;
+ }
+ }
+
+ if (!foundBuffer) {
+ CHECK_EQ(0, "signalBufferReturned: bogus buffer");
+ }
+}
+
+} // end of namespace android
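Note: as a usage illustration (in the spirit of the DummyRecorder test helper added later in this patch), a consumer drives SurfaceMediaSource through the ordinary MediaSource interface. read() blocks until the GL producer queues a frame, and releasing the returned MediaBuffer ends up in signalBufferReturned(), which marks the slot FREE again. The 176x144 size and the loop structure are illustrative, and a producer is assumed to be queuing frames through the ANativeWindow side of the source.

    sp<SurfaceMediaSource> source = new SurfaceMediaSource(176, 144);
    source->start(NULL);

    MediaBuffer* frame = NULL;
    while (source->read(&frame, NULL) == OK && frame != NULL) {
        // hand frame->graphicBuffer() and its timestamp to the encoder ...
        frame->release();       // ends up in signalBufferReturned()
        frame = NULL;
    }
    // stop() is typically called from another thread to end the loop.
    source->stop();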
diff --git a/media/libstagefright/chromium_http/ChromiumHTTPDataSource.cpp b/media/libstagefright/chromium_http/ChromiumHTTPDataSource.cpp
index 588a74d..07a9eb8 100644
--- a/media/libstagefright/chromium_http/ChromiumHTTPDataSource.cpp
+++ b/media/libstagefright/chromium_http/ChromiumHTTPDataSource.cpp
@@ -25,6 +25,8 @@
#include "support.h"
+#include <cutils/properties.h> // for property_get
+
namespace android {
ChromiumHTTPDataSource::ChromiumHTTPDataSource(uint32_t flags)
@@ -111,7 +113,7 @@ void ChromiumHTTPDataSource::onConnectionFailed(status_t err) {
mState = DISCONNECTED;
mCondition.broadcast();
- mURI.clear();
+ // mURI.clear();
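+    // Keep mURI so that reconnectAtOffset() can re-establish the connection later.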
mIOResult = err;
@@ -150,8 +152,18 @@ ssize_t ChromiumHTTPDataSource::readAt(off64_t offset, void *data, size_t size)
Mutex::Autolock autoLock(mLock);
if (mState != CONNECTED) {
- return ERROR_NOT_CONNECTED;
+ return INVALID_OPERATION;
+ }
+
+#if 0
+ char value[PROPERTY_VALUE_MAX];
+ if (property_get("media.stagefright.disable-net", value, 0)
+ && (!strcasecmp(value, "true") || !strcmp(value, "1"))) {
+ LOG_PRI(ANDROID_LOG_INFO, LOG_TAG, "Simulating that the network is down.");
+ disconnect_l();
+ return ERROR_IO;
}
+#endif
if (offset != mCurrentOffset) {
AString tmp = mURI;
@@ -236,7 +248,7 @@ void ChromiumHTTPDataSource::onDisconnectComplete() {
CHECK_EQ((int)mState, (int)DISCONNECTING);
mState = DISCONNECTED;
- mURI.clear();
+ // mURI.clear();
mCondition.broadcast();
@@ -299,5 +311,21 @@ void ChromiumHTTPDataSource::clearDRMState_l() {
}
}
+status_t ChromiumHTTPDataSource::reconnectAtOffset(off64_t offset) {
+ Mutex::Autolock autoLock(mLock);
+
+ if (mURI.empty()) {
+ return INVALID_OPERATION;
+ }
+
+ LOG_PRI(ANDROID_LOG_INFO, LOG_TAG, "Reconnecting...");
+ status_t err = connect_l(mURI.c_str(), &mHeaders, offset);
+ if (err != OK) {
+ LOG_PRI(ANDROID_LOG_INFO, LOG_TAG, "Reconnect failed w/ err 0x%08x", err);
+ }
+
+ return err;
+}
+
} // namespace android
diff --git a/media/libstagefright/include/ChromiumHTTPDataSource.h b/media/libstagefright/include/ChromiumHTTPDataSource.h
index d833e2e..18f8913 100644
--- a/media/libstagefright/include/ChromiumHTTPDataSource.h
+++ b/media/libstagefright/include/ChromiumHTTPDataSource.h
@@ -51,6 +51,8 @@ struct ChromiumHTTPDataSource : public HTTPBase {
virtual String8 getMIMEType() const;
+ virtual status_t reconnectAtOffset(off64_t offset);
+
protected:
virtual ~ChromiumHTTPDataSource();
diff --git a/media/libstagefright/include/NuCachedSource2.h b/media/libstagefright/include/NuCachedSource2.h
index 2d6cb84..22b2855 100644
--- a/media/libstagefright/include/NuCachedSource2.h
+++ b/media/libstagefright/include/NuCachedSource2.h
@@ -77,6 +77,10 @@ private:
kWhatRead = 'read',
};
+ enum {
+ kMaxNumRetries = 10,
+ };
+
sp<DataSource> mSource;
sp<AHandlerReflector<NuCachedSource2> > mReflector;
sp<ALooper> mLooper;
@@ -93,6 +97,8 @@ private:
bool mFetching;
int64_t mLastFetchTimeUs;
+ int32_t mNumRetriesLeft;
+
void onMessageReceived(const sp<AMessage> &msg);
void onFetch();
void onRead(const sp<AMessage> &msg);
diff --git a/media/libstagefright/tests/Android.mk b/media/libstagefright/tests/Android.mk
new file mode 100644
index 0000000..3ea8f39
--- /dev/null
+++ b/media/libstagefright/tests/Android.mk
@@ -0,0 +1,53 @@
+# Build the unit tests.
+LOCAL_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+
+ifneq ($(TARGET_SIMULATOR),true)
+
+LOCAL_MODULE := SurfaceMediaSource_test
+
+LOCAL_MODULE_TAGS := tests
+
+LOCAL_SRC_FILES := \
+ SurfaceMediaSource_test.cpp \
+ DummyRecorder.cpp \
+
+LOCAL_SHARED_LIBRARIES := \
+ libEGL \
+ libGLESv2 \
+ libandroid \
+ libbinder \
+ libcutils \
+ libgui \
+ libstlport \
+ libui \
+ libutils \
+ libstagefright \
+ libstagefright_omx \
+ libstagefright_foundation \
+
+LOCAL_STATIC_LIBRARIES := \
+ libgtest \
+ libgtest_main \
+
+LOCAL_C_INCLUDES := \
+ bionic \
+ bionic/libstdc++/include \
+ external/gtest/include \
+ external/stlport/stlport \
+ frameworks/base/media/libstagefright \
+ frameworks/base/media/libstagefright/include \
+ $(TOP)/frameworks/base/include/media/stagefright/openmax \
+
+include $(BUILD_EXECUTABLE)
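+
+# A typical way to build and run this test from an AOSP tree is sketched
+# below; the exact output path of the test binary varies by build
+# configuration, so treat these commands as illustrative rather than exact:
+#   mmm frameworks/base/media/libstagefright/tests
+#   adb push $OUT/system/bin/SurfaceMediaSource_test /data/local/tmp/
+#   adb shell /data/local/tmp/SurfaceMediaSource_test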
+
+endif
+
+# Include subdirectory makefiles
+# ============================================================
+
+# If we're building with ONE_SHOT_MAKEFILE (mm, mmm), then what the framework
+# team really wants is to build the stuff defined by this makefile.
+ifeq (,$(ONE_SHOT_MAKEFILE))
+include $(call first-makefiles-under,$(LOCAL_PATH))
+endif
diff --git a/media/libstagefright/tests/DummyRecorder.cpp b/media/libstagefright/tests/DummyRecorder.cpp
new file mode 100644
index 0000000..8d75d6b
--- /dev/null
+++ b/media/libstagefright/tests/DummyRecorder.cpp
@@ -0,0 +1,91 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "DummyRecorder"
+// #define LOG_NDEBUG 0
+
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaSource.h>
+#include "DummyRecorder.h"
+
+#include <utils/Log.h>
+
+namespace android {
+
+// static
+void *DummyRecorder::threadWrapper(void *pthis) {
+ LOGV("ThreadWrapper: %p", pthis);
+ DummyRecorder *writer = static_cast<DummyRecorder *>(pthis);
+ writer->readFromSource();
+ return NULL;
+}
+
+
+status_t DummyRecorder::start() {
+ LOGV("Start");
+ mStarted = true;
+
+ mSource->start();
+
+ pthread_attr_t attr;
+ pthread_attr_init(&attr);
+ pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
+ int err = pthread_create(&mThread, &attr, threadWrapper, this);
+ pthread_attr_destroy(&attr);
+
+ if (err) {
+ LOGE("Error creating thread!");
+ return -ENODEV;
+ }
+ return OK;
+}
+
+
+status_t DummyRecorder::stop() {
+ LOGV("Stop");
+ mStarted = false;
+
+ mSource->stop();
+ void *dummy;
+ pthread_join(mThread, &dummy);
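+ // threadWrapper() returns NULL, so the status recovered from pthread_join()
+ // is always OK; a real writer could report a reader-thread error this way.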
+ status_t err = (status_t) dummy;
+
+ LOGV("Ending the reading thread");
+ return err;
+}
+
+// pretend to read the source buffers
+void DummyRecorder::readFromSource() {
+ LOGV("ReadFromSource");
+ if (!mStarted) {
+ return;
+ }
+
+ status_t err = OK;
+ MediaBuffer *buffer;
+ LOGV("A fake writer accessing the frames");
+ while (mStarted && (err = mSource->read(&buffer)) == OK){
+ // exit if the source did not return a valid buffer
+ if (buffer == NULL) {
+ return;
+ }
+ buffer->release();
+ buffer = NULL;
+ }
+}
+
+
+} // end of namespace android
diff --git a/media/libstagefright/tests/DummyRecorder.h b/media/libstagefright/tests/DummyRecorder.h
new file mode 100644
index 0000000..1cbea1b
--- /dev/null
+++ b/media/libstagefright/tests/DummyRecorder.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef DUMMY_RECORDER_H_
+#define DUMMY_RECORDER_H_
+
+#include <pthread.h>
+#include <utils/String8.h>
+#include <media/stagefright/foundation/ABase.h>
+
+
+namespace android {
+
+class MediaSource;
+class MediaBuffer;
+
+class DummyRecorder {
+ public:
+ // The media source from which this will receive frames
+ sp<MediaSource> mSource;
+ bool mStarted;
+ pthread_t mThread;
+
+ status_t start();
+ status_t stop();
+
+ // actual entry point for the thread
+ void readFromSource();
+
+ // static function to wrap the actual thread entry point
+ static void *threadWrapper(void *pthis);
+
+ DummyRecorder(const sp<MediaSource> &source) : mSource(source)
+ , mStarted(false) {}
+ ~DummyRecorder( ) {}
+
+ private:
+
+ DISALLOW_EVIL_CONSTRUCTORS(DummyRecorder);
+};
+
+} // end of namespace android
+#endif
+
+
diff --git a/media/libstagefright/tests/SurfaceMediaSource_test.cpp b/media/libstagefright/tests/SurfaceMediaSource_test.cpp
new file mode 100644
index 0000000..ce10812
--- /dev/null
+++ b/media/libstagefright/tests/SurfaceMediaSource_test.cpp
@@ -0,0 +1,349 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "SurfaceMediaSource_test"
+// #define LOG_NDEBUG 0
+
+#include <gtest/gtest.h>
+#include <utils/String8.h>
+#include <utils/Errors.h>
+
+#include <media/stagefright/SurfaceMediaSource.h>
+
+#include <gui/SurfaceTextureClient.h>
+#include <ui/GraphicBuffer.h>
+#include <surfaceflinger/ISurfaceComposer.h>
+#include <surfaceflinger/Surface.h>
+#include <surfaceflinger/SurfaceComposerClient.h>
+
+#include <binder/ProcessState.h>
+#include <ui/FramebufferNativeWindow.h>
+
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MPEG4Writer.h>
+#include <media/stagefright/OMXClient.h>
+#include <media/stagefright/OMXCodec.h>
+#include <OMX_Component.h>
+
+#include "DummyRecorder.h"
+
+namespace android {
+
+
+class SurfaceMediaSourceTest : public ::testing::Test {
+public:
+
+ SurfaceMediaSourceTest(): mYuvTexWidth(64), mYuvTexHeight(66) {}
+ sp<MPEG4Writer> setUpWriter(OMXClient &client);
+ void oneBufferPass(int width, int height);
+ static void fillYV12Buffer(uint8_t* buf, int w, int h, int stride);
+ static void fillYV12BufferRect(uint8_t* buf, int w, int h,
+ int stride, const android_native_rect_t& rect);
+protected:
+
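+ // SetUp wires up the producer/consumer pair used by every test: the
+ // SurfaceMediaSource is the buffer consumer, and the SurfaceTextureClient
+ // (exposed as an ANativeWindow) is the producer side the tests draw into.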
+ virtual void SetUp() {
+ mSMS = new SurfaceMediaSource(mYuvTexWidth, mYuvTexHeight);
+ mSMS->setSynchronousMode(true);
+ mSTC = new SurfaceTextureClient(mSMS);
+ mANW = mSTC;
+
+ }
+
+
+ virtual void TearDown() {
+ mSMS.clear();
+ mSTC.clear();
+ mANW.clear();
+ }
+
+ const int mYuvTexWidth;// = 64;
+ const int mYuvTexHeight;// = 66;
+
+ sp<SurfaceMediaSource> mSMS;
+ sp<SurfaceTextureClient> mSTC;
+ sp<ANativeWindow> mANW;
+
+};
+
+void SurfaceMediaSourceTest::oneBufferPass(int width, int height ) {
+ LOGV("One Buffer Pass");
+ ANativeWindowBuffer* anb;
+ ASSERT_EQ(NO_ERROR, mANW->dequeueBuffer(mANW.get(), &anb));
+ ASSERT_TRUE(anb != NULL);
+
+ sp<GraphicBuffer> buf(new GraphicBuffer(anb, false));
+ ASSERT_EQ(NO_ERROR, mANW->lockBuffer(mANW.get(), buf->getNativeBuffer()));
+
+ // Fill the buffer with a checkerboard pattern
+ uint8_t* img = NULL;
+ buf->lock(GRALLOC_USAGE_SW_WRITE_OFTEN, (void**)(&img));
+ SurfaceMediaSourceTest::fillYV12Buffer(img, width, height, buf->getStride());
+ buf->unlock();
+
+ ASSERT_EQ(NO_ERROR, mANW->queueBuffer(mANW.get(), buf->getNativeBuffer()));
+}
+
+sp<MPEG4Writer> SurfaceMediaSourceTest::setUpWriter(OMXClient &client ) {
+ // Writing to a file
+ const char *fileName = "/sdcard/outputSurfEnc.mp4";
+ sp<MetaData> enc_meta = new MetaData;
+ enc_meta->setInt32(kKeyBitRate, 300000);
+ enc_meta->setInt32(kKeyFrameRate, 30);
+
+ enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG4);
+
+ sp<MetaData> meta = mSMS->getFormat();
+
+ int32_t width, height, stride, sliceHeight, colorFormat;
+ CHECK(meta->findInt32(kKeyWidth, &width));
+ CHECK(meta->findInt32(kKeyHeight, &height));
+ CHECK(meta->findInt32(kKeyStride, &stride));
+ CHECK(meta->findInt32(kKeySliceHeight, &sliceHeight));
+ CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
+
+ enc_meta->setInt32(kKeyWidth, width);
+ enc_meta->setInt32(kKeyHeight, height);
+ enc_meta->setInt32(kKeyIFramesInterval, 1);
+ enc_meta->setInt32(kKeyStride, stride);
+ enc_meta->setInt32(kKeySliceHeight, sliceHeight);
+ // TODO: overwriting the color format, since the format set by gralloc
+ // could be wrong or not readable by OMX
+ enc_meta->setInt32(kKeyColorFormat, OMX_COLOR_FormatYUV420Planar);
+ // colorFormat);
+
+
+ sp<MediaSource> encoder =
+ OMXCodec::Create(
+ client.interface(), enc_meta, true /* createEncoder */, mSMS);
+
+ sp<MPEG4Writer> writer = new MPEG4Writer(fileName);
+ writer->addSource(encoder);
+
+ return writer;
+}
+
+// Fill a YV12 buffer with a multi-colored checkerboard pattern
+void SurfaceMediaSourceTest::fillYV12Buffer(uint8_t* buf, int w, int h, int stride) {
+ const int blockWidth = w > 16 ? w / 16 : 1;
+ const int blockHeight = h > 16 ? h / 16 : 1;
+ const int yuvTexOffsetY = 0;
+ int yuvTexStrideY = stride;
+ int yuvTexOffsetV = yuvTexStrideY * h;
+ int yuvTexStrideV = (yuvTexStrideY/2 + 0xf) & ~0xf;
+ int yuvTexOffsetU = yuvTexOffsetV + yuvTexStrideV * h/2;
+ int yuvTexStrideU = yuvTexStrideV;
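+ // YV12 layout: full-resolution Y plane first, then the quarter-resolution
+ // V plane followed by the U plane; the chroma stride is half the luma
+ // stride rounded up to a multiple of 16.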
+ for (int x = 0; x < w; x++) {
+ for (int y = 0; y < h; y++) {
+ int parityX = (x / blockWidth) & 1;
+ int parityY = (y / blockHeight) & 1;
+ unsigned char intensity = (parityX ^ parityY) ? 63 : 191;
+ buf[yuvTexOffsetY + (y * yuvTexStrideY) + x] = intensity;
+ if (x < w / 2 && y < h / 2) {
+ buf[yuvTexOffsetU + (y * yuvTexStrideU) + x] = intensity;
+ if (x * 2 < w / 2 && y * 2 < h / 2) {
+ buf[yuvTexOffsetV + (y*2 * yuvTexStrideV) + x*2 + 0] =
+ buf[yuvTexOffsetV + (y*2 * yuvTexStrideV) + x*2 + 1] =
+ buf[yuvTexOffsetV + ((y*2+1) * yuvTexStrideV) + x*2 + 0] =
+ buf[yuvTexOffsetV + ((y*2+1) * yuvTexStrideV) + x*2 + 1] =
+ intensity;
+ }
+ }
+ }
+ }
+}
+
+// Fill a YV12 buffer with red outside a given rectangle and green inside it.
+void SurfaceMediaSourceTest::fillYV12BufferRect(uint8_t* buf, int w,
+ int h, int stride, const android_native_rect_t& rect) {
+ const int yuvTexOffsetY = 0;
+ int yuvTexStrideY = stride;
+ int yuvTexOffsetV = yuvTexStrideY * h;
+ int yuvTexStrideV = (yuvTexStrideY/2 + 0xf) & ~0xf;
+ int yuvTexOffsetU = yuvTexOffsetV + yuvTexStrideV * h/2;
+ int yuvTexStrideU = yuvTexStrideV;
+ for (int x = 0; x < w; x++) {
+ for (int y = 0; y < h; y++) {
+ bool inside = rect.left <= x && x < rect.right &&
+ rect.top <= y && y < rect.bottom;
+ buf[yuvTexOffsetY + (y * yuvTexStrideY) + x] = inside ? 240 : 64;
+ if (x < w / 2 && y < h / 2) {
+ bool inside = rect.left <= 2*x && 2*x < rect.right &&
+ rect.top <= 2*y && 2*y < rect.bottom;
+ buf[yuvTexOffsetU + (y * yuvTexStrideU) + x] = 16;
+ buf[yuvTexOffsetV + (y * yuvTexStrideV) + x] =
+ inside ? 16 : 255;
+ }
+ }
+ }
+} // End of SurfaceMediaSourceTest helper definitions
+
+///////////////////////////////////////////////////////////////////
+// Class to imitate the recording path
+///////////////////////////////////////////////////////////////////
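+// Reads frames on the caller's thread (unlike DummyRecorder, which spawns a
+// separate reader thread), so tests can control exactly when each frame is
+// consumed.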
+struct SimpleDummyRecorder {
+ sp<MediaSource> mSource;
+
+ SimpleDummyRecorder(const sp<MediaSource> &source): mSource(source) {}
+
+ status_t start() { return mSource->start();}
+ status_t stop() { return mSource->stop();}
+
+ // fakes reading from a media source
+ status_t readFromSource() {
+ MediaBuffer *buffer;
+ status_t err = mSource->read(&buffer);
+ if (err != OK) {
+ return err;
+ }
+ buffer->release();
+ buffer = NULL;
+ return OK;
+ }
+};
+
+///////////////////////////////////////////////////////////////////
+// TESTS
+// Just pass one buffer from the native_window to the SurfaceMediaSource
+TEST_F(SurfaceMediaSourceTest, EncodingFromCpuFilledYV12BufferNpotOneBufferPass) {
+ LOGV("Testing OneBufferPass ******************************");
+
+ ASSERT_EQ(NO_ERROR, native_window_set_buffers_geometry(mANW.get(),
+ 0, 0, HAL_PIXEL_FORMAT_YV12));
+ // OMX_COLOR_FormatYUV420Planar)); // ));
+ ASSERT_EQ(NO_ERROR, native_window_set_usage(mANW.get(),
+ GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN));
+
+ oneBufferPass(mYuvTexWidth, mYuvTexHeight);
+}
+
+// Pass a buffer with the wrong width and height; it should not be accepted
+TEST_F(SurfaceMediaSourceTest, EncodingFromCpuFilledYV12BufferNpotWrongSizeBufferPass) {
+ LOGV("Testing Wrong size BufferPass ******************************");
+
+ // set the client-side buffer size different from the server size
+ ASSERT_EQ(NO_ERROR, native_window_set_buffers_geometry(mANW.get(),
+ 10, 10, HAL_PIXEL_FORMAT_YV12));
+ // OMX_COLOR_FormatYUV420Planar)); // ));
+ ASSERT_EQ(NO_ERROR, native_window_set_usage(mANW.get(),
+ GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN));
+
+ ANativeWindowBuffer* anb;
+
+ // make sure we get an error back when dequeuing!
+ ASSERT_NE(NO_ERROR, mANW->dequeueBuffer(mANW.get(), &anb));
+}
+
+
+// Pass multiple buffers from the native_window to the SurfaceMediaSource.
+// A dummy writer is used to simulate an actual MPEG4Writer.
+TEST_F(SurfaceMediaSourceTest, EncodingFromCpuFilledYV12BufferNpotMultiBufferPass) {
+ LOGV("Testing MultiBufferPass, Dummy Recorder *********************");
+ ASSERT_EQ(NO_ERROR, native_window_set_buffers_geometry(mANW.get(),
+ 0, 0, HAL_PIXEL_FORMAT_YV12));
+ ASSERT_EQ(NO_ERROR, native_window_set_usage(mANW.get(),
+ GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN));
+ SimpleDummyRecorder writer(mSMS);
+ writer.start();
+
+ int32_t nFramesCount = 0;
+ while (nFramesCount < 300) {
+ oneBufferPass(mYuvTexWidth, mYuvTexHeight);
+
+ ASSERT_EQ(NO_ERROR, writer.readFromSource());
+
+ nFramesCount++;
+ }
+ writer.stop();
+}
+
+// Delayed pass of multiple buffers from the native_window to the SurfaceMediaSource.
+// A dummy writer is used to simulate an actual MPEG4Writer.
+TEST_F(SurfaceMediaSourceTest, EncodingFromCpuFilledYV12BufferNpotMultiBufferPassLag) {
+ LOGV("Testing MultiBufferPass, Dummy Recorder Lagging **************");
+ ASSERT_EQ(NO_ERROR, native_window_set_buffers_geometry(mANW.get(),
+ 0, 0, HAL_PIXEL_FORMAT_YV12));
+ ASSERT_EQ(NO_ERROR, native_window_set_usage(mANW.get(),
+ GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN));
+ SimpleDummyRecorder writer(mSMS);
+ writer.start();
+
+ int32_t nFramesCount = 1;
+ const int FRAMES_LAG = mSMS->getBufferCount() - 1;
+ while (nFramesCount <= 300) {
+ oneBufferPass(mYuvTexWidth, mYuvTexHeight);
+ // Forcing the writer to lag behind a few frames
+ if (nFramesCount > FRAMES_LAG) {
+ ASSERT_EQ(NO_ERROR, writer.readFromSource());
+ }
+ nFramesCount++;
+ }
+ writer.stop();
+}
+
+// Pass multiple buffers from the native_window to the SurfaceMediaSource.
+// A dummy writer (MULTITHREADED) is used to simulate an actual MPEG4Writer.
+TEST_F(SurfaceMediaSourceTest, EncodingFromCpuFilledYV12BufferNpotMultiBufferPassThreaded) {
+ LOGV("Testing MultiBufferPass, Dummy Recorder Multi-Threaded **********");
+ ASSERT_EQ(NO_ERROR, native_window_set_buffers_geometry(mANW.get(),
+ 0, 0, HAL_PIXEL_FORMAT_YV12));
+ ASSERT_EQ(NO_ERROR, native_window_set_usage(mANW.get(),
+ GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN));
+
+ DummyRecorder writer(mSMS);
+ writer.start();
+
+ int32_t nFramesCount = 0;
+ while (nFramesCount <= 300) {
+ oneBufferPass(mYuvTexWidth, mYuvTexHeight);
+
+ nFramesCount++;
+ }
+ writer.stop();
+}
+
+// Test to examine the actual encoding path. Temporarily disabled until the
+// color format and encoding of gralloc data are resolved.
+TEST_F(SurfaceMediaSourceTest, DISABLED_EncodingFromCpuFilledYV12BufferNpotWrite) {
+ LOGV("Testing the whole pipeline with actual Recorder");
+ ASSERT_EQ(NO_ERROR, native_window_set_buffers_geometry(mANW.get(),
+ 0, 0, HAL_PIXEL_FORMAT_YV12)); // OMX_COLOR_FormatYUV420Planar)); // ));
+ ASSERT_EQ(NO_ERROR, native_window_set_usage(mANW.get(),
+ GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN));
+
+ OMXClient client;
+ CHECK_EQ(OK, client.connect());
+
+ sp<MPEG4Writer> writer = setUpWriter(client);
+ int64_t start = systemTime();
+ CHECK_EQ(OK, writer->start());
+
+ int32_t nFramesCount = 0;
+ while (nFramesCount <= 300) {
+ oneBufferPass(mYuvTexWidth, mYuvTexHeight);
+ nFramesCount++;
+ }
+
+ CHECK_EQ(OK, writer->stop());
+ writer.clear();
+ int64_t end = systemTime();
+ client.disconnect();
+}
+
+
+} // namespace android
diff --git a/packages/SystemUI/res/values/config.xml b/packages/SystemUI/res/values/config.xml
index 5298f2e..d7d7817 100644
--- a/packages/SystemUI/res/values/config.xml
+++ b/packages/SystemUI/res/values/config.xml
@@ -34,7 +34,7 @@
<string name="config_systemBarComponent" translatable="false">com.android.systemui.statusbar.tablet.TabletStatusBar</string>
<!-- Whether or not we show the number in the bar. -->
- <bool name="config_statusBarShowNumber">true</bool>
+ <bool name="config_statusBarShowNumber">false</bool>
<!-- How many icons may be shown at once in the system bar. Includes any
slots that may be reused for things like IME control. -->
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 482336b..0323fe0 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -2954,7 +2954,7 @@ AudioFlinger::PlaybackThread::Track::Track(
mStreamType = streamType;
// NOTE: audio_track_cblk_t::frameSize for 8 bit PCM data is based on a sample size of
// 16 bit because data is converted to 16 bit before being stored in buffer by AudioTrack
- mCblk->frameSize = audio_is_linear_pcm(format) ? mChannelCount * audio_bytes_per_sample(format) : sizeof(uint8_t);
+ mCblk->frameSize = audio_is_linear_pcm(format) ? mChannelCount * sizeof(int16_t) : sizeof(uint8_t);
}
}
diff --git a/services/java/com/android/server/AppWidgetService.java b/services/java/com/android/server/AppWidgetService.java
index 438883e..a679ca7 100644
--- a/services/java/com/android/server/AppWidgetService.java
+++ b/services/java/com/android/server/AppWidgetService.java
@@ -230,8 +230,14 @@ class AppWidgetService extends IAppWidgetService.Stub
pw.println(':');
pw.print(" min=("); pw.print(info.minWidth);
pw.print("x"); pw.print(info.minHeight);
+ pw.print(") minResize=("); pw.print(info.minResizeWidth);
+ pw.print("x"); pw.print(info.minResizeHeight);
pw.print(") updatePeriodMillis=");
pw.print(info.updatePeriodMillis);
+ pw.print(" resizeMode=");
+ pw.print(info.resizeMode);
+ pw.print(" autoAdvanceViewId=");
+ pw.print(info.autoAdvanceViewId);
pw.print(" initialLayout=#");
pw.print(Integer.toHexString(info.initialLayout));
pw.print(" zombie="); pw.println(p.zombie);
diff --git a/services/java/com/android/server/ConnectivityService.java b/services/java/com/android/server/ConnectivityService.java
index 5dd3a6a..ea78b52 100644
--- a/services/java/com/android/server/ConnectivityService.java
+++ b/services/java/com/android/server/ConnectivityService.java
@@ -1090,7 +1090,7 @@ public class ConnectivityService extends IConnectivityManager.Stub {
try {
InetAddress addr = InetAddress.getByAddress(hostAddress);
LinkProperties lp = tracker.getLinkProperties();
- return addRoute(lp, RouteInfo.makeHostRoute(addr));
+ return addRouteToAddress(lp, addr);
} catch (UnknownHostException e) {}
return false;
}
@@ -1103,6 +1103,31 @@ public class ConnectivityService extends IConnectivityManager.Stub {
return modifyRoute(p.getInterfaceName(), p, r, 0, false);
}
+ private boolean addRouteToAddress(LinkProperties lp, InetAddress addr) {
+ return modifyRouteToAddress(lp, addr, true);
+ }
+
+ private boolean removeRouteToAddress(LinkProperties lp, InetAddress addr) {
+ return modifyRouteToAddress(lp, addr, false);
+ }
+
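+ // Adds or removes a host route to the given address. If an existing route
+ // already covers the address, the host route is pointed at that route's
+ // gateway (or added directly when the address is the gateway itself).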
+ private boolean modifyRouteToAddress(LinkProperties lp, InetAddress addr, boolean doAdd) {
+ RouteInfo bestRoute = RouteInfo.selectBestRoute(lp.getRoutes(), addr);
+ if (bestRoute == null) {
+ bestRoute = RouteInfo.makeHostRoute(addr);
+ } else {
+ if (bestRoute.getGateway().equals(addr)) {
+ // the best route's gateway is this address itself, so add the implied direct host route
+ bestRoute = RouteInfo.makeHostRoute(addr);
+ } else {
+ // if we will reach this address through another route, add a direct route
+ // to its gateway
+ bestRoute = RouteInfo.makeHostRoute(addr, bestRoute.getGateway());
+ }
+ }
+ return modifyRoute(lp.getInterfaceName(), lp, bestRoute, 0, doAdd);
+ }
+
private boolean modifyRoute(String ifaceName, LinkProperties lp, RouteInfo r, int cycleCount,
boolean doAdd) {
if ((ifaceName == null) || (lp == null) || (r == null)) return false;
@@ -1713,49 +1738,50 @@ public class ConnectivityService extends IConnectivityManager.Stub {
*/
private void updateRoutes(LinkProperties newLp, LinkProperties curLp, boolean isLinkDefault) {
Collection<RouteInfo> routesToAdd = null;
- CompareResult<InetAddress> dnsDiff = null;
-
+ CompareResult<InetAddress> dnsDiff = new CompareResult<InetAddress>();
+ CompareResult<RouteInfo> routeDiff = new CompareResult<RouteInfo>();
if (curLp != null) {
// check for the delta between the current set and the new
- CompareResult<RouteInfo> routeDiff = curLp.compareRoutes(newLp);
+ routeDiff = curLp.compareRoutes(newLp);
dnsDiff = curLp.compareDnses(newLp);
-
- for (RouteInfo r : routeDiff.removed) {
- if (isLinkDefault || ! r.isDefaultRoute()) {
- removeRoute(curLp, r);
- }
- }
- routesToAdd = routeDiff.added;
+ } else if (newLp != null) {
+ routeDiff.added = newLp.getRoutes();
+ dnsDiff.added = newLp.getDnses();
}
- if (newLp != null) {
- // if we didn't get a diff from cur -> new, then just use the new
- if (routesToAdd == null) {
- routesToAdd = newLp.getRoutes();
+ for (RouteInfo r : routeDiff.removed) {
+ if (isLinkDefault || ! r.isDefaultRoute()) {
+ removeRoute(curLp, r);
}
+ }
- for (RouteInfo r : routesToAdd) {
- if (isLinkDefault || ! r.isDefaultRoute()) {
- addRoute(newLp, r);
- }
+ for (RouteInfo r : routeDiff.added) {
+ if (isLinkDefault || ! r.isDefaultRoute()) {
+ addRoute(newLp, r);
}
}
if (!isLinkDefault) {
// handle DNS routes
- Collection<InetAddress> dnsToAdd = null;
- if (dnsDiff != null) {
- dnsToAdd = dnsDiff.added;
- for (InetAddress dnsAddress : dnsDiff.removed) {
- removeRoute(curLp, RouteInfo.makeHostRoute(dnsAddress));
+ if (routeDiff.removed.size() == 0 && routeDiff.added.size() == 0) {
+ // no change in routes; check for changes in the DNS servers themselves
+ for (InetAddress oldDns : dnsDiff.removed) {
+ removeRouteToAddress(curLp, oldDns);
}
- }
- if (newLp != null) {
- if (dnsToAdd == null) {
- dnsToAdd = newLp.getDnses();
+ for (InetAddress newDns : dnsDiff.added) {
+ addRouteToAddress(newLp, newDns);
}
- for(InetAddress dnsAddress : dnsToAdd) {
- addRoute(newLp, RouteInfo.makeHostRoute(dnsAddress));
+ } else {
+ // routes changed - remove all old DNS entries and add the new ones
+ if (curLp != null) {
+ for (InetAddress oldDns : curLp.getDnses()) {
+ removeRouteToAddress(curLp, oldDns);
+ }
+ }
+ if (newLp != null) {
+ for (InetAddress newDns : newLp.getDnses()) {
+ addRouteToAddress(newLp, newDns);
+ }
}
}
}
diff --git a/services/java/com/android/server/InputMethodManagerService.java b/services/java/com/android/server/InputMethodManagerService.java
index d39f565f..73d790a 100644
--- a/services/java/com/android/server/InputMethodManagerService.java
+++ b/services/java/com/android/server/InputMethodManagerService.java
@@ -1127,13 +1127,21 @@ public class InputMethodManagerService extends IInputMethodManager.Stub
mBackDisposition = backDisposition;
mStatusBar.setImeWindowStatus(token, vis, backDisposition);
final boolean iconVisibility = (vis & InputMethodService.IME_ACTIVE) != 0;
- if (iconVisibility && needsToShowImeSwitchOngoingNotification()) {
+ final InputMethodInfo imi = mMethodMap.get(mCurMethodId);
+ if (imi != null && iconVisibility && needsToShowImeSwitchOngoingNotification()) {
final PackageManager pm = mContext.getPackageManager();
- final CharSequence label = mMethodMap.get(mCurMethodId).loadLabel(pm);
final CharSequence title = mRes.getText(
com.android.internal.R.string.select_input_method);
+ final CharSequence imiLabel = imi.loadLabel(pm);
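+ // Show "<subtype display name> (<IME label>)" when a subtype is
+ // selected; otherwise fall back to the IME label alone.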
+ final CharSequence summary = mCurrentSubtype != null
+ ? TextUtils.concat(mCurrentSubtype.getDisplayName(mContext,
+ imi.getPackageName(), imi.getServiceInfo().applicationInfo),
+ (TextUtils.isEmpty(imiLabel) ?
+ "" : " (" + imiLabel + ")"))
+ : imiLabel;
+
mImeSwitcherNotification.setLatestEventInfo(
- mContext, title, label, mImeSwitchPendingIntent);
+ mContext, title, summary, mImeSwitchPendingIntent);
mNotificationManager.notify(
com.android.internal.R.string.select_input_method,
mImeSwitcherNotification);
diff --git a/services/java/com/android/server/connectivity/Vpn.java b/services/java/com/android/server/connectivity/Vpn.java
index cf75d6b..9cb772e 100644
--- a/services/java/com/android/server/connectivity/Vpn.java
+++ b/services/java/com/android/server/connectivity/Vpn.java
@@ -396,7 +396,7 @@ public class Vpn extends INetworkManagementEventObserver.Stub {
if (mTimer == -1) {
mTimer = now;
Thread.sleep(1);
- } else if (now - mTimer <= 30000) {
+ } else if (now - mTimer <= 60000) {
Thread.sleep(yield ? 200 : 1);
} else {
mInfo.state = LegacyVpnInfo.STATE_TIMEOUT;
diff --git a/services/java/com/android/server/wm/BlackFrame.java b/services/java/com/android/server/wm/BlackFrame.java
index d8fd7fe..36f5dcb 100644
--- a/services/java/com/android/server/wm/BlackFrame.java
+++ b/services/java/com/android/server/wm/BlackFrame.java
@@ -32,10 +32,12 @@ public class BlackFrame {
final int top;
final Surface surface;
- BlackSurface(SurfaceSession session, int layer, int l, int t, int w, int h)
+ BlackSurface(SurfaceSession session, int layer, int l, int t, int r, int b)
throws Surface.OutOfResourcesException {
left = l;
top = t;
+ int w = r-l;
+ int h = b-t;
surface = new Surface(session, 0, "BlackSurface",
-1, w, h, PixelFormat.OPAQUE, Surface.FX_SURFACE_DIM);
if (WindowManagerService.SHOW_TRANSACTIONS ||
diff --git a/services/surfaceflinger/SurfaceTextureLayer.cpp b/services/surfaceflinger/SurfaceTextureLayer.cpp
index 40659d4..91e010f 100644
--- a/services/surfaceflinger/SurfaceTextureLayer.cpp
+++ b/services/surfaceflinger/SurfaceTextureLayer.cpp
@@ -64,7 +64,7 @@ status_t SurfaceTextureLayer::queueBuffer(int buf, int64_t timestamp,
if (orientation & Transform::ROT_INVALID) {
orientation = 0;
}
- *outTransform = layer->getOrientation();
+ *outTransform = orientation;
}
return res;
diff --git a/tests/RenderScriptTests/ImageProcessing/src/com/android/rs/image/ImageProcessingActivity.java b/tests/RenderScriptTests/ImageProcessing/src/com/android/rs/image/ImageProcessingActivity.java
index e776463..9aa70b0 100644
--- a/tests/RenderScriptTests/ImageProcessing/src/com/android/rs/image/ImageProcessingActivity.java
+++ b/tests/RenderScriptTests/ImageProcessing/src/com/android/rs/image/ImageProcessingActivity.java
@@ -87,8 +87,6 @@ public class ImageProcessingActivity extends Activity
mIsProcessing = false;
}
- // This is a hack to work around an invalidation bug
- mBitmapOut.setPixel(0, 0, 0);
mOutPixelsAllocation.copyTo(mBitmapOut);
mDisplayView.invalidate();
}
diff --git a/wifi/java/android/net/wifi/WifiWatchdogStateMachine.java b/wifi/java/android/net/wifi/WifiWatchdogStateMachine.java
index b09e04b..fa7cf21 100644
--- a/wifi/java/android/net/wifi/WifiWatchdogStateMachine.java
+++ b/wifi/java/android/net/wifi/WifiWatchdogStateMachine.java
@@ -68,7 +68,7 @@ import java.util.regex.Pattern;
public class WifiWatchdogStateMachine extends StateMachine {
- private static final boolean VDBG = true; //TODO : Remove this before merge
+ private static final boolean VDBG = false;
private static final boolean DBG = true;
private static final String WWSM_TAG = "WifiWatchdogStateMachine";