summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--core/java/android/hardware/Camera.java49
-rw-r--r--core/java/android/hardware/camera2/CameraManager.java23
-rw-r--r--core/java/android/hardware/camera2/CaptureRequest.java16
-rw-r--r--core/java/android/hardware/camera2/ICameraDeviceUser.aidl24
-rw-r--r--core/java/android/hardware/camera2/impl/CameraDevice.java4
-rw-r--r--core/java/android/hardware/camera2/impl/CameraMetadataNative.java2
-rw-r--r--core/java/android/hardware/camera2/impl/CaptureResultExtras.java9
-rw-r--r--core/java/android/hardware/camera2/legacy/BurstHolder.java82
-rw-r--r--core/java/android/hardware/camera2/legacy/CameraDeviceState.java259
-rw-r--r--core/java/android/hardware/camera2/legacy/CameraDeviceUserShim.java273
-rw-r--r--core/java/android/hardware/camera2/legacy/GLThreadManager.java234
-rw-r--r--core/java/android/hardware/camera2/legacy/LegacyCameraDevice.java275
-rw-r--r--core/java/android/hardware/camera2/legacy/RequestHandlerThread.java101
-rw-r--r--core/java/android/hardware/camera2/legacy/RequestHolder.java159
-rw-r--r--core/java/android/hardware/camera2/legacy/RequestQueue.java132
-rw-r--r--core/java/android/hardware/camera2/legacy/RequestThreadManager.java491
-rw-r--r--core/java/android/hardware/camera2/legacy/SurfaceTextureRenderer.java522
-rw-r--r--core/java/android/hardware/camera2/legacy/package.html3
-rw-r--r--core/java/android/hardware/camera2/utils/CameraBinderDecorator.java5
-rw-r--r--core/jni/Android.mk1
-rw-r--r--core/jni/AndroidRuntime.cpp2
-rw-r--r--core/jni/android_hardware_Camera.cpp23
-rw-r--r--core/jni/android_hardware_camera2_CameraMetadata.cpp4
-rw-r--r--core/jni/android_hardware_camera2_legacy_LegacyCameraDevice.cpp419
-rw-r--r--media/jni/android_media_ImageReader.cpp39
25 files changed, 3109 insertions, 42 deletions
diff --git a/core/java/android/hardware/Camera.java b/core/java/android/hardware/Camera.java
index 35c86e7..0705e0c 100644
--- a/core/java/android/hardware/Camera.java
+++ b/core/java/android/hardware/Camera.java
@@ -169,6 +169,10 @@ public class Camera {
private boolean mFaceDetectionRunning = false;
private Object mAutoFocusCallbackLock = new Object();
+ private static final int NO_ERROR = 0;
+ private static final int EACCESS = -13;
+ private static final int ENODEV = -19;
+
/**
* Broadcast Action: A new picture is taken by the camera, and the entry of
* the picture has been added to the media store.
@@ -328,6 +332,24 @@ public class Camera {
}
Camera(int cameraId) {
+ int err = cameraInit(cameraId);
+ if (checkInitErrors(err)) {
+ switch(err) {
+ case EACCESS:
+ throw new RuntimeException("Fail to connect to camera service");
+ case ENODEV:
+ throw new RuntimeException("Camera initialization failed");
+ default:
+ // Should never hit this.
+ throw new RuntimeException("Unknown camera error");
+ }
+ }
+ }
+
+ /**
+ * @hide
+ */
+ public int cameraInit(int cameraId) {
mShutterCallback = null;
mRawImageCallback = null;
mJpegCallback = null;
@@ -347,7 +369,21 @@ public class Camera {
String packageName = ActivityThread.currentPackageName();
- native_setup(new WeakReference<Camera>(this), cameraId, packageName);
+ return native_setup(new WeakReference<Camera>(this), cameraId, packageName);
+ }
+
+ /**
+ * @hide
+ */
+ public static boolean checkInitErrors(int err) {
+ return err != NO_ERROR;
+ }
+
+ /**
+ * @hide
+ */
+ public static Camera openUninitialized() {
+ return new Camera();
}
/**
@@ -360,7 +396,7 @@ public class Camera {
release();
}
- private native final void native_setup(Object camera_this, int cameraId,
+ private native final int native_setup(Object camera_this, int cameraId,
String packageName);
private native final void native_release();
@@ -458,13 +494,16 @@ public class Camera {
*/
public final void setPreviewDisplay(SurfaceHolder holder) throws IOException {
if (holder != null) {
- setPreviewDisplay(holder.getSurface());
+ setPreviewSurface(holder.getSurface());
} else {
- setPreviewDisplay((Surface)null);
+ setPreviewSurface((Surface)null);
}
}
- private native final void setPreviewDisplay(Surface surface) throws IOException;
+ /**
+ * @hide
+ */
+ public native final void setPreviewSurface(Surface surface) throws IOException;
/**
* Sets the {@link SurfaceTexture} to be used for live preview.
diff --git a/core/java/android/hardware/camera2/CameraManager.java b/core/java/android/hardware/camera2/CameraManager.java
index 0fcd598..cb463a6 100644
--- a/core/java/android/hardware/camera2/CameraManager.java
+++ b/core/java/android/hardware/camera2/CameraManager.java
@@ -20,6 +20,7 @@ import android.content.Context;
import android.hardware.ICameraService;
import android.hardware.ICameraServiceListener;
import android.hardware.camera2.impl.CameraMetadataNative;
+import android.hardware.camera2.legacy.CameraDeviceUserShim;
import android.hardware.camera2.utils.CameraBinderDecorator;
import android.hardware.camera2.utils.CameraRuntimeException;
import android.hardware.camera2.utils.BinderHolder;
@@ -194,7 +195,6 @@ public final class CameraManager {
// impossible
return null;
}
-
return new CameraCharacteristics(info);
}
@@ -236,10 +236,23 @@ public final class CameraManager {
handler);
BinderHolder holder = new BinderHolder();
- mCameraService.connectDevice(device.getCallbacks(),
- Integer.parseInt(cameraId),
- mContext.getPackageName(), USE_CALLING_UID, holder);
- cameraUser = ICameraDeviceUser.Stub.asInterface(holder.getBinder());
+
+ ICameraDeviceCallbacks callbacks = device.getCallbacks();
+ int id = Integer.parseInt(cameraId);
+ try {
+ mCameraService.connectDevice(callbacks, id, mContext.getPackageName(),
+ USE_CALLING_UID, holder);
+ cameraUser = ICameraDeviceUser.Stub.asInterface(holder.getBinder());
+ } catch (CameraRuntimeException e) {
+ if (e.getReason() == CameraAccessException.CAMERA_DEPRECATED_HAL) {
+ // Use legacy camera implementation for HAL1 devices
+ Log.i(TAG, "Using legacy camera HAL.");
+ cameraUser = CameraDeviceUserShim.connectBinderShim(callbacks, id);
+ } else {
+ // Rethrow otherwise
+ throw e;
+ }
+ }
// TODO: factor out listener to be non-nested, then move setter to constructor
// For now, calling setRemoteDevice will fire initial
diff --git a/core/java/android/hardware/camera2/CaptureRequest.java b/core/java/android/hardware/camera2/CaptureRequest.java
index a70aa3b..54ffd6b 100644
--- a/core/java/android/hardware/camera2/CaptureRequest.java
+++ b/core/java/android/hardware/camera2/CaptureRequest.java
@@ -22,6 +22,8 @@ import android.os.Parcelable;
import android.util.Rational;
import android.view.Surface;
+import java.util.Collection;
+import java.util.Collections;
import java.util.HashSet;
import java.util.Objects;
@@ -199,6 +201,20 @@ public final class CaptureRequest extends CameraMetadata implements Parcelable {
}
/**
+ * @hide
+ */
+ public boolean containsTarget(Surface surface) {
+ return mSurfaceSet.contains(surface);
+ }
+
+ /**
+ * @hide
+ */
+ public Collection<Surface> getTargets() {
+ return Collections.unmodifiableCollection(mSurfaceSet);
+ }
+
+ /**
* A builder for capture requests.
*
* <p>To obtain a builder instance, use the
diff --git a/core/java/android/hardware/camera2/ICameraDeviceUser.aidl b/core/java/android/hardware/camera2/ICameraDeviceUser.aidl
index 0815170..50a58ed 100644
--- a/core/java/android/hardware/camera2/ICameraDeviceUser.aidl
+++ b/core/java/android/hardware/camera2/ICameraDeviceUser.aidl
@@ -26,7 +26,8 @@ import android.hardware.camera2.utils.LongParcelable;
interface ICameraDeviceUser
{
/**
- * Keep up-to-date with frameworks/av/include/camera/camera2/ICameraDeviceUser.h
+ * Keep up-to-date with frameworks/av/include/camera/camera2/ICameraDeviceUser.h and
+ * frameworks/base/core/java/android/hardware/camera2/legacy/CameraDeviceUserShim.java
*/
void disconnect();
@@ -41,6 +42,27 @@ interface ICameraDeviceUser
int cancelRequest(int requestId, out LongParcelable lastFrameNumber);
+ /**
+ * Begin the device configuration.
+ *
+ * <p>
+ * beginConfigure must be called before any call to deleteStream, createStream,
+ * or endConfigure. It is not valid to call this when the device is not idle.
+     * </p>
+ */
+ int beginConfigure();
+
+ /**
+ * End the device configuration.
+ *
+ * <p>
+ * endConfigure must be called after stream configuration is complete (i.e. after
+ * a call to beginConfigure and subsequent createStream/deleteStream calls). This
+ * must be called before any requests can be submitted.
+     * </p>
+ */
+ int endConfigure();
+
int deleteStream(int streamId);
// non-negative value is the stream ID. negative value is status_t
diff --git a/core/java/android/hardware/camera2/impl/CameraDevice.java b/core/java/android/hardware/camera2/impl/CameraDevice.java
index dba24a1..e78ffff 100644
--- a/core/java/android/hardware/camera2/impl/CameraDevice.java
+++ b/core/java/android/hardware/camera2/impl/CameraDevice.java
@@ -216,7 +216,7 @@ public class CameraDevice implements android.hardware.camera2.CameraDevice {
try {
waitUntilIdle();
- // TODO: mRemoteDevice.beginConfigure
+ mRemoteDevice.beginConfigure();
// Delete all streams first (to free up HW resources)
for (Integer streamId : deleteList) {
mRemoteDevice.deleteStream(streamId);
@@ -231,7 +231,7 @@ public class CameraDevice implements android.hardware.camera2.CameraDevice {
mConfiguredOutputs.put(streamId, s);
}
- // TODO: mRemoteDevice.endConfigure
+ mRemoteDevice.endConfigure();
} catch (CameraRuntimeException e) {
if (e.getReason() == CAMERA_IN_USE) {
throw new IllegalStateException("The camera is currently busy." +
diff --git a/core/java/android/hardware/camera2/impl/CameraMetadataNative.java b/core/java/android/hardware/camera2/impl/CameraMetadataNative.java
index db7486d..27cfd38 100644
--- a/core/java/android/hardware/camera2/impl/CameraMetadataNative.java
+++ b/core/java/android/hardware/camera2/impl/CameraMetadataNative.java
@@ -63,7 +63,7 @@ public class CameraMetadataNative extends CameraMetadata implements Parcelable {
private static final String TAG = "CameraMetadataJV";
private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
// this should be in sync with HAL_PIXEL_FORMAT_BLOB defined in graphics.h
- private static final int NATIVE_JPEG_FORMAT = 0x21;
+ public static final int NATIVE_JPEG_FORMAT = 0x21;
public CameraMetadataNative() {
super();
diff --git a/core/java/android/hardware/camera2/impl/CaptureResultExtras.java b/core/java/android/hardware/camera2/impl/CaptureResultExtras.java
index b3a9559..7544045 100644
--- a/core/java/android/hardware/camera2/impl/CaptureResultExtras.java
+++ b/core/java/android/hardware/camera2/impl/CaptureResultExtras.java
@@ -45,6 +45,15 @@ public class CaptureResultExtras implements Parcelable {
readFromParcel(in);
}
+ public CaptureResultExtras(int requestId, int subsequenceId, int afTriggerId,
+ int precaptureTriggerId, long frameNumber) {
+ this.requestId = requestId;
+ this.subsequenceId = subsequenceId;
+ this.afTriggerId = afTriggerId;
+ this.precaptureTriggerId = precaptureTriggerId;
+ this.frameNumber = frameNumber;
+ }
+
@Override
public int describeContents() {
return 0;
diff --git a/core/java/android/hardware/camera2/legacy/BurstHolder.java b/core/java/android/hardware/camera2/legacy/BurstHolder.java
new file mode 100644
index 0000000..e35eb50
--- /dev/null
+++ b/core/java/android/hardware/camera2/legacy/BurstHolder.java
@@ -0,0 +1,82 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.legacy;
+
+import android.hardware.camera2.CaptureRequest;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Immutable container for a burst of capture requests.
+ */
+public class BurstHolder {
+
+ private final ArrayList<CaptureRequest> mRequests;
+ private final boolean mRepeating;
+ private final int mRequestId;
+
+ /**
+     * Immutable container for a burst of capture requests.
+ *
+ * @param requestId id of the burst request.
+ * @param repeating true if this burst is repeating.
+ * @param requests a {@link java.util.List} of {@link CaptureRequest}s in this burst.
+ */
+ public BurstHolder(int requestId, boolean repeating, List<CaptureRequest> requests) {
+ mRequests = new ArrayList<CaptureRequest>(requests);
+ mRepeating = repeating;
+ mRequestId = requestId;
+ }
+
+ /**
+ * Get the id of this request.
+ */
+ public int getRequestId() {
+ return mRequestId;
+ }
+
+ /**
+     * Return true if this burst is repeating.
+ */
+ public boolean isRepeating() {
+ return mRepeating;
+ }
+
+ /**
+ * Return the number of requests in this burst sequence.
+ */
+ public int getNumberOfRequests() {
+ return mRequests.size();
+ }
+
+ /**
+ * Create a list of {@link RequestHolder} objects encapsulating the requests in this burst.
+ *
+     * @param frameNumber the starting frame number for this burst.
+ * @return the list of {@link RequestHolder} objects.
+ */
+ public List<RequestHolder> produceRequestHolders(long frameNumber) {
+ ArrayList<RequestHolder> holders = new ArrayList<RequestHolder>();
+ int i = 0;
+ for (CaptureRequest r : mRequests) {
+ holders.add(new RequestHolder(mRequestId, i, r, mRepeating, frameNumber + i));
+ ++i;
+ }
+ return holders;
+ }
+}
diff --git a/core/java/android/hardware/camera2/legacy/CameraDeviceState.java b/core/java/android/hardware/camera2/legacy/CameraDeviceState.java
new file mode 100644
index 0000000..71adf8b
--- /dev/null
+++ b/core/java/android/hardware/camera2/legacy/CameraDeviceState.java
@@ -0,0 +1,259 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.legacy;
+
+import android.hardware.camera2.impl.CameraMetadataNative;
+import android.hardware.camera2.utils.CameraBinderDecorator;
+import android.os.Handler;
+import android.util.Log;
+
+/**
+ * Emulates the state of a single Camera2 device.
+ *
+ * <p>
+ * This class acts as the state machine for a camera device. Valid state transitions are given
+ * in the table below:
+ * </p>
+ *
+ * <ul>
+ * <li>{@code UNCONFIGURED -> CONFIGURING}</li>
+ * <li>{@code CONFIGURING -> IDLE}</li>
+ * <li>{@code IDLE -> CONFIGURING}</li>
+ * <li>{@code IDLE -> CAPTURING}</li>
+ * <li>{@code CAPTURING -> IDLE}</li>
+ * <li>{@code ANY -> ERROR}</li>
+ * </ul>
+ */
+public class CameraDeviceState {
+ private static final String TAG = "CameraDeviceState";
+ private static final boolean DEBUG = Log.isLoggable(LegacyCameraDevice.DEBUG_PROP, Log.DEBUG);
+
+ private static final int STATE_ERROR = 0;
+ private static final int STATE_UNCONFIGURED = 1;
+ private static final int STATE_CONFIGURING = 2;
+ private static final int STATE_IDLE = 3;
+ private static final int STATE_CAPTURING = 4;
+
+ private int mCurrentState = STATE_UNCONFIGURED;
+ private int mCurrentError = CameraBinderDecorator.NO_ERROR;
+
+ private RequestHolder mCurrentRequest = null;
+
+ private Handler mCurrentHandler = null;
+ private CameraDeviceStateListener mCurrentListener = null;
+
+
+ /**
+ * CameraDeviceStateListener callbacks to be called after state transitions.
+ */
+ public interface CameraDeviceStateListener {
+ void onError(int errorCode, RequestHolder holder);
+ void onConfiguring();
+ void onIdle();
+ void onCaptureStarted(RequestHolder holder);
+ void onCaptureResult(CameraMetadataNative result, RequestHolder holder);
+ }
+
+ /**
+ * Transition to the {@code ERROR} state.
+ *
+ * <p>
+ * The device cannot exit the {@code ERROR} state. If the device was not already in the
+ * {@code ERROR} state, {@link CameraDeviceStateListener#onError(int, RequestHolder)} will be
+ * called.
+ * </p>
+ *
+ * @param error the error to set. Should be one of the error codes defined in
+ * {@link android.hardware.camera2.utils.CameraBinderDecorator}.
+ */
+ public synchronized void setError(int error) {
+ mCurrentError = error;
+ doStateTransition(STATE_ERROR);
+ }
+
+ /**
+ * Transition to the {@code CONFIGURING} state, or {@code ERROR} if in an invalid state.
+ *
+ * <p>
+ * If the device was not already in the {@code CONFIGURING} state,
+ * {@link CameraDeviceStateListener#onConfiguring()} will be called.
+ * </p>
+ *
+     * @return {@link CameraBinderDecorator#NO_ERROR}, or an error if one has occurred.
+ */
+ public synchronized int setConfiguring() {
+ doStateTransition(STATE_CONFIGURING);
+ return mCurrentError;
+ }
+
+ /**
+ * Transition to the {@code IDLE} state, or {@code ERROR} if in an invalid state.
+ *
+ * <p>
+ * If the device was not already in the {@code IDLE} state,
+ * {@link CameraDeviceStateListener#onIdle()} will be called.
+ * </p>
+ *
+     * @return {@link CameraBinderDecorator#NO_ERROR}, or an error if one has occurred.
+ */
+ public synchronized int setIdle() {
+ doStateTransition(STATE_IDLE);
+ return mCurrentError;
+ }
+
+ /**
+ * Transition to the {@code CAPTURING} state, or {@code ERROR} if in an invalid state.
+ *
+ * <p>
+ * If the device was not already in the {@code CAPTURING} state,
+ * {@link CameraDeviceStateListener#onCaptureStarted(RequestHolder)} will be called.
+ * </p>
+ *
+ * @param request A {@link RequestHolder} containing the request for the current capture.
+     * @return {@link CameraBinderDecorator#NO_ERROR}, or an error if one has occurred.
+ */
+ public synchronized int setCaptureStart(final RequestHolder request) {
+ mCurrentRequest = request;
+ doStateTransition(STATE_CAPTURING);
+ return mCurrentError;
+ }
+
+ /**
+ * Set the result for a capture.
+ *
+ * <p>
+ * If the device was in the {@code CAPTURING} state,
+ * {@link CameraDeviceStateListener#onCaptureResult(CameraMetadataNative, RequestHolder)} will
+ * be called with the given result, otherwise this will result in the device transitioning to
+ * the {@code ERROR} state,
+ * </p>
+ *
+ * @param request the {@link RequestHolder} request that created this result.
+ * @param result the {@link CameraMetadataNative} result to set.
+     * @return {@link CameraBinderDecorator#NO_ERROR}, or an error if one has occurred.
+ */
+ public synchronized int setCaptureResult(final RequestHolder request,
+ final CameraMetadataNative result) {
+ if (mCurrentState != STATE_CAPTURING) {
+ Log.e(TAG, "Cannot receive result while in state: " + mCurrentState);
+ mCurrentError = CameraBinderDecorator.INVALID_OPERATION;
+ doStateTransition(STATE_ERROR);
+ return mCurrentError;
+ }
+
+ if (mCurrentHandler != null && mCurrentListener != null) {
+ mCurrentHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mCurrentListener.onCaptureResult(result, request);
+ }
+ });
+ }
+ return mCurrentError;
+ }
+
+ /**
+ * Set the listener for state transition callbacks.
+ *
+ * @param handler handler on which to call the callbacks.
+ * @param listener the {@link CameraDeviceStateListener} callbacks to call.
+ */
+ public synchronized void setCameraDeviceCallbacks(Handler handler,
+ CameraDeviceStateListener listener) {
+ mCurrentHandler = handler;
+ mCurrentListener = listener;
+ }
+
+ private void doStateTransition(int newState) {
+ if (DEBUG) {
+ if (newState != mCurrentState) {
+ Log.d(TAG, "Transitioning to state " + newState);
+ }
+ }
+ switch(newState) {
+ case STATE_ERROR:
+ if (mCurrentState != STATE_ERROR && mCurrentHandler != null &&
+ mCurrentListener != null) {
+ mCurrentHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mCurrentListener.onError(mCurrentError, mCurrentRequest);
+ }
+ });
+ }
+ mCurrentState = STATE_ERROR;
+ break;
+ case STATE_CONFIGURING:
+ if (mCurrentState != STATE_UNCONFIGURED && mCurrentState != STATE_IDLE) {
+ Log.e(TAG, "Cannot call configure while in state: " + mCurrentState);
+ mCurrentError = CameraBinderDecorator.INVALID_OPERATION;
+ doStateTransition(STATE_ERROR);
+ break;
+ }
+ if (mCurrentState != STATE_CONFIGURING && mCurrentHandler != null &&
+ mCurrentListener != null) {
+ mCurrentHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mCurrentListener.onConfiguring();
+ }
+ });
+ }
+ mCurrentState = STATE_CONFIGURING;
+ break;
+ case STATE_IDLE:
+ if (mCurrentState != STATE_CONFIGURING && mCurrentState != STATE_CAPTURING) {
+ Log.e(TAG, "Cannot call idle while in state: " + mCurrentState);
+ mCurrentError = CameraBinderDecorator.INVALID_OPERATION;
+ doStateTransition(STATE_ERROR);
+ break;
+ }
+ if (mCurrentState != STATE_IDLE && mCurrentHandler != null &&
+ mCurrentListener != null) {
+ mCurrentHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mCurrentListener.onIdle();
+ }
+ });
+ }
+ mCurrentState = STATE_IDLE;
+ break;
+ case STATE_CAPTURING:
+ if (mCurrentState != STATE_IDLE && mCurrentState != STATE_CAPTURING) {
+ Log.e(TAG, "Cannot call capture while in state: " + mCurrentState);
+ mCurrentError = CameraBinderDecorator.INVALID_OPERATION;
+ doStateTransition(STATE_ERROR);
+ break;
+ }
+ if (mCurrentHandler != null && mCurrentListener != null) {
+ mCurrentHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mCurrentListener.onCaptureStarted(mCurrentRequest);
+ }
+ });
+ }
+ mCurrentState = STATE_CAPTURING;
+ break;
+ default:
+ throw new IllegalStateException("Transition to unknown state: " + newState);
+ }
+ }
+
+
+}
diff --git a/core/java/android/hardware/camera2/legacy/CameraDeviceUserShim.java b/core/java/android/hardware/camera2/legacy/CameraDeviceUserShim.java
new file mode 100644
index 0000000..54d9c3c
--- /dev/null
+++ b/core/java/android/hardware/camera2/legacy/CameraDeviceUserShim.java
@@ -0,0 +1,273 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.legacy;
+
+import android.hardware.Camera;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.ICameraDeviceCallbacks;
+import android.hardware.camera2.ICameraDeviceUser;
+import android.hardware.camera2.utils.LongParcelable;
+import android.hardware.camera2.impl.CameraMetadataNative;
+import android.hardware.camera2.utils.CameraBinderDecorator;
+import android.hardware.camera2.utils.CameraRuntimeException;
+import android.os.IBinder;
+import android.os.RemoteException;
+import android.util.Log;
+import android.util.SparseArray;
+import android.view.Surface;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.atomic.AtomicInteger;
+
+/**
+ * Compatibility implementation of the Camera2 API binder interface.
+ *
+ * <p>
+ * This is intended to be called from the same process as client
+ * {@link android.hardware.camera2.CameraDevice}, and wraps a
+ * {@link android.hardware.camera2.legacy.LegacyCameraDevice} that emulates Camera2 service using
+ * the Camera1 API.
+ * </p>
+ *
+ * <p>
+ * Keep up to date with ICameraDeviceUser.aidl.
+ * </p>
+ */
+public class CameraDeviceUserShim implements ICameraDeviceUser {
+ private static final String TAG = "CameraDeviceUserShim";
+
+ private static final boolean DEBUG = Log.isLoggable(LegacyCameraDevice.DEBUG_PROP, Log.DEBUG);
+
+ private final LegacyCameraDevice mLegacyDevice;
+
+ private final Object mConfigureLock = new Object();
+ private int mSurfaceIdCounter;
+ private boolean mConfiguring;
+ private final SparseArray<Surface> mSurfaces;
+
+ protected CameraDeviceUserShim(int cameraId, LegacyCameraDevice legacyCamera) {
+ mLegacyDevice = legacyCamera;
+ mConfiguring = false;
+ mSurfaces = new SparseArray<Surface>();
+
+ mSurfaceIdCounter = 0;
+ }
+
+ public static CameraDeviceUserShim connectBinderShim(ICameraDeviceCallbacks callbacks,
+ int cameraId) {
+ if (DEBUG) {
+ Log.d(TAG, "Opening shim Camera device");
+ }
+ // TODO: Move open/init into LegacyCameraDevice thread when API is switched to async.
+ Camera legacyCamera = Camera.openUninitialized();
+ int initErrors = legacyCamera.cameraInit(cameraId);
+        // Check for errors from the old HAL initialization
+ if (Camera.checkInitErrors(initErrors)) {
+ // TODO: Map over old camera error codes. This likely involves improving the error
+ // reporting in the HAL1 connect path.
+ throw new CameraRuntimeException(CameraAccessException.CAMERA_DISCONNECTED);
+ }
+ LegacyCameraDevice device = new LegacyCameraDevice(cameraId, legacyCamera, callbacks);
+ return new CameraDeviceUserShim(cameraId, device);
+ }
+
+ @Override
+ public void disconnect() {
+ if (DEBUG) {
+ Log.d(TAG, "disconnect called.");
+ }
+ mLegacyDevice.close();
+ }
+
+ @Override
+ public int submitRequest(CaptureRequest request, boolean streaming,
+ /*out*/LongParcelable lastFrameNumber) {
+ if (DEBUG) {
+ Log.d(TAG, "submitRequest called.");
+ }
+ synchronized(mConfigureLock) {
+ if (mConfiguring) {
+ Log.e(TAG, "Cannot submit request, configuration change in progress.");
+ return CameraBinderDecorator.INVALID_OPERATION;
+ }
+ }
+ return mLegacyDevice.submitRequest(request, streaming, lastFrameNumber);
+ }
+
+ @Override
+ public int submitRequestList(List<CaptureRequest> request, boolean streaming,
+ /*out*/LongParcelable lastFrameNumber) {
+ if (DEBUG) {
+ Log.d(TAG, "submitRequestList called.");
+ }
+ synchronized(mConfigureLock) {
+ if (mConfiguring) {
+ Log.e(TAG, "Cannot submit request, configuration change in progress.");
+ return CameraBinderDecorator.INVALID_OPERATION;
+ }
+ }
+ return mLegacyDevice.submitRequestList(request, streaming, lastFrameNumber);
+ }
+
+ @Override
+ public int cancelRequest(int requestId, /*out*/LongParcelable lastFrameNumber) {
+ if (DEBUG) {
+ Log.d(TAG, "cancelRequest called.");
+ }
+ synchronized(mConfigureLock) {
+ if (mConfiguring) {
+ Log.e(TAG, "Cannot cancel request, configuration change in progress.");
+ return CameraBinderDecorator.INVALID_OPERATION;
+ }
+ }
+ long lastFrame = mLegacyDevice.cancelRequest(requestId);
+ lastFrameNumber.setNumber(lastFrame);
+ return CameraBinderDecorator.NO_ERROR;
+ }
+
+ @Override
+ public int beginConfigure() {
+ if (DEBUG) {
+ Log.d(TAG, "beginConfigure called.");
+ }
+ synchronized(mConfigureLock) {
+ if (mConfiguring) {
+ Log.e(TAG, "Cannot begin configure, configuration change already in progress.");
+ return CameraBinderDecorator.INVALID_OPERATION;
+ }
+ mConfiguring = true;
+ }
+ return CameraBinderDecorator.NO_ERROR;
+ }
+
+ @Override
+ public int endConfigure() {
+ if (DEBUG) {
+ Log.d(TAG, "endConfigure called.");
+ }
+ ArrayList<Surface> surfaces = null;
+ synchronized(mConfigureLock) {
+ if (!mConfiguring) {
+ Log.e(TAG, "Cannot end configure, no configuration change in progress.");
+ return CameraBinderDecorator.INVALID_OPERATION;
+ }
+ int numSurfaces = mSurfaces.size();
+ if (numSurfaces > 0) {
+ surfaces = new ArrayList<Surface>();
+ for (int i = 0; i < numSurfaces; ++i) {
+ surfaces.add(mSurfaces.valueAt(i));
+ }
+ }
+ mConfiguring = false;
+ }
+ return mLegacyDevice.configureOutputs(surfaces);
+ }
+
+ @Override
+ public int deleteStream(int streamId) {
+ if (DEBUG) {
+ Log.d(TAG, "deleteStream called.");
+ }
+ synchronized(mConfigureLock) {
+ if (!mConfiguring) {
+ Log.e(TAG, "Cannot delete stream, beginConfigure hasn't been called yet.");
+ return CameraBinderDecorator.INVALID_OPERATION;
+ }
+ int index = mSurfaces.indexOfKey(streamId);
+ if (index < 0) {
+ Log.e(TAG, "Cannot delete stream, stream id " + streamId + " doesn't exist.");
+ return CameraBinderDecorator.BAD_VALUE;
+ }
+ mSurfaces.removeAt(index);
+ }
+ return CameraBinderDecorator.NO_ERROR;
+ }
+
+ @Override
+ public int createStream(int width, int height, int format, Surface surface) {
+ if (DEBUG) {
+ Log.d(TAG, "createStream called.");
+ }
+ synchronized(mConfigureLock) {
+ if (!mConfiguring) {
+ Log.e(TAG, "Cannot create stream, beginConfigure hasn't been called yet.");
+ return CameraBinderDecorator.INVALID_OPERATION;
+ }
+ int id = ++mSurfaceIdCounter;
+ mSurfaces.put(id, surface);
+ return id;
+ }
+ }
+
+ @Override
+ public int createDefaultRequest(int templateId, /*out*/CameraMetadataNative request) {
+ if (DEBUG) {
+ Log.d(TAG, "createDefaultRequest called.");
+ }
+ // TODO: implement createDefaultRequest.
+ Log.e(TAG, "createDefaultRequest unimplemented.");
+ return CameraBinderDecorator.NO_ERROR;
+ }
+
+ @Override
+ public int getCameraInfo(/*out*/CameraMetadataNative info) {
+ if (DEBUG) {
+ Log.d(TAG, "getCameraInfo called.");
+ }
+ // TODO: implement getCameraInfo.
+ Log.e(TAG, "getCameraInfo unimplemented.");
+ return CameraBinderDecorator.NO_ERROR;
+ }
+
+ @Override
+ public int waitUntilIdle() throws RemoteException {
+ if (DEBUG) {
+ Log.d(TAG, "waitUntilIdle called.");
+ }
+ synchronized(mConfigureLock) {
+ if (mConfiguring) {
+ Log.e(TAG, "Cannot wait until idle, configuration change in progress.");
+ return CameraBinderDecorator.INVALID_OPERATION;
+ }
+ }
+ mLegacyDevice.waitUntilIdle();
+ return CameraBinderDecorator.NO_ERROR;
+ }
+
+ @Override
+ public int flush(/*out*/LongParcelable lastFrameNumber) {
+ if (DEBUG) {
+ Log.d(TAG, "flush called.");
+ }
+ synchronized(mConfigureLock) {
+ if (mConfiguring) {
+ Log.e(TAG, "Cannot flush, configuration change in progress.");
+ return CameraBinderDecorator.INVALID_OPERATION;
+ }
+ }
+ // TODO: implement flush.
+ return CameraBinderDecorator.NO_ERROR;
+ }
+
+ @Override
+ public IBinder asBinder() {
+ // This is solely intended to be used for in-process binding.
+ return null;
+ }
+}
diff --git a/core/java/android/hardware/camera2/legacy/GLThreadManager.java b/core/java/android/hardware/camera2/legacy/GLThreadManager.java
new file mode 100644
index 0000000..3fd2309
--- /dev/null
+++ b/core/java/android/hardware/camera2/legacy/GLThreadManager.java
@@ -0,0 +1,234 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.legacy;
+
+import android.graphics.SurfaceTexture;
+import android.os.ConditionVariable;
+import android.os.Handler;
+import android.os.Message;
+import android.util.Log;
+import android.view.Surface;
+
+import java.util.Collection;
+
+/**
+ * GLThreadManager handles the thread used for rendering into the configured output surfaces.
+ */
+public class GLThreadManager {
+ private final String TAG;
+ private static final boolean DEBUG = Log.isLoggable(LegacyCameraDevice.DEBUG_PROP, Log.DEBUG);
+
+ private static final int MSG_NEW_CONFIGURATION = 1;
+ private static final int MSG_NEW_FRAME = 2;
+ private static final int MSG_CLEANUP = 3;
+ private static final int MSG_DROP_FRAMES = 4;
+ private static final int MSG_ALLOW_FRAMES = 5;
+
+ private final SurfaceTextureRenderer mTextureRenderer;
+
+ private final RequestHandlerThread mGLHandlerThread;
+
+ private final RequestThreadManager.FpsCounter mPrevCounter =
+ new RequestThreadManager.FpsCounter("GL Preview Producer");
+
+ /**
+ * Container object for Configure messages.
+ */
+ private static class ConfigureHolder {
+ public final ConditionVariable condition;
+ public final Collection<Surface> surfaces;
+
+ public ConfigureHolder(ConditionVariable condition, Collection<Surface> surfaces) {
+ this.condition = condition;
+ this.surfaces = surfaces;
+ }
+ }
+
+ private final Handler.Callback mGLHandlerCb = new Handler.Callback() {
+ private boolean mCleanup = false;
+ private boolean mConfigured = false;
+ private boolean mDroppingFrames = false;
+
+ @SuppressWarnings("unchecked")
+ @Override
+ public boolean handleMessage(Message msg) {
+ if (mCleanup) {
+ return true;
+ }
+ switch (msg.what) {
+ case MSG_NEW_CONFIGURATION:
+ ConfigureHolder configure = (ConfigureHolder) msg.obj;
+ mTextureRenderer.cleanupEGLContext();
+ mTextureRenderer.configureSurfaces(configure.surfaces);
+ configure.condition.open();
+ mConfigured = true;
+ break;
+ case MSG_NEW_FRAME:
+ if (mDroppingFrames) {
+ Log.w(TAG, "Ignoring frame.");
+ break;
+ }
+ if (DEBUG) {
+ mPrevCounter.countAndLog();
+ }
+ if (!mConfigured) {
+ Log.e(TAG, "Dropping frame, EGL context not configured!");
+ }
+ mTextureRenderer.drawIntoSurfaces((Collection<Surface>) msg.obj);
+ break;
+ case MSG_CLEANUP:
+ mTextureRenderer.cleanupEGLContext();
+ mCleanup = true;
+ mConfigured = false;
+ break;
+ case MSG_DROP_FRAMES:
+ mDroppingFrames = true;
+ break;
+ case MSG_ALLOW_FRAMES:
+ mDroppingFrames = false;
+ default:
+ Log.e(TAG, "Unhandled message " + msg.what + " on GLThread.");
+ break;
+ }
+ return true;
+ }
+ };
+
+ /**
+ * Create a new GL thread and renderer.
+ *
+ * @param cameraId the camera id for this thread.
+ */
+ public GLThreadManager(int cameraId) {
+ mTextureRenderer = new SurfaceTextureRenderer();
+ TAG = String.format("CameraDeviceGLThread-%d", cameraId);
+ mGLHandlerThread = new RequestHandlerThread(TAG, mGLHandlerCb);
+ }
+
+ /**
+ * Start the thread.
+ *
+ * <p>
+ * This must be called before queueing new frames.
+ * </p>
+ */
+ public void start() {
+ mGLHandlerThread.start();
+ }
+
+ /**
+ * Wait until the thread has started.
+ */
+ public void waitUntilStarted() {
+ mGLHandlerThread.waitUntilStarted();
+ }
+
+ /**
+ * Quit the thread.
+ *
+ * <p>
+ * No further methods can be called after this.
+ * </p>
+ */
+ public void quit() {
+ Handler handler = mGLHandlerThread.getHandler();
+ handler.sendMessageAtFrontOfQueue(handler.obtainMessage(MSG_CLEANUP));
+ mGLHandlerThread.quitSafely();
+ }
+
+ /**
+ * Queue a new call to draw into a given set of surfaces.
+ *
+ * <p>
+ * The set of surfaces passed here must be a subset of the set of surfaces passed in
+ * the last call to {@link #setConfigurationAndWait}.
+ * </p>
+ *
+ * @param targets a collection of {@link android.view.Surface}s to draw into.
+ */
+ public void queueNewFrame(Collection<Surface> targets) {
+ Handler handler = mGLHandlerThread.getHandler();
+
+ /**
+ * Avoid queuing more than one new frame. If we are not consuming faster than frames
+ * are produced, drop frames rather than allowing the queue to back up.
+ */
+ if (!handler.hasMessages(MSG_NEW_FRAME)) {
+ handler.sendMessage(handler.obtainMessage(MSG_NEW_FRAME, targets));
+ } else {
+ Log.e(TAG, "GLThread dropping frame. Not consuming frames quickly enough!");
+ }
+ }
+
+ /**
+ * Configure the GL renderer for the given set of output surfaces, and block until
+ * this configuration has been applied.
+ *
+ * @param surfaces a collection of {@link android.view.Surface}s to configure.
+ */
+ public void setConfigurationAndWait(Collection<Surface> surfaces) {
+ Handler handler = mGLHandlerThread.getHandler();
+
+ final ConditionVariable condition = new ConditionVariable(/*closed*/false);
+ ConfigureHolder configure = new ConfigureHolder(condition, surfaces);
+
+ Message m = handler.obtainMessage(MSG_NEW_CONFIGURATION, /*arg1*/0, /*arg2*/0, configure);
+ handler.sendMessage(m);
+
+ // Block until configuration applied.
+ condition.block();
+ }
+
+ /**
+ * Get the underlying surface to produce frames from.
+ *
+ * <p>
+ * This returns the surface that is drawn into the set of surfaces passed in for each frame.
+ * This method should only be called after a call to
+ * {@link #setConfigurationAndWait(java.util.Collection)}. Calling this before the first call
+ * to {@link #setConfigurationAndWait(java.util.Collection)}, after {@link #quit()}, or
+ * concurrently to one of these calls may result in an invalid
+ * {@link android.graphics.SurfaceTexture} being returned.
+ * </p>
+ *
+ * @return an {@link android.graphics.SurfaceTexture} to draw to.
+ */
+ public SurfaceTexture getCurrentSurfaceTexture() {
+ return mTextureRenderer.getSurfaceTexture();
+ }
+
+ /**
+ * Ignore any subsequent calls to {@link #queueNewFrame(java.util.Collection)}.
+ */
+ public void ignoreNewFrames() {
+ mGLHandlerThread.getHandler().sendEmptyMessage(MSG_DROP_FRAMES);
+ }
+
+ /**
+ * Wait until no messages are queued.
+ */
+ public void waitUntilIdle() {
+ mGLHandlerThread.waitUntilIdle();
+ }
+
+ /**
+ * Re-enable drawing new frames after a call to {@link #ignoreNewFrames()}.
+ */
+ public void allowNewFrames() {
+ mGLHandlerThread.getHandler().sendEmptyMessage(MSG_ALLOW_FRAMES);
+ }
+}
diff --git a/core/java/android/hardware/camera2/legacy/LegacyCameraDevice.java b/core/java/android/hardware/camera2/legacy/LegacyCameraDevice.java
new file mode 100644
index 0000000..f9cf905
--- /dev/null
+++ b/core/java/android/hardware/camera2/legacy/LegacyCameraDevice.java
@@ -0,0 +1,275 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.legacy;
+
+import android.graphics.ImageFormat;
+import android.hardware.Camera;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.impl.CaptureResultExtras;
+import android.hardware.camera2.ICameraDeviceCallbacks;
+import android.hardware.camera2.utils.LongParcelable;
+import android.hardware.camera2.impl.CameraMetadataNative;
+import android.hardware.camera2.utils.CameraBinderDecorator;
+import android.hardware.camera2.utils.CameraRuntimeException;
+import android.os.ConditionVariable;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.os.RemoteException;
+import android.util.Log;
+import android.view.Surface;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.atomic.AtomicInteger;
+
+/**
+ * This class emulates the functionality of a Camera2 device using a the old Camera class.
+ *
+ * <p>
+ * There are two main components that are used to implement this:
+ * - A state machine containing valid Camera2 device states ({@link CameraDeviceState}).
+ * - A message-queue based pipeline that manages an old Camera class, and executes capture and
+ * configuration requests.
+ * </p>
+ */
+public class LegacyCameraDevice implements AutoCloseable {
+ public static final String DEBUG_PROP = "HAL1ShimLogging";
+
+ private final String TAG;
+
+ private final int mCameraId;
+ private final ICameraDeviceCallbacks mDeviceCallbacks;
+ private final CameraDeviceState mDeviceState = new CameraDeviceState();
+
+ private final ConditionVariable mIdle = new ConditionVariable(/*open*/true);
+ private final AtomicInteger mRequestIdCounter = new AtomicInteger(0);
+
+ private final HandlerThread mCallbackHandlerThread = new HandlerThread("ResultThread");
+ private final Handler mCallbackHandler;
+ private static final int ILLEGAL_VALUE = -1;
+
+ private CaptureResultExtras getExtrasFromRequest(RequestHolder holder) {
+ if (holder == null) {
+ return new CaptureResultExtras(ILLEGAL_VALUE, ILLEGAL_VALUE, ILLEGAL_VALUE,
+ ILLEGAL_VALUE, ILLEGAL_VALUE);
+ }
+ return new CaptureResultExtras(holder.getRequestId(), holder.getSubsequeceId(),
+ /*afTriggerId*/0, /*precaptureTriggerId*/0, holder.getFrameNumber());
+ }
+
+ /**
+ * Listener for the camera device state machine. Calls the appropriate
+ * {@link ICameraDeviceCallbacks} for each state transition.
+ */
+ private final CameraDeviceState.CameraDeviceStateListener mStateListener =
+ new CameraDeviceState.CameraDeviceStateListener() {
+ @Override
+ public void onError(final int errorCode, RequestHolder holder) {
+ mIdle.open();
+ final CaptureResultExtras extras = getExtrasFromRequest(holder);
+ try {
+ mDeviceCallbacks.onCameraError(errorCode, extras);
+ } catch (RemoteException e) {
+ Log.e(TAG, "Received remote exception during onCameraError callback: ", e);
+ }
+
+ }
+
+ @Override
+ public void onConfiguring() {
+ // Do nothing
+ }
+
+ @Override
+ public void onIdle() {
+ mIdle.open();
+
+ try {
+ mDeviceCallbacks.onCameraIdle();
+ } catch (RemoteException e) {
+ Log.e(TAG, "Received remote exception during onCameraIdle callback: ", e);
+ }
+ }
+
+ @Override
+ public void onCaptureStarted(RequestHolder holder) {
+ final CaptureResultExtras extras = getExtrasFromRequest(holder);
+
+ try {
+ // TODO: Don't fake timestamp
+ mDeviceCallbacks.onCaptureStarted(extras, System.nanoTime());
+ } catch (RemoteException e) {
+ Log.e(TAG, "Received remote exception during onCameraError callback: ", e);
+ }
+
+ }
+
+ @Override
+ public void onCaptureResult(CameraMetadataNative result, RequestHolder holder) {
+ final CaptureResultExtras extras = getExtrasFromRequest(holder);
+
+ try {
+ // TODO: Don't fake metadata
+ mDeviceCallbacks.onResultReceived(result, extras);
+ } catch (RemoteException e) {
+ Log.e(TAG, "Received remote exception during onCameraError callback: ", e);
+ }
+ }
+ };
+
+ private final RequestThreadManager mRequestThreadManager;
+
+ /**
+ * Check if a given surface uses {@link ImageFormat#YUV_420_888} format.
+ *
+ * @param s the surface to check.
+ * @return {@code true} if the surfaces uses {@link ImageFormat#YUV_420_888}.
+ */
+ static boolean needsConversion(Surface s) {
+ return LegacyCameraDevice.nativeDetectSurfaceType(s) == ImageFormat.YUV_420_888;
+ }
+
+ /**
+ * Create a new emulated camera device from a given Camera 1 API camera.
+ *
+ * <p>
+ * The {@link Camera} provided to this constructor must already have been successfully opened,
+ * and ownership of the provided camera is passed to this object. No further calls to the
+ * camera methods should be made following this constructor.
+ * </p>
+ *
+ * @param cameraId the id of the camera.
+ * @param camera an open {@link Camera} device.
+ * @param callbacks {@link ICameraDeviceCallbacks} callbacks to call for Camera2 API operations.
+ */
+ public LegacyCameraDevice(int cameraId, Camera camera, ICameraDeviceCallbacks callbacks) {
+ mCameraId = cameraId;
+ mDeviceCallbacks = callbacks;
+ TAG = String.format("CameraDevice-%d-LE", mCameraId);
+
+ mCallbackHandlerThread.start();
+ mCallbackHandler = new Handler(mCallbackHandlerThread.getLooper());
+ mDeviceState.setCameraDeviceCallbacks(mCallbackHandler, mStateListener);
+ mRequestThreadManager =
+ new RequestThreadManager(cameraId, camera, mDeviceState);
+ mRequestThreadManager.start();
+ }
+
+ /**
+ * Configure the device with a set of output surfaces.
+ *
+ * @param outputs a list of surfaces to set.
+ * @return an error code for this binder operation, or {@link CameraBinderDecorator.NO_ERROR}
+ * on success.
+ */
+ public int configureOutputs(List<Surface> outputs) {
+ int error = mDeviceState.setConfiguring();
+ if (error == CameraBinderDecorator.NO_ERROR) {
+ mRequestThreadManager.configure(outputs);
+ error = mDeviceState.setIdle();
+ }
+ return error;
+ }
+
+ /**
+ * Submit a burst of capture requests.
+ *
+ * @param requestList a list of capture requests to execute.
+ * @param repeating {@code true} if this burst is repeating.
+ * @param frameNumber an output argument that contains either the frame number of the last frame
+ * that will be returned for this request, or the frame number of the last
+ * frame that will be returned for the current repeating request if this
+ * burst is set to be repeating.
+ * @return the request id.
+ */
+ public int submitRequestList(List<CaptureRequest> requestList, boolean repeating,
+ /*out*/LongParcelable frameNumber) {
+ // TODO: validate request here
+ mIdle.close();
+ return mRequestThreadManager.submitCaptureRequests(requestList, repeating,
+ frameNumber);
+ }
+
+ /**
+ * Submit a single capture request.
+ *
+ * @param request the capture request to execute.
+ * @param repeating {@code true} if this request is repeating.
+ * @param frameNumber an output argument that contains either the frame number of the last frame
+ * that will be returned for this request, or the frame number of the last
+ * frame that will be returned for the current repeating request if this
+ * request is set to be repeating.
+ * @return the request id.
+ */
+ public int submitRequest(CaptureRequest request, boolean repeating,
+ /*out*/LongParcelable frameNumber) {
+ ArrayList<CaptureRequest> requestList = new ArrayList<CaptureRequest>();
+ requestList.add(request);
+ return submitRequestList(requestList, repeating, frameNumber);
+ }
+
+ /**
+ * Cancel the repeating request with the given request id.
+ *
+ * @param requestId the request id of the request to cancel.
+ * @return the last frame number to be returned from the HAL for the given repeating request, or
+ * {@code INVALID_FRAME} if none exists.
+ */
+ public long cancelRequest(int requestId) {
+ return mRequestThreadManager.cancelRepeating(requestId);
+ }
+
+ /**
+ * Block until the {@link ICameraDeviceCallbacks#onCameraIdle()} callback is received.
+ */
+ public void waitUntilIdle() {
+ mIdle.block();
+ }
+
+ @Override
+ public void close() {
+ mRequestThreadManager.quit();
+ mCallbackHandlerThread.quitSafely();
+ // TODO: throw IllegalStateException in every method after close has been called
+ }
+
+ @Override
+ protected void finalize() throws Throwable {
+ try {
+ close();
+ } catch (CameraRuntimeException e) {
+ Log.e(TAG, "Got error while trying to finalize, ignoring: " + e.getMessage());
+ } finally {
+ super.finalize();
+ }
+ }
+
+ protected static native int nativeDetectSurfaceType(Surface surface);
+
+ protected static native void nativeDetectSurfaceDimens(Surface surface, int[] dimens);
+
+ protected static native void nativeConfigureSurface(Surface surface, int width, int height,
+ int pixelFormat);
+
+ protected static native void nativeProduceFrame(Surface surface, byte[] pixelBuffer, int width,
+ int height, int pixelFormat);
+
+ protected static native void nativeSetSurfaceFormat(Surface surface, int pixelFormat);
+
+ protected static native void nativeSetSurfaceDimens(Surface surface, int width, int height);
+
+}
diff --git a/core/java/android/hardware/camera2/legacy/RequestHandlerThread.java b/core/java/android/hardware/camera2/legacy/RequestHandlerThread.java
new file mode 100644
index 0000000..36cd907
--- /dev/null
+++ b/core/java/android/hardware/camera2/legacy/RequestHandlerThread.java
@@ -0,0 +1,101 @@
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2.legacy;

import android.os.ConditionVariable;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.MessageQueue;

/**
 * A {@link HandlerThread} that owns a single {@link Handler} bound to a caller-supplied
 * callback, with helpers to block until the thread has started or gone idle, and to
 * atomically query/remove multiple message types.
 */
public class RequestHandlerThread extends HandlerThread {
    // Opened once onLooperPrepared has run and mHandler is usable.
    private final ConditionVariable mStarted = new ConditionVariable(false);
    // Opened by the idle handler when the message queue drains.
    private final ConditionVariable mIdle = new ConditionVariable(true);
    private Handler.Callback mCallback;
    // volatile: written on this thread (onLooperPrepared), read from callers' threads.
    private volatile Handler mHandler;

    public RequestHandlerThread(String name, Handler.Callback callback) {
        // MAX_PRIORITY: this thread drives the capture pipeline and must not lag.
        super(name, Thread.MAX_PRIORITY);
        mCallback = callback;
    }

    @Override
    protected void onLooperPrepared() {
        mHandler = new Handler(getLooper(), mCallback);
        mStarted.open();
    }

    // Blocks until thread has started
    public void waitUntilStarted() {
        mStarted.block();
    }

    // May return null if the handler is not set up yet.
    public Handler getHandler() {
        return mHandler;
    }

    // Blocks until thread has started
    public Handler waitAndGetHandler() {
        waitUntilStarted();
        return getHandler();
    }

    // Atomic multi-type message existence check
    // NOTE(review): assumes start() has completed (mHandler non-null) — NPE if called
    // before onLooperPrepared; confirm all callers use waitAndGetHandler first.
    public boolean hasAnyMessages(int[] what) {
        // Lock the queue so the set of pending messages can't change between checks.
        synchronized (mHandler.getLooper().getQueue()) {
            for (int i : what) {
                if (mHandler.hasMessages(i)) {
                    return true;
                }
            }
        }
        return false;
    }

    // Atomic multi-type message remove
    // NOTE(review): same started-thread assumption as hasAnyMessages.
    public void removeMessages(int[] what) {
        synchronized (mHandler.getLooper().getQueue()) {
            for (int i : what) {
                mHandler.removeMessages(i);
            }
        }
    }

    // Opens mIdle once the queue drains; returning false removes it after one firing.
    private final MessageQueue.IdleHandler mIdleHandler = new MessageQueue.IdleHandler() {
        @Override
        public boolean queueIdle() {
            mIdle.open();
            return false;
        }
    };

    // Blocks until thread is idling
    public void waitUntilIdle() {
        Looper looper = waitAndGetHandler().getLooper();
        // Fast path: already idle, nothing to wait for.
        if (looper.isIdling()) {
            return;
        }
        mIdle.close();
        looper.getQueue().addIdleHandler(mIdleHandler);
        // Re-check: the queue may have drained between the first check and
        // registering the idle handler, in which case the handler may never fire.
        if (looper.isIdling()) {
            return;
        }
        mIdle.block();
    }

}
diff --git a/core/java/android/hardware/camera2/legacy/RequestHolder.java b/core/java/android/hardware/camera2/legacy/RequestHolder.java
new file mode 100644
index 0000000..8a9052f
--- /dev/null
+++ b/core/java/android/hardware/camera2/legacy/RequestHolder.java
@@ -0,0 +1,159 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.legacy;
+
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.impl.CameraMetadataNative;
+import android.view.Surface;
+
+import java.util.Collection;
+
+/**
+ * Immutable container for a single capture request and associated information.
+ */
+public class RequestHolder {
+
+ private final boolean mRepeating;
+ private final CaptureRequest mRequest;
+ private final int mRequestId;
+ private final int mSubsequeceId;
+ private final long mFrameNumber;
+
+ RequestHolder(int requestId, int subsequenceId, CaptureRequest request, boolean repeating,
+ long frameNumber) {
+ mRepeating = repeating;
+ mRequest = request;
+ mRequestId = requestId;
+ mSubsequeceId = subsequenceId;
+ mFrameNumber = frameNumber;
+ }
+
+ /**
+ * Return the request id for the contained {@link CaptureRequest}.
+ */
+ public int getRequestId() {
+ return mRequestId;
+ }
+
+ /**
+ * Returns true if the contained request is repeating.
+ */
+ public boolean isRepeating() {
+ return mRepeating;
+ }
+
+ /**
+ * Return the subsequence id for this request.
+ */
+ public int getSubsequeceId() {
+ return mSubsequeceId;
+ }
+
+ /**
+ * Returns the frame number for this request.
+ */
+ public long getFrameNumber() {
+ return mFrameNumber;
+ }
+
+ /**
+ * Returns the contained request.
+ */
+ public CaptureRequest getRequest() {
+ return mRequest;
+ }
+
+ /**
+ * Returns a read-only collection of the surfaces targeted by the contained request.
+ */
+ public Collection<Surface> getHolderTargets() {
+ return getRequest().getTargets();
+ }
+
+ /**
+ * Returns true if any of the surfaces targeted by the contained request require jpeg buffers.
+ */
+ public boolean hasJpegTargets() {
+ for (Surface s : getHolderTargets()) {
+ if (jpegType(s)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ /**
+ * Returns true if any of the surfaces targeted by the contained request require a
+ * non-jpeg buffer type.
+ */
+ public boolean hasPreviewTargets() {
+ for (Surface s : getHolderTargets()) {
+ if (previewType(s)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ /**
+ * Return the first surface targeted by the contained request that requires a
+ * non-jpeg buffer type.
+ */
+ public Surface getFirstPreviewTarget() {
+ for (Surface s : getHolderTargets()) {
+ if (previewType(s)) {
+ return s;
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Returns true if the given surface requires jpeg buffers.
+ *
+ * @param s a {@link Surface} to check.
+ * @return true if the surface requires a jpeg buffer.
+ */
+ public static boolean jpegType(Surface s) {
+ if (LegacyCameraDevice.nativeDetectSurfaceType(s) ==
+ CameraMetadataNative.NATIVE_JPEG_FORMAT) {
+ return true;
+ }
+ return false;
+ }
+
+ /**
+ * Returns true if the given surface requires non-jpeg buffer types.
+ *
+ * <p>
+ * "Jpeg buffer" refers to the buffers returned in the jpeg
+ * {@link android.hardware.Camera.PictureCallback}. Non-jpeg buffers are created using a tee
+ * of the preview stream drawn to the surface
+ * set via {@link android.hardware.Camera#setPreviewDisplay(android.view.SurfaceHolder)} or
+ * equivalent methods.
+ * </p>
+ * @param s a {@link Surface} to check.
+ * @return true if the surface requires a non-jpeg buffer type.
+ */
+ public static boolean previewType(Surface s) {
+ if (LegacyCameraDevice.nativeDetectSurfaceType(s) !=
+ CameraMetadataNative.NATIVE_JPEG_FORMAT) {
+ return true;
+ }
+ return false;
+ }
+}
diff --git a/core/java/android/hardware/camera2/legacy/RequestQueue.java b/core/java/android/hardware/camera2/legacy/RequestQueue.java
new file mode 100644
index 0000000..5c68303
--- /dev/null
+++ b/core/java/android/hardware/camera2/legacy/RequestQueue.java
@@ -0,0 +1,132 @@
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package android.hardware.camera2.legacy;

import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.utils.LongParcelable;
import android.util.Log;
import android.util.Pair;

import java.util.ArrayDeque;
import java.util.List;

/**
 * A queue of bursts of requests.
 *
 * <p>This queue maintains the count of frames that have been produced, and is thread safe.</p>
 */
public class RequestQueue {
    private static final String TAG = "RequestQueue";

    // Sentinel frame number meaning "no such frame exists".
    private static final long INVALID_FRAME = -1;

    // The currently-set repeating burst, or null if none is set. It is returned by
    // getNext() whenever the one-shot queue is empty, and is never removed by getNext().
    private BurstHolder mRepeatingRequest = null;
    private final ArrayDeque<BurstHolder> mRequestQueue = new ArrayDeque<BurstHolder>();

    // Frame number that will be assigned to the first frame of the next burst dispatched.
    private long mCurrentFrameNumber = 0;
    // Last frame number of the most recent iteration of the repeating burst, or
    // INVALID_FRAME when no repeating burst has been dispatched.
    private long mCurrentRepeatingFrameNumber = INVALID_FRAME;
    // Monotonically-increasing id handed out to each submitted burst.
    private int mCurrentRequestId = 0;

    public RequestQueue() {}

    /**
     * Return and remove the next burst on the queue.
     *
     * <p>If a repeating burst is returned, it will not be removed.</p>
     *
     * @return a pair containing the next burst and the current frame number, or null if none exist.
     */
    public synchronized Pair<BurstHolder, Long> getNext() {
        // One-shot bursts take priority; fall back to the repeating burst.
        BurstHolder next = mRequestQueue.poll();
        if (next == null && mRepeatingRequest != null) {
            next = mRepeatingRequest;
            // Track the last frame of this iteration so stopRepeating/submit can
            // report it as the repeating request's final frame number.
            mCurrentRepeatingFrameNumber = mCurrentFrameNumber +
                    next.getNumberOfRequests();
        }

        if (next == null) {
            return null;
        }

        // Pair carries the frame number of the burst's FIRST frame; advance the
        // counter past all frames this burst will produce.
        Pair<BurstHolder, Long> ret = new Pair<BurstHolder, Long>(next, mCurrentFrameNumber);
        mCurrentFrameNumber += next.getNumberOfRequests();
        return ret;
    }

    /**
     * Cancel a repeating request.
     *
     * @param requestId the id of the repeating request to cancel.
     * @return the last frame to be returned from the HAL for the given repeating request, or
     *          {@code INVALID_FRAME} if none exists.
     */
    public synchronized long stopRepeating(int requestId) {
        long ret = INVALID_FRAME;
        if (mRepeatingRequest != null && mRepeatingRequest.getRequestId() == requestId) {
            mRepeatingRequest = null;
            ret = mCurrentRepeatingFrameNumber;
            mCurrentRepeatingFrameNumber = INVALID_FRAME;
        } else {
            Log.e(TAG, "cancel failed: no repeating request exists for request id: " + requestId);
        }
        return ret;
    }

    /**
     * Add a the given burst to the queue.
     *
     * <p>If the burst is repeating, replace the current repeating burst.</p>
     *
     * @param requests the burst of requests to add to the queue.
     * @param repeating true if the burst is repeating.
     * @param frameNumber an output argument that contains either the frame number of the last frame
     *                    that will be returned for this request, or the frame number of the last
     *                    frame that will be returned for the current repeating request if this
     *                    burst is set to be repeating.
     * @return the request id.
     */
    public synchronized int submit(List<CaptureRequest> requests, boolean repeating,
            /*out*/LongParcelable frameNumber) {
        int requestId = mCurrentRequestId++;
        BurstHolder burst = new BurstHolder(requestId, repeating, requests);
        long ret = INVALID_FRAME;
        if (burst.isRepeating()) {
            // Replacing a repeating burst: report the last frame of the one being
            // replaced (or INVALID_FRAME if there was none).
            if (mRepeatingRequest != null) {
                ret = mCurrentRepeatingFrameNumber;
            }
            mCurrentRepeatingFrameNumber = INVALID_FRAME;
            mRepeatingRequest = burst;
        } else {
            mRequestQueue.offer(burst);
            ret = calculateLastFrame(burst.getRequestId());
        }
        frameNumber.setNumber(ret);
        return requestId;
    }

    // Compute the frame number of the LAST frame the given queued burst will produce,
    // by summing the sizes of all bursts ahead of (and including) it in the queue.
    private long calculateLastFrame(int requestId) {
        long total = mCurrentFrameNumber;
        for (BurstHolder b : mRequestQueue) {
            total += b.getNumberOfRequests();
            if (b.getRequestId() == requestId) {
                return total;
            }
        }
        throw new IllegalStateException(
                "At least one request must be in the queue to calculate frame number");
    }

}
diff --git a/core/java/android/hardware/camera2/legacy/RequestThreadManager.java b/core/java/android/hardware/camera2/legacy/RequestThreadManager.java
new file mode 100644
index 0000000..c4669f5
--- /dev/null
+++ b/core/java/android/hardware/camera2/legacy/RequestThreadManager.java
@@ -0,0 +1,491 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.legacy;
+
+import android.graphics.ImageFormat;
+import android.graphics.SurfaceTexture;
+import android.hardware.Camera;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.utils.LongParcelable;
+import android.hardware.camera2.impl.CameraMetadataNative;
+import android.os.ConditionVariable;
+import android.os.Handler;
+import android.os.Message;
+import android.os.SystemClock;
+import android.util.Log;
+import android.util.Pair;
+import android.view.Surface;
+
+import java.io.IOError;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
+/**
+ * This class executes requests to the {@link Camera}.
+ *
+ * <p>
+ * The main components of this class are:
+ * - A message queue of requests to the {@link Camera}.
+ * - A thread that consumes requests to the {@link Camera} and executes them.
+ * - A {@link GLThreadManager} that draws to the configured output {@link Surface}s.
+ * - An {@link CameraDeviceState} state machine that manages the callbacks for various operations.
+ * </p>
+ */
+public class RequestThreadManager {
+ private final String TAG;
+ private final int mCameraId;
+ private final RequestHandlerThread mRequestThread;
+
+ private static final boolean DEBUG = Log.isLoggable(LegacyCameraDevice.DEBUG_PROP, Log.DEBUG);
+ private final Camera mCamera;
+
+ private final CameraDeviceState mDeviceState;
+
+ private static final int MSG_CONFIGURE_OUTPUTS = 1;
+ private static final int MSG_SUBMIT_CAPTURE_REQUEST = 2;
+ private static final int MSG_CLEANUP = 3;
+
+ private static final int PREVIEW_FRAME_TIMEOUT = 300; // ms
+ private static final int JPEG_FRAME_TIMEOUT = 1000; // ms
+
+ private boolean mPreviewRunning = false;
+
+ private volatile RequestHolder mInFlightPreview;
+ private volatile RequestHolder mInFlightJpeg;
+
+ private List<Surface> mPreviewOutputs = new ArrayList<Surface>();
+ private List<Surface> mCallbackOutputs = new ArrayList<Surface>();
+ private GLThreadManager mGLThreadManager;
+ private SurfaceTexture mPreviewTexture;
+
+ private final RequestQueue mRequestQueue = new RequestQueue();
+ private SurfaceTexture mDummyTexture;
+ private Surface mDummySurface;
+
+ private final FpsCounter mPrevCounter = new FpsCounter("Incoming Preview");
+
+ /**
+ * Container object for Configure messages.
+ */
+ private static class ConfigureHolder {
+ public final ConditionVariable condition;
+ public final Collection<Surface> surfaces;
+
+ public ConfigureHolder(ConditionVariable condition, Collection<Surface> surfaces) {
+ this.condition = condition;
+ this.surfaces = surfaces;
+ }
+ }
+
+ /**
+ * Counter class used to calculate and log the current FPS of frame production.
+ */
+ public static class FpsCounter {
+ //TODO: Hook this up to SystTrace?
+ private static final String TAG = "FpsCounter";
+ private int mFrameCount = 0;
+ private long mLastTime = 0;
+ private long mLastPrintTime = 0;
+ private double mLastFps = 0;
+ private final String mStreamType;
+ private static final long NANO_PER_SECOND = 1000000000; //ns
+
+ public FpsCounter(String streamType) {
+ mStreamType = streamType;
+ }
+
+ public synchronized void countFrame() {
+ mFrameCount++;
+ long nextTime = SystemClock.elapsedRealtimeNanos();
+ if (mLastTime == 0) {
+ mLastTime = nextTime;
+ }
+ if (nextTime > mLastTime + NANO_PER_SECOND) {
+ long elapsed = nextTime - mLastTime;
+ mLastFps = mFrameCount * (NANO_PER_SECOND / (double) elapsed);
+ mFrameCount = 0;
+ mLastTime = nextTime;
+ }
+ }
+
+ public synchronized double checkFps() {
+ return mLastFps;
+ }
+
+ public synchronized void staggeredLog() {
+ if (mLastTime > mLastPrintTime + 5 * NANO_PER_SECOND) {
+ mLastPrintTime = mLastTime;
+ Log.d(TAG, "FPS for " + mStreamType + " stream: " + mLastFps );
+ }
+ }
+
+ public synchronized void countAndLog() {
+ countFrame();
+ staggeredLog();
+ }
+ }
+ /**
+ * Fake preview for jpeg captures when there is no active preview
+ */
+ private void createDummySurface() {
+ if (mDummyTexture == null || mDummySurface == null) {
+ mDummyTexture = new SurfaceTexture(/*ignored*/0);
+ // TODO: use smallest default sizes
+ mDummyTexture.setDefaultBufferSize(640, 480);
+ mDummySurface = new Surface(mDummyTexture);
+ }
+ }
+
+ private final ConditionVariable mReceivedJpeg = new ConditionVariable(false);
+ private final ConditionVariable mReceivedPreview = new ConditionVariable(false);
+
+ private final Camera.PictureCallback mJpegCallback = new Camera.PictureCallback() {
+ @Override
+ public void onPictureTaken(byte[] data, Camera camera) {
+ Log.i(TAG, "Received jpeg.");
+ RequestHolder holder = mInFlightJpeg;
+ if (holder == null) {
+ Log.w(TAG, "Dropping jpeg frame.");
+ mInFlightJpeg = null;
+ return;
+ }
+ for (Surface s : holder.getHolderTargets()) {
+ if (RequestHolder.jpegType(s)) {
+ Log.i(TAG, "Producing jpeg buffer...");
+ LegacyCameraDevice.nativeSetSurfaceDimens(s, data.length, /*height*/1);
+ LegacyCameraDevice.nativeProduceFrame(s, data, data.length, /*height*/1,
+ CameraMetadataNative.NATIVE_JPEG_FORMAT);
+ }
+ }
+ mReceivedJpeg.open();
+ }
+ };
+
+ private final SurfaceTexture.OnFrameAvailableListener mPreviewCallback =
+ new SurfaceTexture.OnFrameAvailableListener() {
+ @Override
+ public void onFrameAvailable(SurfaceTexture surfaceTexture) {
+ if (DEBUG) {
+ mPrevCounter.countAndLog();
+ }
+ RequestHolder holder = mInFlightPreview;
+ if (holder == null) {
+ Log.w(TAG, "Dropping preview frame.");
+ mInFlightPreview = null;
+ return;
+ }
+ if (holder.hasPreviewTargets()) {
+ mGLThreadManager.queueNewFrame(holder.getHolderTargets());
+ }
+
+ mReceivedPreview.open();
+ }
+ };
+
+ private void stopPreview() {
+ if (mPreviewRunning) {
+ mCamera.stopPreview();
+ mPreviewRunning = false;
+ }
+ }
+
+ private void startPreview() {
+ if (!mPreviewRunning) {
+ mCamera.startPreview();
+ mPreviewRunning = true;
+ }
+ }
+
+ private void doJpegCapture(RequestHolder request) throws IOException {
+ if (!mPreviewRunning) {
+ createDummySurface();
+ mCamera.setPreviewTexture(mDummyTexture);
+ startPreview();
+ }
+ mInFlightJpeg = request;
+ // TODO: Hook up shutter callback to CameraDeviceStateListener#onCaptureStarted
+ mCamera.takePicture(/*shutter*/null, /*raw*/null, mJpegCallback);
+ mPreviewRunning = false;
+ }
+
+ private void doPreviewCapture(RequestHolder request) throws IOException {
+ mInFlightPreview = request;
+ if (mPreviewRunning) {
+ return; // Already running
+ }
+
+ mPreviewTexture.setDefaultBufferSize(640, 480); // TODO: size selection based on request
+ mCamera.setPreviewTexture(mPreviewTexture);
+ Camera.Parameters params = mCamera.getParameters();
+ List<int[]> supportedFpsRanges = params.getSupportedPreviewFpsRange();
+ int[] bestRange = getPhotoPreviewFpsRange(supportedFpsRanges);
+ if (DEBUG) {
+ Log.d(TAG, "doPreviewCapture - Selected range [" +
+ bestRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX] + "," +
+ bestRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX] + "]");
+ }
+ params.setPreviewFpsRange(bestRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
+ bestRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
+ params.setRecordingHint(true);
+ mCamera.setParameters(params);
+
+ startPreview();
+ }
+
+ private void configureOutputs(Collection<Surface> outputs) throws IOException {
+ stopPreview();
+ if (mGLThreadManager != null) {
+ mGLThreadManager.waitUntilStarted();
+ mGLThreadManager.ignoreNewFrames();
+ mGLThreadManager.waitUntilIdle();
+ }
+ mPreviewOutputs.clear();
+ mCallbackOutputs.clear();
+ mPreviewTexture = null;
+ mInFlightPreview = null;
+ mInFlightJpeg = null;
+
+ for (Surface s : outputs) {
+ int format = LegacyCameraDevice.nativeDetectSurfaceType(s);
+ switch (format) {
+ case CameraMetadataNative.NATIVE_JPEG_FORMAT:
+ mCallbackOutputs.add(s);
+ break;
+ default:
+ mPreviewOutputs.add(s);
+ break;
+ }
+ }
+
+ // TODO: Detect and optimize single-output paths here to skip stream teeing.
+ if (mGLThreadManager == null) {
+ mGLThreadManager = new GLThreadManager(mCameraId);
+ mGLThreadManager.start();
+ }
+ mGLThreadManager.waitUntilStarted();
+ mGLThreadManager.setConfigurationAndWait(mPreviewOutputs);
+ mGLThreadManager.allowNewFrames();
+ mPreviewTexture = mGLThreadManager.getCurrentSurfaceTexture();
+ mPreviewTexture.setOnFrameAvailableListener(mPreviewCallback);
+ }
+
+ // Calculate the highest FPS range supported
+ private int[] getPhotoPreviewFpsRange(List<int[]> frameRates) {
+ if (frameRates.size() == 0) {
+ Log.e(TAG, "No supported frame rates returned!");
+ return null;
+ }
+
+ int bestMin = 0;
+ int bestMax = 0;
+ int bestIndex = 0;
+ int index = 0;
+ for (int[] rate : frameRates) {
+ int minFps = rate[Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
+ int maxFps = rate[Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
+ if (maxFps > bestMax || (maxFps == bestMax && minFps > bestMin)) {
+ bestMin = minFps;
+ bestMax = maxFps;
+ bestIndex = index;
+ }
+ index++;
+ }
+
+ return frameRates.get(bestIndex);
+ }
+
+ private final Handler.Callback mRequestHandlerCb = new Handler.Callback() {
+ private boolean mCleanup = false;
+ private List<RequestHolder> mRepeating = null;
+
+ @SuppressWarnings("unchecked")
+ @Override
+ public boolean handleMessage(Message msg) {
+ if (mCleanup) {
+ return true;
+ }
+
+ switch (msg.what) {
+ case MSG_CONFIGURE_OUTPUTS:
+ ConfigureHolder config = (ConfigureHolder) msg.obj;
+ Log.i(TAG, "Configure outputs: " + config.surfaces.size() +
+ " surfaces configured.");
+ try {
+ configureOutputs(config.surfaces);
+ } catch (IOException e) {
+ // TODO: report error to CameraDevice
+ throw new IOError(e);
+ }
+ config.condition.open();
+ break;
+ case MSG_SUBMIT_CAPTURE_REQUEST:
+ Handler handler = RequestThreadManager.this.mRequestThread.getHandler();
+
+ // Get the next burst from the request queue.
+ Pair<BurstHolder, Long> nextBurst = mRequestQueue.getNext();
+ if (nextBurst == null) {
+ mDeviceState.setIdle();
+ stopPreview();
+ break;
+ } else {
+ // Queue another capture if we did not get the last burst.
+ handler.sendEmptyMessage(MSG_SUBMIT_CAPTURE_REQUEST);
+ }
+
+ // Complete each request in the burst
+ List<RequestHolder> requests =
+ nextBurst.first.produceRequestHolders(nextBurst.second);
+ for (RequestHolder holder : requests) {
+ mDeviceState.setCaptureStart(holder);
+ try {
+ if (holder.hasPreviewTargets()) {
+ mReceivedPreview.close();
+ doPreviewCapture(holder);
+ if (!mReceivedPreview.block(PREVIEW_FRAME_TIMEOUT)) {
+ // TODO: report error to CameraDevice
+ Log.e(TAG, "Hit timeout for preview callback!");
+ }
+ }
+ if (holder.hasJpegTargets()) {
+ mReceivedJpeg.close();
+ doJpegCapture(holder);
+ mReceivedJpeg.block();
+ if (!mReceivedJpeg.block(JPEG_FRAME_TIMEOUT)) {
+ // TODO: report error to CameraDevice
+ Log.e(TAG, "Hit timeout for jpeg callback!");
+ }
+ mInFlightJpeg = null;
+ }
+ } catch (IOException e) {
+ // TODO: err handling
+ throw new IOError(e);
+ }
+ // TODO: Set fields in result.
+ mDeviceState.setCaptureResult(holder, new CameraMetadataNative());
+ }
+ break;
+ case MSG_CLEANUP:
+ mCleanup = true;
+ if (mGLThreadManager != null) {
+ mGLThreadManager.quit();
+ }
+ if (mCamera != null) {
+ mCamera.release();
+ }
+ break;
+ default:
+ throw new AssertionError("Unhandled message " + msg.what +
+ " on RequestThread.");
+ }
+ return true;
+ }
+ };
+
+ /**
+ * Create a new RequestThreadManager.
+ *
+ * @param cameraId the id of the camera to use.
+ * @param camera an open camera object. The RequestThreadManager takes ownership of this camera
+ * object, and is responsible for closing it.
+ * @param deviceState a {@link CameraDeviceState} state machine.
+ */
+ public RequestThreadManager(int cameraId, Camera camera,
+ CameraDeviceState deviceState) {
+ mCamera = camera;
+ mCameraId = cameraId;
+ String name = String.format("RequestThread-%d", cameraId);
+ TAG = name;
+ mDeviceState = deviceState;
+ mRequestThread = new RequestHandlerThread(name, mRequestHandlerCb);
+ }
+
+ /**
+ * Start the request thread.
+ */
+ public void start() {
+ mRequestThread.start();
+ }
+
+ /**
+ * Flush the pending requests.
+ */
+ public void flush() {
+ // TODO: Implement flush.
+ Log.e(TAG, "flush not yet implemented.");
+ }
+
+ /**
+ * Quit the request thread, and clean up everything.
+ */
+ public void quit() {
+ Handler handler = mRequestThread.waitAndGetHandler();
+ handler.sendMessageAtFrontOfQueue(handler.obtainMessage(MSG_CLEANUP));
+ mRequestThread.quitSafely();
+ }
+
+ /**
+ * Submit the given burst of requests to be captured.
+ *
+ * <p>If the burst is repeating, replace the current repeating burst.</p>
+ *
+ * @param requests the burst of requests to add to the queue.
+ * @param repeating true if the burst is repeating.
+ * @param frameNumber an output argument that contains either the frame number of the last frame
+ * that will be returned for this request, or the frame number of the last
+ * frame that will be returned for the current repeating request if this
+ * burst is set to be repeating.
+ * @return the request id.
+ */
+ public int submitCaptureRequests(List<CaptureRequest> requests, boolean repeating,
+ /*out*/LongParcelable frameNumber) {
+ Handler handler = mRequestThread.waitAndGetHandler();
+ int ret = mRequestQueue.submit(requests, repeating, frameNumber);
+ handler.sendEmptyMessage(MSG_SUBMIT_CAPTURE_REQUEST);
+ return ret;
+ }
+
+ /**
+ * Cancel a repeating request.
+ *
+ * @param requestId the id of the repeating request to cancel.
+ * @return the last frame to be returned from the HAL for the given repeating request, or
+ * {@code INVALID_FRAME} if none exists.
+ */
+ public long cancelRepeating(int requestId) {
+ return mRequestQueue.stopRepeating(requestId);
+ }
+
+
+ /**
+ * Configure with the current output Surfaces.
+ *
+ * <p>
+ * This operation blocks until the configuration is complete.
+ * </p>
+ *
+ * @param outputs a {@link java.util.Collection} of outputs to configure.
+ */
+ public void configure(Collection<Surface> outputs) {
+ Handler handler = mRequestThread.waitAndGetHandler();
+ final ConditionVariable condition = new ConditionVariable(/*closed*/false);
+ ConfigureHolder holder = new ConfigureHolder(condition, outputs);
+ handler.sendMessage(handler.obtainMessage(MSG_CONFIGURE_OUTPUTS, 0, 0, holder));
+ condition.block();
+ }
+}
diff --git a/core/java/android/hardware/camera2/legacy/SurfaceTextureRenderer.java b/core/java/android/hardware/camera2/legacy/SurfaceTextureRenderer.java
new file mode 100644
index 0000000..2f0f6bc
--- /dev/null
+++ b/core/java/android/hardware/camera2/legacy/SurfaceTextureRenderer.java
@@ -0,0 +1,522 @@
+/*
+* Copyright (C) 2014 The Android Open Source Project
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+package android.hardware.camera2.legacy;
+
+import android.graphics.ImageFormat;
+import android.graphics.SurfaceTexture;
+import android.opengl.EGL14;
+import android.opengl.EGLConfig;
+import android.opengl.EGLContext;
+import android.opengl.EGLDisplay;
+import android.opengl.EGLSurface;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.opengl.Matrix;
+import android.util.Log;
+import android.view.Surface;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
+/**
+ * A renderer class that manages the GL state, and can draw a frame into a set of output
+ * {@link Surface}s.
+ */
+public class SurfaceTextureRenderer {
+ private static final String TAG = SurfaceTextureRenderer.class.getSimpleName();
+ private static final boolean DEBUG = Log.isLoggable(LegacyCameraDevice.DEBUG_PROP, Log.DEBUG);
+ private static final int EGL_RECORDABLE_ANDROID = 0x3142; // from EGL/eglext.h
+ private static final int GL_MATRIX_SIZE = 16;
+ private static final int VERTEX_POS_SIZE = 3;
+ private static final int VERTEX_UV_SIZE = 2;
+ private static final int EGL_COLOR_BITLENGTH = 8;
+ private static final int GLES_VERSION = 2;
+ private static final int PBUFFER_PIXEL_BYTES = 4;
+
+ private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
+ private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT;
+ private EGLConfig mConfigs;
+
+ private class EGLSurfaceHolder {
+ Surface surface;
+ EGLSurface eglSurface;
+ int width;
+ int height;
+ }
+
+ private List<EGLSurfaceHolder> mSurfaces = new ArrayList<EGLSurfaceHolder>();
+ private List<EGLSurfaceHolder> mConversionSurfaces = new ArrayList<EGLSurfaceHolder>();
+
+ private ByteBuffer mPBufferPixels;
+
+ // Hold this to avoid GC
+ private volatile SurfaceTexture mSurfaceTexture;
+
+ private static final int FLOAT_SIZE_BYTES = 4;
+ private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
+ private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
+ private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
+ private final float[] mTriangleVerticesData = {
+ // X, Y, Z, U, V
+ -1.0f, -1.0f, 0, 0.f, 0.f,
+ 1.0f, -1.0f, 0, 1.f, 0.f,
+ -1.0f, 1.0f, 0, 0.f, 1.f,
+ 1.0f, 1.0f, 0, 1.f, 1.f,
+ };
+
+ private FloatBuffer mTriangleVertices;
+
+ /**
+ * As used in this file, this vertex shader maps a unit square to the view, and
+ * tells the fragment shader to interpolate over it. Each surface pixel position
+ * is mapped to a 2D homogeneous texture coordinate of the form (s, t, 0, 1) with
+ * s and t in the inclusive range [0, 1], and the matrix from
+ * {@link SurfaceTexture#getTransformMatrix(float[])} is used to map this
+ * coordinate to a texture location.
+ */
+ private static final String VERTEX_SHADER =
+ "uniform mat4 uMVPMatrix;\n" +
+ "uniform mat4 uSTMatrix;\n" +
+ "attribute vec4 aPosition;\n" +
+ "attribute vec4 aTextureCoord;\n" +
+ "varying vec2 vTextureCoord;\n" +
+ "void main() {\n" +
+ " gl_Position = uMVPMatrix * aPosition;\n" +
+ " vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
+ "}\n";
+
+ /**
+ * This fragment shader simply draws the color in the 2D texture at
+ * the location from the {@code VERTEX_SHADER}.
+ */
+ private static final String FRAGMENT_SHADER =
+ "#extension GL_OES_EGL_image_external : require\n" +
+ "precision mediump float;\n" +
+ "varying vec2 vTextureCoord;\n" +
+ "uniform samplerExternalOES sTexture;\n" +
+ "void main() {\n" +
+ " gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
+ "}\n";
+
+ private float[] mMVPMatrix = new float[GL_MATRIX_SIZE];
+ private float[] mSTMatrix = new float[GL_MATRIX_SIZE];
+
+ private int mProgram;
+ private int mTextureID = 0;
+ private int muMVPMatrixHandle;
+ private int muSTMatrixHandle;
+ private int maPositionHandle;
+ private int maTextureHandle;
+
+ public SurfaceTextureRenderer() {
+ mTriangleVertices = ByteBuffer.allocateDirect(mTriangleVerticesData.length *
+ FLOAT_SIZE_BYTES).order(ByteOrder.nativeOrder()).asFloatBuffer();
+ mTriangleVertices.put(mTriangleVerticesData).position(0);
+ Matrix.setIdentityM(mSTMatrix, 0);
+ }
+
+ private int loadShader(int shaderType, String source) {
+ int shader = GLES20.glCreateShader(shaderType);
+ checkGlError("glCreateShader type=" + shaderType);
+ GLES20.glShaderSource(shader, source);
+ GLES20.glCompileShader(shader);
+ int[] compiled = new int[1];
+ GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
+ if (compiled[0] == 0) {
+ Log.e(TAG, "Could not compile shader " + shaderType + ":");
+ Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
+ GLES20.glDeleteShader(shader);
+ // TODO: handle this more gracefully
+ throw new IllegalStateException("Could not compile shader " + shaderType);
+ }
+ return shader;
+ }
+
+ private int createProgram(String vertexSource, String fragmentSource) {
+ int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
+ if (vertexShader == 0) {
+ return 0;
+ }
+ int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
+ if (pixelShader == 0) {
+ return 0;
+ }
+
+ int program = GLES20.glCreateProgram();
+ checkGlError("glCreateProgram");
+ if (program == 0) {
+ Log.e(TAG, "Could not create program");
+ }
+ GLES20.glAttachShader(program, vertexShader);
+ checkGlError("glAttachShader");
+ GLES20.glAttachShader(program, pixelShader);
+ checkGlError("glAttachShader");
+ GLES20.glLinkProgram(program);
+ int[] linkStatus = new int[1];
+ GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
+ if (linkStatus[0] != GLES20.GL_TRUE) {
+ Log.e(TAG, "Could not link program: ");
+ Log.e(TAG, GLES20.glGetProgramInfoLog(program));
+ GLES20.glDeleteProgram(program);
+ // TODO: handle this more gracefully
+ throw new IllegalStateException("Could not link program");
+ }
+ return program;
+ }
+
+ private void drawFrame(SurfaceTexture st) {
+ checkGlError("onDrawFrame start");
+ st.getTransformMatrix(mSTMatrix);
+
+ if (DEBUG) {
+ GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
+ GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
+ }
+
+ GLES20.glUseProgram(mProgram);
+ checkGlError("glUseProgram");
+
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
+
+ mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
+ GLES20.glVertexAttribPointer(maPositionHandle, VERTEX_POS_SIZE, GLES20.GL_FLOAT,
+ /*normalized*/ false,TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
+ checkGlError("glVertexAttribPointer maPosition");
+ GLES20.glEnableVertexAttribArray(maPositionHandle);
+ checkGlError("glEnableVertexAttribArray maPositionHandle");
+
+ mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
+ GLES20.glVertexAttribPointer(maTextureHandle, VERTEX_UV_SIZE, GLES20.GL_FLOAT,
+ /*normalized*/ false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
+ checkGlError("glVertexAttribPointer maTextureHandle");
+ GLES20.glEnableVertexAttribArray(maTextureHandle);
+ checkGlError("glEnableVertexAttribArray maTextureHandle");
+
+ Matrix.setIdentityM(mMVPMatrix, 0);
+ GLES20.glUniformMatrix4fv(muMVPMatrixHandle, /*count*/ 1, /*transpose*/ false, mMVPMatrix,
+ /*offset*/ 0);
+ GLES20.glUniformMatrix4fv(muSTMatrixHandle, /*count*/ 1, /*transpose*/ false, mSTMatrix,
+ /*offset*/ 0);
+
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /*offset*/ 0, /*count*/ 4);
+ checkGlError("glDrawArrays");
+ GLES20.glFinish();
+ }
+
+ /**
+ * Initializes GL state. Call this after the EGL surface has been created and made current.
+ */
+ private void initializeGLState() {
+ mProgram = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
+ if (mProgram == 0) {
+ throw new IllegalStateException("failed creating program");
+ }
+ maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
+ checkGlError("glGetAttribLocation aPosition");
+ if (maPositionHandle == -1) {
+ throw new IllegalStateException("Could not get attrib location for aPosition");
+ }
+ maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
+ checkGlError("glGetAttribLocation aTextureCoord");
+ if (maTextureHandle == -1) {
+ throw new IllegalStateException("Could not get attrib location for aTextureCoord");
+ }
+
+ muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
+ checkGlError("glGetUniformLocation uMVPMatrix");
+ if (muMVPMatrixHandle == -1) {
+ throw new IllegalStateException("Could not get attrib location for uMVPMatrix");
+ }
+
+ muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
+ checkGlError("glGetUniformLocation uSTMatrix");
+ if (muSTMatrixHandle == -1) {
+ throw new IllegalStateException("Could not get attrib location for uSTMatrix");
+ }
+
+ int[] textures = new int[1];
+ GLES20.glGenTextures(/*n*/ 1, textures, /*offset*/ 0);
+
+ mTextureID = textures[0];
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
+ checkGlError("glBindTexture mTextureID");
+
+ GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
+ GLES20.GL_NEAREST);
+ GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
+ GLES20.GL_LINEAR);
+ GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
+ GLES20.GL_CLAMP_TO_EDGE);
+ GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
+ GLES20.GL_CLAMP_TO_EDGE);
+ checkGlError("glTexParameter");
+ }
+
+ private int getTextureId() {
+ return mTextureID;
+ }
+
+ private void clearState() {
+ mSurfaces.clear();
+ mConversionSurfaces.clear();
+ mPBufferPixels = null;
+ mSurfaceTexture = null;
+ }
+
+ private void configureEGLContext() {
+ mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
+ if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
+ throw new IllegalStateException("No EGL14 display");
+ }
+ int[] version = new int[2];
+ if (!EGL14.eglInitialize(mEGLDisplay, version, /*offset*/ 0, version, /*offset*/ 1)) {
+ throw new IllegalStateException("Cannot initialize EGL14");
+ }
+
+ int[] attribList = {
+ EGL14.EGL_RED_SIZE, EGL_COLOR_BITLENGTH,
+ EGL14.EGL_GREEN_SIZE, EGL_COLOR_BITLENGTH,
+ EGL14.EGL_BLUE_SIZE, EGL_COLOR_BITLENGTH,
+ EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
+ EGL_RECORDABLE_ANDROID, 1,
+ EGL14.EGL_SURFACE_TYPE, EGL14.EGL_PBUFFER_BIT | EGL14.EGL_WINDOW_BIT,
+ EGL14.EGL_NONE
+ };
+ EGLConfig[] configs = new EGLConfig[1];
+ int[] numConfigs = new int[1];
+ EGL14.eglChooseConfig(mEGLDisplay, attribList, /*offset*/ 0, configs, /*offset*/ 0,
+ configs.length, numConfigs, /*offset*/ 0);
+ checkEglError("eglCreateContext RGB888+recordable ES2");
+ mConfigs = configs[0];
+ int[] attrib_list = {
+ EGL14.EGL_CONTEXT_CLIENT_VERSION, GLES_VERSION,
+ EGL14.EGL_NONE
+ };
+ mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT,
+ attrib_list, /*offset*/ 0);
+ checkEglError("eglCreateContext");
+ if(mEGLContext == EGL14.EGL_NO_CONTEXT) {
+ throw new IllegalStateException("No EGLContext could be made");
+ }
+ }
+
+ private void configureEGLOutputSurfaces(Collection<EGLSurfaceHolder> surfaces) {
+ if (surfaces == null || surfaces.size() == 0) {
+ throw new IllegalStateException("No Surfaces were provided to draw to");
+ }
+ int[] surfaceAttribs = {
+ EGL14.EGL_NONE
+ };
+ for (EGLSurfaceHolder holder : surfaces) {
+ holder.eglSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, mConfigs, holder.surface,
+ surfaceAttribs, 0);
+ checkEglError("eglCreateWindowSurface");
+ }
+ }
+
+ private void configureEGLPbufferSurfaces(Collection<EGLSurfaceHolder> surfaces) {
+ if (surfaces == null || surfaces.size() == 0) {
+ throw new IllegalStateException("No Surfaces were provided to draw to");
+ }
+
+ int maxLength = 0;
+ int[] dimens = new int[2];
+ for (EGLSurfaceHolder holder : surfaces) {
+ LegacyCameraDevice.nativeDetectSurfaceDimens(holder.surface, dimens);
+ int length = dimens[0] * dimens[1];
+ // Find max surface size, ensure PBuffer can hold this many pixels
+ maxLength = (length > maxLength) ? length : maxLength;
+ int[] surfaceAttribs = {
+ EGL14.EGL_WIDTH, dimens[0],
+ EGL14.EGL_HEIGHT, dimens[1],
+ EGL14.EGL_NONE
+ };
+ holder.width = dimens[0];
+ holder.height = dimens[1];
+ holder.eglSurface =
+ EGL14.eglCreatePbufferSurface(mEGLDisplay, mConfigs, surfaceAttribs, 0);
+ checkEglError("eglCreatePbufferSurface");
+ }
+ mPBufferPixels = ByteBuffer.allocateDirect(maxLength * PBUFFER_PIXEL_BYTES)
+ .order(ByteOrder.nativeOrder());
+ }
+
+ private void releaseEGLContext() {
+ if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
+ EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
+ EGL14.EGL_NO_CONTEXT);
+ if (mSurfaces != null) {
+ for (EGLSurfaceHolder holder : mSurfaces) {
+ if (holder.eglSurface != null) {
+ EGL14.eglDestroySurface(mEGLDisplay, holder.eglSurface);
+ }
+ }
+ }
+ if (mConversionSurfaces != null) {
+ for (EGLSurfaceHolder holder : mConversionSurfaces) {
+ if (holder.eglSurface != null) {
+ EGL14.eglDestroySurface(mEGLDisplay, holder.eglSurface);
+ }
+ }
+ }
+ EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
+ EGL14.eglReleaseThread();
+ EGL14.eglTerminate(mEGLDisplay);
+ }
+
+ mConfigs = null;
+ mEGLDisplay = EGL14.EGL_NO_DISPLAY;
+ mEGLContext = EGL14.EGL_NO_CONTEXT;
+ clearState();
+ }
+
+ private void makeCurrent(EGLSurface surface) {
+ EGL14.eglMakeCurrent(mEGLDisplay, surface, surface, mEGLContext);
+ checkEglError("makeCurrent");
+ }
+
+ private boolean swapBuffers(EGLSurface surface) {
+ boolean result = EGL14.eglSwapBuffers(mEGLDisplay, surface);
+ checkEglError("swapBuffers");
+ return result;
+ }
+
+ private void checkEglError(String msg) {
+ int error;
+ if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
+ throw new IllegalStateException(msg + ": EGL error: 0x" + Integer.toHexString(error));
+ }
+ }
+
+ private void checkGlError(String msg) {
+ int error;
+ while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
+ throw new IllegalStateException(msg + ": GLES20 error: 0x" + Integer.toHexString(error));
+ }
+ }
+
+ /**
+ * Return the surface texture to draw to - this is the texture use to when producing output
+ * surface buffers.
+ *
+ * @return a {@link SurfaceTexture}.
+ */
+ public SurfaceTexture getSurfaceTexture() {
+ return mSurfaceTexture;
+ }
+
+ /**
+ * Set a collection of output {@link Surface}s that can be drawn to.
+ *
+ * @param surfaces a {@link Collection} of surfaces.
+ */
+ public void configureSurfaces(Collection<Surface> surfaces) {
+ releaseEGLContext();
+
+ for (Surface s : surfaces) {
+ // If pixel conversions aren't handled by egl, use a pbuffer
+ if (LegacyCameraDevice.needsConversion(s)) {
+ LegacyCameraDevice.nativeSetSurfaceFormat(s, ImageFormat.NV21);
+ EGLSurfaceHolder holder = new EGLSurfaceHolder();
+ holder.surface = s;
+ mConversionSurfaces.add(holder);
+ } else {
+ EGLSurfaceHolder holder = new EGLSurfaceHolder();
+ holder.surface = s;
+ mSurfaces.add(holder);
+ }
+ }
+
+ // Set up egl display
+ configureEGLContext();
+
+ // Set up regular egl surfaces if needed
+ if (mSurfaces.size() > 0) {
+ configureEGLOutputSurfaces(mSurfaces);
+ }
+
+ // Set up pbuffer surface if needed
+ if (mConversionSurfaces.size() > 0) {
+ configureEGLPbufferSurfaces(mConversionSurfaces);
+ }
+ makeCurrent((mSurfaces.size() > 0) ? mSurfaces.get(0).eglSurface :
+ mConversionSurfaces.get(0).eglSurface);
+ initializeGLState();
+ mSurfaceTexture = new SurfaceTexture(getTextureId());
+ }
+
+ /**
+ * Draw the current buffer in the {@link SurfaceTexture} returned from
+ * {@link #getSurfaceTexture()} into the given set of target surfaces.
+ *
+ * <p>
+ * The given surfaces must be a subset of the surfaces set in the last
+ * {@link #configureSurfaces(java.util.Collection)} call.
+ * </p>
+ *
+ * @param targetSurfaces the surfaces to draw to.
+ */
+ public void drawIntoSurfaces(Collection<Surface> targetSurfaces) {
+ if ((mSurfaces == null || mSurfaces.size() == 0)
+ && (mConversionSurfaces == null || mConversionSurfaces.size() == 0)) {
+ return;
+ }
+ checkGlError("before updateTexImage");
+ mSurfaceTexture.updateTexImage();
+ for (EGLSurfaceHolder holder : mSurfaces) {
+ if (targetSurfaces.contains(holder.surface)) {
+ makeCurrent(holder.eglSurface);
+ drawFrame(mSurfaceTexture);
+ swapBuffers(holder.eglSurface);
+ }
+
+ }
+ for (EGLSurfaceHolder holder : mConversionSurfaces) {
+ if (targetSurfaces.contains(holder.surface)) {
+ makeCurrent(holder.eglSurface);
+ drawFrame(mSurfaceTexture);
+ mPBufferPixels.clear();
+ GLES20.glReadPixels(/*x*/ 0, /*y*/ 0, holder.width, holder.height, GLES20.GL_RGBA,
+ GLES20.GL_UNSIGNED_BYTE, mPBufferPixels);
+ checkGlError("glReadPixels");
+ int format = LegacyCameraDevice.nativeDetectSurfaceType(holder.surface);
+ LegacyCameraDevice.nativeProduceFrame(holder.surface, mPBufferPixels.array(),
+ holder.width, holder.height, format);
+ swapBuffers(holder.eglSurface);
+ }
+ }
+ }
+
+ /**
+ * Clean up the current GL context.
+ */
+ public void cleanupEGLContext() {
+ releaseEGLContext();
+ }
+
+ /**
+ * Drop all current GL operations on the floor.
+ */
+ public void flush() {
+ // TODO: implement flush
+ Log.e(TAG, "Flush not yet implemented.");
+ }
+}
diff --git a/core/java/android/hardware/camera2/legacy/package.html b/core/java/android/hardware/camera2/legacy/package.html
new file mode 100644
index 0000000..db6f78b
--- /dev/null
+++ b/core/java/android/hardware/camera2/legacy/package.html
@@ -0,0 +1,3 @@
+<body>
+{@hide}
+</body> \ No newline at end of file
diff --git a/core/java/android/hardware/camera2/utils/CameraBinderDecorator.java b/core/java/android/hardware/camera2/utils/CameraBinderDecorator.java
index 328ccbe..40cda08 100644
--- a/core/java/android/hardware/camera2/utils/CameraBinderDecorator.java
+++ b/core/java/android/hardware/camera2/utils/CameraBinderDecorator.java
@@ -40,6 +40,7 @@ public class CameraBinderDecorator {
public static final int ALREADY_EXISTS = -17;
public static final int BAD_VALUE = -22;
public static final int DEAD_OBJECT = -32;
+ public static final int INVALID_OPERATION = -38;
/**
* TODO: add as error codes in Errors.h
@@ -53,6 +54,7 @@ public class CameraBinderDecorator {
public static final int EOPNOTSUPP = -95;
public static final int EUSERS = -87;
+
private static class CameraBinderDecoratorListener implements Decorator.DecoratorListener {
@Override
@@ -125,6 +127,9 @@ public class CameraBinderDecorator {
case EOPNOTSUPP:
UncheckedThrow.throwAnyException(new CameraRuntimeException(
CAMERA_DEPRECATED_HAL));
+ case INVALID_OPERATION:
+ UncheckedThrow.throwAnyException(new IllegalStateException(
+ "Illegal state encountered in camera service."));
}
/**
diff --git a/core/jni/Android.mk b/core/jni/Android.mk
index 355204e..c3654fb 100644
--- a/core/jni/Android.mk
+++ b/core/jni/Android.mk
@@ -135,6 +135,7 @@ LOCAL_SRC_FILES:= \
android_media_ToneGenerator.cpp \
android_hardware_Camera.cpp \
android_hardware_camera2_CameraMetadata.cpp \
+ android_hardware_camera2_legacy_LegacyCameraDevice.cpp \
android_hardware_SensorManager.cpp \
android_hardware_SerialPort.cpp \
android_hardware_UsbDevice.cpp \
diff --git a/core/jni/AndroidRuntime.cpp b/core/jni/AndroidRuntime.cpp
index a4dc824..02ccdda 100644
--- a/core/jni/AndroidRuntime.cpp
+++ b/core/jni/AndroidRuntime.cpp
@@ -77,6 +77,7 @@ extern int register_android_opengl_jni_GLES30(JNIEnv* env);
extern int register_android_hardware_Camera(JNIEnv *env);
extern int register_android_hardware_camera2_CameraMetadata(JNIEnv *env);
+extern int register_android_hardware_camera2_legacy_LegacyCameraDevice(JNIEnv *env);
extern int register_android_hardware_SensorManager(JNIEnv *env);
extern int register_android_hardware_SerialPort(JNIEnv *env);
extern int register_android_hardware_UsbDevice(JNIEnv *env);
@@ -1280,6 +1281,7 @@ static const RegJNIRec gRegJNI[] = {
REG_JNI(register_com_android_internal_util_VirtualRefBasePtr),
REG_JNI(register_android_hardware_Camera),
REG_JNI(register_android_hardware_camera2_CameraMetadata),
+ REG_JNI(register_android_hardware_camera2_legacy_LegacyCameraDevice),
REG_JNI(register_android_hardware_SensorManager),
REG_JNI(register_android_hardware_SerialPort),
REG_JNI(register_android_hardware_UsbDevice),
diff --git a/core/jni/android_hardware_Camera.cpp b/core/jni/android_hardware_Camera.cpp
index 307293f..3a53331 100644
--- a/core/jni/android_hardware_Camera.cpp
+++ b/core/jni/android_hardware_Camera.cpp
@@ -27,6 +27,7 @@
#include <cutils/properties.h>
#include <utils/Vector.h>
+#include <utils/Errors.h>
#include <gui/GLConsumer.h>
#include <gui/Surface.h>
@@ -464,7 +465,7 @@ static void android_hardware_Camera_getCameraInfo(JNIEnv *env, jobject thiz,
}
// connect to camera service
-static void android_hardware_Camera_native_setup(JNIEnv *env, jobject thiz,
+static jint android_hardware_Camera_native_setup(JNIEnv *env, jobject thiz,
jobject weak_this, jint cameraId, jstring clientPackageName)
{
// Convert jstring to String16
@@ -477,20 +478,19 @@ static void android_hardware_Camera_native_setup(JNIEnv *env, jobject thiz,
Camera::USE_CALLING_UID);
if (camera == NULL) {
- jniThrowRuntimeException(env, "Fail to connect to camera service");
- return;
+ return -EACCES;
}
// make sure camera hardware is alive
if (camera->getStatus() != NO_ERROR) {
- jniThrowRuntimeException(env, "Camera initialization failed");
- return;
+ return NO_INIT;
}
jclass clazz = env->GetObjectClass(thiz);
if (clazz == NULL) {
+ // This should never happen
jniThrowRuntimeException(env, "Can't find android/hardware/Camera");
- return;
+ return INVALID_OPERATION;
}
// We use a weak reference so the Camera object can be garbage collected.
@@ -501,6 +501,7 @@ static void android_hardware_Camera_native_setup(JNIEnv *env, jobject thiz,
// save context in opaque field
env->SetLongField(thiz, fields.context, (jlong)context.get());
+ return NO_ERROR;
}
// disconnect from camera service
@@ -538,9 +539,9 @@ static void android_hardware_Camera_release(JNIEnv *env, jobject thiz)
}
}
-static void android_hardware_Camera_setPreviewDisplay(JNIEnv *env, jobject thiz, jobject jSurface)
+static void android_hardware_Camera_setPreviewSurface(JNIEnv *env, jobject thiz, jobject jSurface)
{
- ALOGV("setPreviewDisplay");
+ ALOGV("setPreviewSurface");
sp<Camera> camera = get_native_camera(env, thiz, NULL);
if (camera == 0) return;
@@ -890,14 +891,14 @@ static JNINativeMethod camMethods[] = {
"(ILandroid/hardware/Camera$CameraInfo;)V",
(void*)android_hardware_Camera_getCameraInfo },
{ "native_setup",
- "(Ljava/lang/Object;ILjava/lang/String;)V",
+ "(Ljava/lang/Object;ILjava/lang/String;)I",
(void*)android_hardware_Camera_native_setup },
{ "native_release",
"()V",
(void*)android_hardware_Camera_release },
- { "setPreviewDisplay",
+ { "setPreviewSurface",
"(Landroid/view/Surface;)V",
- (void *)android_hardware_Camera_setPreviewDisplay },
+ (void *)android_hardware_Camera_setPreviewSurface },
{ "setPreviewTexture",
"(Landroid/graphics/SurfaceTexture;)V",
(void *)android_hardware_Camera_setPreviewTexture },
diff --git a/core/jni/android_hardware_camera2_CameraMetadata.cpp b/core/jni/android_hardware_camera2_CameraMetadata.cpp
index 3312109..0d2df80 100644
--- a/core/jni/android_hardware_camera2_CameraMetadata.cpp
+++ b/core/jni/android_hardware_camera2_CameraMetadata.cpp
@@ -518,7 +518,7 @@ static jint CameraMetadata_getTagFromKey(JNIEnv *env, jobject thiz, jstring keyN
SortedVector<String8> vendorSections;
size_t vendorSectionCount = 0;
- if (vTags != 0) {
+ if (vTags != NULL) {
vendorSections = vTags->getAllSectionNames();
vendorSectionCount = vendorSections.size();
}
@@ -592,7 +592,7 @@ static jint CameraMetadata_getTagFromKey(JNIEnv *env, jobject thiz, jstring keyN
"Could not find tag name for key '%s')", key);
return 0;
}
- } else if (vTags != 0) {
+ } else if (vTags != NULL) {
// Match vendor tags (typically com.*)
const String8 sectionName(section);
const String8 tagName(keyTagName);
diff --git a/core/jni/android_hardware_camera2_legacy_LegacyCameraDevice.cpp b/core/jni/android_hardware_camera2_legacy_LegacyCameraDevice.cpp
new file mode 100644
index 0000000..40e9544
--- /dev/null
+++ b/core/jni/android_hardware_camera2_legacy_LegacyCameraDevice.cpp
@@ -0,0 +1,419 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Legacy-CameraDevice-JNI"
+#include <utils/Log.h>
+#include <utils/Errors.h>
+#include <utils/Trace.h>
+
+#include "jni.h"
+#include "JNIHelp.h"
+#include "android_runtime/AndroidRuntime.h"
+#include "android_runtime/android_view_Surface.h"
+
+#include <ui/GraphicBuffer.h>
+#include <system/window.h>
+
+using namespace android;
+
+// fully-qualified class name
+#define CAMERA_DEVICE_CLASS_NAME "android/hardware/camera2/legacy/LegacyCameraDevice"
+#define CAMERA_DEVICE_BUFFER_SLACK 3
+
+#define ARRAY_SIZE(a) (sizeof(a)/sizeof(*(a)))
+
+/**
+ * Convert from RGBA 8888 (alpha ignored) to Y'CbCr using the conversion specified in ITU-R BT.601 for
+ * digital RGB with K_b = 0.114, and K_r = 0.299.
+ */
+static void rgbToYuv420(uint8_t* rgbBuf, int32_t width, int32_t height, uint8_t* yPlane,
+ uint8_t* uPlane, uint8_t* vPlane, size_t chromaStep, size_t yStride, size_t chromaStride) {
+ uint8_t R, G, B;
+ size_t index = 0;
+
+ int32_t cStrideDiff = chromaStride - width;
+
+ for (int32_t j = 0; j < height; j++) {
+ for (int32_t i = 0; i < width; i++) {
+ R = rgbBuf[index++];
+ G = rgbBuf[index++];
+ B = rgbBuf[index++];
+ *(yPlane + i) = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
+
+ if (j % 2 == 0 && i % 2 == 0){
+ *uPlane = (( -38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
+ *vPlane = (( 112 * R - 94 * G - 18 * B + 128) >> 8) + 128;
+ uPlane += chromaStep;
+ vPlane += chromaStep;
+ }
+ // Skip alpha
+ index++;
+ }
+ yPlane += yStride;
+ if (j % 2 == 0) {
+ uPlane += cStrideDiff;
+ vPlane += cStrideDiff;
+ }
+ }
+}
+
+static void rgbToYuv420(uint8_t* rgbBuf, int32_t width, int32_t height, android_ycbcr* ycbcr) {
+ size_t cStep = ycbcr->chroma_step;
+ size_t cStride = ycbcr->cstride;
+ size_t yStride = ycbcr->ystride;
+ rgbToYuv420(rgbBuf, width, height, reinterpret_cast<uint8_t*>(ycbcr->y),
+ reinterpret_cast<uint8_t*>(ycbcr->cb), reinterpret_cast<uint8_t*>(ycbcr->cr),
+ cStep, yStride, cStride);
+}
+
+static status_t configureSurface(const sp<ANativeWindow>& anw,
+ int32_t width,
+ int32_t height,
+ int32_t pixelFmt,
+ int32_t maxBufferSlack) {
+ status_t err = NO_ERROR;
+ err = native_window_set_buffers_dimensions(anw.get(), width, height);
+ if (err != NO_ERROR) {
+ ALOGE("%s: Failed to set native window buffer dimensions, error %s (%d).", __FUNCTION__,
+ strerror(-err), err);
+ return err;
+ }
+
+ err = native_window_set_buffers_format(anw.get(), pixelFmt);
+ if (err != NO_ERROR) {
+ ALOGE("%s: Failed to set native window buffer format, error %s (%d).", __FUNCTION__,
+ strerror(-err), err);
+ return err;
+ }
+
+ err = native_window_set_usage(anw.get(), GRALLOC_USAGE_SW_WRITE_OFTEN);
+ if (err != NO_ERROR) {
+ ALOGE("%s: Failed to set native window usage flag, error %s (%d).", __FUNCTION__,
+ strerror(-err), err);
+ return err;
+ }
+
+ int minUndequeuedBuffers;
+ err = anw.get()->query(anw.get(),
+ NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
+ &minUndequeuedBuffers);
+ if (err != NO_ERROR) {
+ ALOGE("%s: Failed to get native window min undequeued buffers, error %s (%d).",
+ __FUNCTION__, strerror(-err), err);
+ return err;
+ }
+
+ ALOGV("%s: Setting buffer count to %d", __FUNCTION__,
+ maxBufferSlack + 1 + minUndequeuedBuffers);
+ err = native_window_set_buffer_count(anw.get(), maxBufferSlack + 1 + minUndequeuedBuffers);
+ if (err != NO_ERROR) {
+ ALOGE("%s: Failed to set native window buffer count, error %s (%d).", __FUNCTION__,
+ strerror(-err), err);
+ return err;
+ }
+ return NO_ERROR;
+}
+
+/**
+ * Produce a frame in the given surface.
+ *
+ * Args:
+ * anw - a surface to produce a frame in.
+ * pixelBuffer - image buffer to generate a frame from.
+ * width - width of the pixelBuffer in pixels.
+ * height - height of the pixelBuffer in pixels.
+ * pixelFmt - format of the pixelBuffer, one of:
+ * HAL_PIXEL_FORMAT_YCrCb_420_SP,
+ * HAL_PIXEL_FORMAT_YCbCr_420_888,
+ * HAL_PIXEL_FORMAT_BLOB
+ * bufSize - the size of the pixelBuffer in bytes.
+ */
+static status_t produceFrame(const sp<ANativeWindow>& anw,
+ uint8_t* pixelBuffer,
+ int32_t width, // Width of the pixelBuffer
+ int32_t height, // Height of the pixelBuffer
+ int32_t pixelFmt, // Format of the pixelBuffer
+ int64_t bufSize) {
+ ATRACE_CALL();
+ status_t err = NO_ERROR;
+ ANativeWindowBuffer* anb;
+ ALOGV("%s: Dequeue buffer from %p",__FUNCTION__, anw.get());
+
+ // TODO: Switch to using Surface::lock and Surface::unlockAndPost
+ err = native_window_dequeue_buffer_and_wait(anw.get(), &anb);
+ if (err != NO_ERROR) return err;
+
+ sp<GraphicBuffer> buf(new GraphicBuffer(anb, /*keepOwnership*/false));
+
+ switch(pixelFmt) {
+ case HAL_PIXEL_FORMAT_YCrCb_420_SP: {
+ if (bufSize < width * height * 4) {
+ ALOGE("%s: PixelBuffer size %lld too small for given dimensions", __FUNCTION__,
+ bufSize);
+ return BAD_VALUE;
+ }
+ uint8_t* img = NULL;
+ ALOGV("%s: Lock buffer from %p for write", __FUNCTION__, anw.get());
+ err = buf->lock(GRALLOC_USAGE_SW_WRITE_OFTEN, (void**)(&img));
+ if (err != NO_ERROR) return err;
+
+ uint8_t* yPlane = img;
+ uint8_t* uPlane = img + height * width;
+ uint8_t* vPlane = uPlane + 1;
+ size_t chromaStep = 2;
+ size_t yStride = width;
+ size_t chromaStride = width;
+
+ rgbToYuv420(pixelBuffer, width, height, yPlane,
+ uPlane, vPlane, chromaStep, yStride, chromaStride);
+ break;
+ }
+ case HAL_PIXEL_FORMAT_YCbCr_420_888: {
+ // Software writes with YCbCr_420_888 format are unsupported
+ // by the gralloc module for now
+ if (bufSize < width * height * 4) {
+ ALOGE("%s: PixelBuffer size %lld too small for given dimensions", __FUNCTION__,
+ bufSize);
+ return BAD_VALUE;
+ }
+ android_ycbcr ycbcr = android_ycbcr();
+ ALOGV("%s: Lock buffer from %p for write", __FUNCTION__, anw.get());
+
+ err = buf->lockYCbCr(GRALLOC_USAGE_SW_WRITE_OFTEN, &ycbcr);
+ if (err != NO_ERROR) {
+ ALOGE("%s: Failed to lock ycbcr buffer, error %s (%d).", __FUNCTION__,
+ strerror(-err), err);
+ return err;
+ }
+ rgbToYuv420(pixelBuffer, width, height, &ycbcr);
+ break;
+ }
+ case HAL_PIXEL_FORMAT_BLOB: {
+ if (bufSize != width || height != 1) {
+ ALOGE("%s: Incorrect pixelBuffer size: %lld", __FUNCTION__, bufSize);
+ return BAD_VALUE;
+ }
+ int8_t* img = NULL;
+
+ ALOGV("%s: Lock buffer from %p for write", __FUNCTION__, anw.get());
+ err = buf->lock(GRALLOC_USAGE_SW_WRITE_OFTEN, (void**)(&img));
+ if (err != NO_ERROR) {
+ ALOGE("%s: Failed to lock buffer, error %s (%d).", __FUNCTION__, strerror(-err),
+ err);
+ return err;
+ }
+ memcpy(img, pixelBuffer, width);
+ break;
+ }
+ default: {
+ ALOGE("%s: Invalid pixel format in produceFrame: %x", __FUNCTION__, pixelFmt);
+ return BAD_VALUE;
+ }
+ }
+
+ ALOGV("%s: Unlock buffer from %p", __FUNCTION__, anw.get());
+ err = buf->unlock();
+ if (err != NO_ERROR) {
+ ALOGE("%s: Failed to unlock buffer, error %s (%d).", __FUNCTION__, strerror(-err), err);
+ return err;
+ }
+
+ ALOGV("%s: Queue buffer to %p", __FUNCTION__, anw.get());
+ err = anw->queueBuffer(anw.get(), buf->getNativeBuffer(), /*fenceFd*/-1);
+ if (err != NO_ERROR) {
+ ALOGE("%s: Failed to queue buffer, error %s (%d).", __FUNCTION__, strerror(-err), err);
+ return err;
+ }
+ return NO_ERROR;
+}
+
+static sp<ANativeWindow> getNativeWindow(JNIEnv* env, jobject surface) {
+ sp<ANativeWindow> anw;
+ if (surface) {
+ anw = android_view_Surface_getNativeWindow(env, surface);
+ if (env->ExceptionCheck()) {
+ return anw;
+ }
+ } else {
+ jniThrowNullPointerException(env, "surface");
+ return anw;
+ }
+ if (anw == NULL) {
+ jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException",
+ "Surface had no valid native window.");
+ return anw;
+ }
+ return anw;
+}
+
+extern "C" {
+
+static jint LegacyCameraDevice_nativeDetectSurfaceType(JNIEnv* env, jobject thiz, jobject surface) {
+ ALOGV("nativeDetectSurfaceType");
+ sp<ANativeWindow> anw;
+ if ((anw = getNativeWindow(env, surface)) == NULL) {
+ ALOGE("%s: Could not retrieve native window from surface.", __FUNCTION__);
+ return 0;
+ }
+ int32_t fmt = 0;
+ status_t err = anw->query(anw.get(), NATIVE_WINDOW_FORMAT, &fmt);
+ if(err != NO_ERROR) {
+ jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
+ "Error while querying surface pixel format (error code %d)", err);
+ return 0;
+ }
+ return fmt;
+}
+
+static void LegacyCameraDevice_nativeDetectSurfaceDimens(JNIEnv* env, jobject thiz,
+ jobject surface, jintArray dimens) {
+ ALOGV("nativeDetectSurfaceDimens");
+ sp<ANativeWindow> anw;
+ if ((anw = getNativeWindow(env, surface)) == NULL) {
+ ALOGE("%s: Could not retrieve native window from surface.", __FUNCTION__);
+ return;
+ }
+ int32_t dimenBuf[2];
+ status_t err = anw->query(anw.get(), NATIVE_WINDOW_WIDTH, dimenBuf);
+ if(err != NO_ERROR) {
+ jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
+ "Error while querying surface width (error code %d)", err);
+ return;
+ }
+ err = anw->query(anw.get(), NATIVE_WINDOW_HEIGHT, dimenBuf + 1);
+ if(err != NO_ERROR) {
+ jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
+ "Error while querying surface height (error code %d)", err);
+ return;
+ }
+ env->SetIntArrayRegion(dimens, /*start*/0, /*length*/ARRAY_SIZE(dimenBuf), dimenBuf);
+}
+
+static void LegacyCameraDevice_nativeConfigureSurface(JNIEnv* env, jobject thiz, jobject surface,
+ jint width, jint height, jint pixelFormat) {
+ ALOGV("nativeConfigureSurface");
+ sp<ANativeWindow> anw;
+ if ((anw = getNativeWindow(env, surface)) == NULL) {
+ ALOGE("%s: Could not retrieve native window from surface.", __FUNCTION__);
+ return;
+ }
+ status_t err = configureSurface(anw, width, height, pixelFormat, CAMERA_DEVICE_BUFFER_SLACK);
+ if (err != NO_ERROR) {
+ jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
+ "Error while configuring surface (error code %d)", err);
+ return;
+ }
+}
+
+static void LegacyCameraDevice_nativeProduceFrame(JNIEnv* env, jobject thiz, jobject surface,
+ jbyteArray pixelBuffer, jint width, jint height, jint pixelFormat) {
+ ALOGV("nativeProduceFrame");
+ sp<ANativeWindow> anw;
+
+ if ((anw = getNativeWindow(env, surface)) == NULL) {
+ ALOGE("%s: Could not retrieve native window from surface.", __FUNCTION__);
+ return;
+ }
+
+ if (pixelBuffer == NULL) {
+ jniThrowNullPointerException(env, "pixelBuffer");
+ return;
+ }
+
+ int32_t bufSize = static_cast<int32_t>(env->GetArrayLength(pixelBuffer));
+ jbyte* pixels = env->GetByteArrayElements(pixelBuffer, /*is_copy*/NULL);
+
+ if (pixels == NULL) {
+ jniThrowNullPointerException(env, "pixels");
+ return;
+ }
+
+ status_t err = produceFrame(anw, reinterpret_cast<uint8_t*>(pixels), width, height,
+ pixelFormat, bufSize);
+ env->ReleaseByteArrayElements(pixelBuffer, pixels, JNI_ABORT);
+
+ if (err != NO_ERROR) {
+ jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
+ "Error while producing frame (error code %d)", err);
+ return;
+ }
+}
+
+static void LegacyCameraDevice_nativeSetSurfaceFormat(JNIEnv* env, jobject thiz, jobject surface,
+ jint pixelFormat) {
+ ALOGV("nativeSetSurfaceFormat");
+ sp<ANativeWindow> anw;
+ if ((anw = getNativeWindow(env, surface)) == NULL) {
+ ALOGE("%s: Could not retrieve native window from surface.", __FUNCTION__);
+ return;
+ }
+ status_t err = native_window_set_buffers_format(anw.get(), pixelFormat);
+ if (err != NO_ERROR) {
+ jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
+ "Error while setting surface format (error code %d)", err);
+ return;
+ }
+}
+
+static void LegacyCameraDevice_nativeSetSurfaceDimens(JNIEnv* env, jobject thiz, jobject surface,
+ jint width, jint height) {
+ ALOGV("nativeSetSurfaceDimens");
+ sp<ANativeWindow> anw;
+ if ((anw = getNativeWindow(env, surface)) == NULL) {
+ ALOGE("%s: Could not retrieve native window from surface.", __FUNCTION__);
+ return;
+ }
+ status_t err = native_window_set_buffers_dimensions(anw.get(), width, height);
+ if (err != NO_ERROR) {
+ jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
+ "Error while setting surface dimensions (error code %d)", err);
+ return;
+ }
+}
+
+} // extern "C"
+
+static JNINativeMethod gCameraDeviceMethods[] = {
+ { "nativeDetectSurfaceType",
+ "(Landroid/view/Surface;)I",
+ (void *)LegacyCameraDevice_nativeDetectSurfaceType },
+ { "nativeDetectSurfaceDimens",
+ "(Landroid/view/Surface;[I)V",
+ (void *)LegacyCameraDevice_nativeDetectSurfaceDimens },
+ { "nativeConfigureSurface",
+ "(Landroid/view/Surface;III)V",
+ (void *)LegacyCameraDevice_nativeConfigureSurface },
+ { "nativeProduceFrame",
+ "(Landroid/view/Surface;[BIII)V",
+ (void *)LegacyCameraDevice_nativeProduceFrame },
+ { "nativeSetSurfaceFormat",
+ "(Landroid/view/Surface;I)V",
+ (void *)LegacyCameraDevice_nativeSetSurfaceFormat },
+ { "nativeSetSurfaceDimens",
+ "(Landroid/view/Surface;II)V",
+ (void *)LegacyCameraDevice_nativeSetSurfaceDimens },
+};
+
+// Get all the required offsets in java class and register native functions
+int register_android_hardware_camera2_legacy_LegacyCameraDevice(JNIEnv* env)
+{
+ // Register native functions
+ return AndroidRuntime::registerNativeMethods(env,
+ CAMERA_DEVICE_CLASS_NAME,
+ gCameraDeviceMethods,
+ NELEM(gCameraDeviceMethods));
+}
+
diff --git a/media/jni/android_media_ImageReader.cpp b/media/jni/android_media_ImageReader.cpp
index 7a86811..36cfb0f 100644
--- a/media/jni/android_media_ImageReader.cpp
+++ b/media/jni/android_media_ImageReader.cpp
@@ -764,21 +764,30 @@ static jint ImageReader_imageSetup(JNIEnv* env, jobject thiz,
return -1;
}
- if (ctx->getBufferFormat() != buffer->format) {
- // Return the buffer to the queue.
- consumer->unlockBuffer(*buffer);
- ctx->returnLockedBuffer(buffer);
-
- // Throw exception
- ALOGE("Producer output buffer format: 0x%x, ImageReader configured format: 0x%x",
- buffer->format, ctx->getBufferFormat());
- String8 msg;
- msg.appendFormat("The producer output buffer format 0x%x doesn't "
- "match the ImageReader's configured buffer format 0x%x.",
- buffer->format, ctx->getBufferFormat());
- jniThrowException(env, "java/lang/UnsupportedOperationException",
- msg.string());
- return -1;
+ int imgReaderFmt = ctx->getBufferFormat();
+ int bufFmt = buffer->format;
+ if (imgReaderFmt != bufFmt) {
+ // Special casing for when producer switches format
+ if (imgReaderFmt == HAL_PIXEL_FORMAT_YCbCr_420_888 && bufFmt ==
+ HAL_PIXEL_FORMAT_YCrCb_420_SP) {
+ ctx->setBufferFormat(HAL_PIXEL_FORMAT_YCrCb_420_SP);
+ ALOGV("%s: Overriding YUV_420_888 to NV21.", __FUNCTION__);
+ } else {
+ // Return the buffer to the queue.
+ consumer->unlockBuffer(*buffer);
+ ctx->returnLockedBuffer(buffer);
+
+ // Throw exception
+ ALOGE("Producer output buffer format: 0x%x, ImageReader configured format: 0x%x",
+ buffer->format, ctx->getBufferFormat());
+ String8 msg;
+ msg.appendFormat("The producer output buffer format 0x%x doesn't "
+ "match the ImageReader's configured buffer format 0x%x.",
+ buffer->format, ctx->getBufferFormat());
+ jniThrowException(env, "java/lang/UnsupportedOperationException",
+ msg.string());
+ return -1;
+ }
}
// Set SurfaceImage instance member variables
Image_setBuffer(env, image, buffer);