OSDN Git Service

Camera2: Basic ZSL and precapture trigger support.
author: Eino-Ville Talvala <etalvala@google.com>
Thu, 30 Aug 2012 00:37:16 +0000 (17:37 -0700)
committer: Android (Google) Code Review <android-gerrit@google.com>
Thu, 6 Sep 2012 17:38:22 +0000 (10:38 -0700)
- Add capture sequencer to control still image capture process
- Use precapture trigger for standard capture in sequencer
- Add infrastructure for reprocessing streams
- Add ZSL processor to maintain ZSL queue
- Add ZSL capture sequence to sequencer

This patch sets up ZSL mode and precapture triggers.

For now, to enable ZSL mode, set the system property camera.zsl_mode
to 1.

Bug: 6243944

Change-Id: Icf8cb1a83a7c11a152a11007c8f3c54f8ea1c70c

19 files changed:
services/camera/libcameraservice/Android.mk
services/camera/libcameraservice/Camera2Client.cpp
services/camera/libcameraservice/Camera2Client.h
services/camera/libcameraservice/Camera2Device.cpp
services/camera/libcameraservice/Camera2Device.h
services/camera/libcameraservice/camera2/CallbackProcessor.cpp
services/camera/libcameraservice/camera2/CallbackProcessor.h
services/camera/libcameraservice/camera2/CameraMetadata.cpp
services/camera/libcameraservice/camera2/CameraMetadata.h
services/camera/libcameraservice/camera2/CaptureSequencer.cpp [new file with mode: 0644]
services/camera/libcameraservice/camera2/CaptureSequencer.h [new file with mode: 0644]
services/camera/libcameraservice/camera2/FrameProcessor.cpp
services/camera/libcameraservice/camera2/FrameProcessor.h
services/camera/libcameraservice/camera2/JpegProcessor.cpp [moved from services/camera/libcameraservice/camera2/CaptureProcessor.cpp with 90% similarity]
services/camera/libcameraservice/camera2/JpegProcessor.h [moved from services/camera/libcameraservice/camera2/CaptureProcessor.h with 83% similarity]
services/camera/libcameraservice/camera2/Parameters.cpp
services/camera/libcameraservice/camera2/Parameters.h
services/camera/libcameraservice/camera2/ZslProcessor.cpp [new file with mode: 0644]
services/camera/libcameraservice/camera2/ZslProcessor.h [new file with mode: 0644]

index 1370c62..e27a065 100644 (file)
@@ -14,8 +14,10 @@ LOCAL_SRC_FILES:=               \
     camera2/CameraMetadata.cpp \
     camera2/Parameters.cpp \
     camera2/FrameProcessor.cpp \
-    camera2/CaptureProcessor.cpp \
-    camera2/CallbackProcessor.cpp
+    camera2/JpegProcessor.cpp \
+    camera2/CallbackProcessor.cpp \
+    camera2/ZslProcessor.cpp \
+    camera2/CaptureSequencer.cpp \
 
 LOCAL_SHARED_LIBRARIES:= \
     libui \
index acd290d..5400604 100644 (file)
@@ -59,12 +59,21 @@ Camera2Client::Camera2Client(const sp<CameraService>& cameraService,
         mRecordingHeapCount(kDefaultRecordingHeapCount)
 {
     ATRACE_CALL();
-    ALOGV("%s: Created client for camera %d", __FUNCTION__, cameraId);
+    ALOGI("Camera %d: Opened", cameraId);
 
     mDevice = new Camera2Device(cameraId);
 
     SharedParameters::Lock l(mParameters);
     l.mParameters.state = Parameters::DISCONNECTED;
+
+    char value[PROPERTY_VALUE_MAX];
+    property_get("camera.zsl_mode", value, "0");
+    if (!strcmp(value,"1")) {
+        ALOGI("Camera %d: Enabling ZSL mode", cameraId);
+        l.mParameters.zslMode = true;
+    } else {
+        l.mParameters.zslMode = false;
+    }
 }
 
 status_t Camera2Client::checkPid(const char* checkLocation) const {
@@ -100,20 +109,32 @@ status_t Camera2Client::initialize(camera_module_t *module)
         return NO_INIT;
     }
 
+    String8 threadName;
+
     mFrameProcessor = new FrameProcessor(this);
-    String8 frameThreadName = String8::format("Camera2Client[%d]::FrameProcessor",
+    threadName = String8::format("Camera2Client[%d]::FrameProcessor",
+            mCameraId);
+    mFrameProcessor->run(threadName.string());
+
+    mCaptureSequencer = new CaptureSequencer(this);
+    threadName = String8::format("Camera2Client[%d]::CaptureSequencer",
+            mCameraId);
+    mCaptureSequencer->run(threadName.string());
+
+    mJpegProcessor = new JpegProcessor(this, mCaptureSequencer);
+    threadName = String8::format("Camera2Client[%d]::JpegProcessor",
             mCameraId);
-    mFrameProcessor->run(frameThreadName.string());
+    mJpegProcessor->run(threadName.string());
 
-    mCaptureProcessor = new CaptureProcessor(this);
-    String8 captureThreadName =
-            String8::format("Camera2Client[%d]::CaptureProcessor", mCameraId);
-    mCaptureProcessor->run(captureThreadName.string());
+    mZslProcessor = new ZslProcessor(this, mCaptureSequencer);
+    threadName = String8::format("Camera2Client[%d]::ZslProcessor",
+            mCameraId);
+    mZslProcessor->run(threadName.string());
 
     mCallbackProcessor = new CallbackProcessor(this);
-    String8 callbackThreadName =
-            String8::format("Camera2Client[%d]::CallbackProcessor", mCameraId);
-    mCallbackProcessor->run(callbackThreadName.string());
+    threadName = String8::format("Camera2Client[%d]::CallbackProcessor",
+            mCameraId);
+    mCallbackProcessor->run(threadName.string());
 
     if (gLogLevel >= 1) {
         ALOGD("%s: Default parameters converted from camera %d:", __FUNCTION__,
@@ -126,7 +147,7 @@ status_t Camera2Client::initialize(camera_module_t *module)
 
 Camera2Client::~Camera2Client() {
     ATRACE_CALL();
-    ALOGV("%s: Camera %d: Shutting down client.", __FUNCTION__, mCameraId);
+    ALOGV("Camera %d: Shutting down", mCameraId);
 
     mDestructionStarted = true;
 
@@ -135,7 +156,7 @@ Camera2Client::~Camera2Client() {
     disconnect();
 
     mFrameProcessor->requestExit();
-    ALOGV("%s: Camera %d: Shutdown complete", __FUNCTION__, mCameraId);
+    ALOGI("Camera %d: Closed", mCameraId);
 }
 
 status_t Camera2Client::dump(int fd, const Vector<String16>& args) {
@@ -299,10 +320,12 @@ status_t Camera2Client::dump(int fd, const Vector<String16>& args) {
             p.videoStabilization ? "enabled" : "disabled");
 
     result.append("  Current streams:\n");
-    result.appendFormat("    Preview stream ID: %d\n", mPreviewStreamId);
+    result.appendFormat("    Preview stream ID: %d\n",
+            getPreviewStreamId());
     result.appendFormat("    Capture stream ID: %d\n",
-            mCaptureProcessor->getStreamId());
-    result.appendFormat("    Recording stream ID: %d\n", mRecordingStreamId);
+            getCaptureStreamId());
+    result.appendFormat("    Recording stream ID: %d\n",
+            getRecordingStreamId());
 
     result.append("  Current requests:\n");
     if (mPreviewRequest.entryCount() != 0) {
@@ -314,15 +337,6 @@ status_t Camera2Client::dump(int fd, const Vector<String16>& args) {
         write(fd, result.string(), result.size());
     }
 
-    if (mCaptureRequest.entryCount() != 0) {
-        result = "    Capture request:\n";
-        write(fd, result.string(), result.size());
-        mCaptureRequest.dump(fd, 2, 6);
-    } else {
-        result = "    Capture request: undefined\n";
-        write(fd, result.string(), result.size());
-    }
-
     if (mRecordingRequest.entryCount() != 0) {
         result = "    Recording request:\n";
         write(fd, result.string(), result.size());
@@ -332,6 +346,8 @@ status_t Camera2Client::dump(int fd, const Vector<String16>& args) {
         write(fd, result.string(), result.size());
     }
 
+    mCaptureSequencer->dump(fd, args);
+
     mFrameProcessor->dump(fd, args);
 
     result = "  Device dump:\n";
@@ -366,7 +382,7 @@ void Camera2Client::disconnect() {
         mPreviewStreamId = NO_STREAM;
     }
 
-    mCaptureProcessor->deleteStream();
+    mJpegProcessor->deleteStream();
 
     if (mRecordingStreamId != NO_STREAM) {
         mDevice->deleteStream(mRecordingStreamId);
@@ -623,6 +639,14 @@ status_t Camera2Client::startPreviewL(Parameters &params, bool restart) {
             return res;
         }
     }
+    if (params.zslMode) {
+        res = mZslProcessor->updateStream(params);
+        if (res != OK) {
+            ALOGE("%s: Camera %d: Unable to update ZSL stream: %s (%d)",
+                    __FUNCTION__, mCameraId, strerror(-res), res);
+            return res;
+        }
+    }
 
     if (mPreviewRequest.entryCount() == 0) {
         res = updatePreviewRequest(params);
@@ -633,18 +657,20 @@ status_t Camera2Client::startPreviewL(Parameters &params, bool restart) {
         }
     }
 
+    Vector<uint8_t> outputStreams;
+    outputStreams.push(getPreviewStreamId());
+
     if (callbacksEnabled) {
-        uint8_t outputStreams[2] =
-                { mPreviewStreamId, mCallbackProcessor->getStreamId() };
-        res = mPreviewRequest.update(
-                ANDROID_REQUEST_OUTPUT_STREAMS,
-                outputStreams, 2);
-    } else {
-        uint8_t outputStreams[1] = { mPreviewStreamId };
-        res = mPreviewRequest.update(
-                ANDROID_REQUEST_OUTPUT_STREAMS,
-                outputStreams, 1);
+        outputStreams.push(getCallbackStreamId());
     }
+    if (params.zslMode) {
+        outputStreams.push(getZslStreamId());
+    }
+
+    res = mPreviewRequest.update(
+        ANDROID_REQUEST_OUTPUT_STREAMS,
+        outputStreams);
+
     if (res != OK) {
         ALOGE("%s: Camera %d: Unable to set up preview request: %s (%d)",
                 __FUNCTION__, mCameraId, strerror(-res), res);
@@ -817,14 +843,19 @@ status_t Camera2Client::startRecordingL(Parameters &params, bool restart) {
     }
 
     if (callbacksEnabled) {
-        uint8_t outputStreams[3] =
-                { mPreviewStreamId, mRecordingStreamId,
-                  mCallbackProcessor->getStreamId() };
+        uint8_t outputStreams[3] ={
+            getPreviewStreamId(),
+            getRecordingStreamId(),
+            getCallbackStreamId()
+        };
         res = mRecordingRequest.update(
                 ANDROID_REQUEST_OUTPUT_STREAMS,
                 outputStreams, 3);
     } else {
-        uint8_t outputStreams[2] = { mPreviewStreamId, mRecordingStreamId };
+        uint8_t outputStreams[2] = {
+            getPreviewStreamId(),
+            getRecordingStreamId()
+        };
         res = mRecordingRequest.update(
                 ANDROID_REQUEST_OUTPUT_STREAMS,
                 outputStreams, 2);
@@ -1020,8 +1051,18 @@ status_t Camera2Client::takePicture(int msgType) {
                     __FUNCTION__, mCameraId);
             return INVALID_OPERATION;
         case Parameters::PREVIEW:
-        case Parameters::RECORD:
             // Good to go for takePicture
+            res = commandStopFaceDetectionL(l.mParameters);
+            if (res != OK) {
+                ALOGE("%s: Camera %d: Unable to stop face detection for still capture",
+                        __FUNCTION__, mCameraId);
+                return res;
+            }
+            l.mParameters.state = Parameters::STILL_CAPTURE;
+            break;
+        case Parameters::RECORD:
+            // Good to go for video snapshot
+            l.mParameters.state = Parameters::VIDEO_SNAPSHOT;
             break;
         case Parameters::STILL_CAPTURE:
         case Parameters::VIDEO_SNAPSHOT:
@@ -1032,130 +1073,20 @@ status_t Camera2Client::takePicture(int msgType) {
 
     ALOGV("%s: Camera %d: Starting picture capture", __FUNCTION__, mCameraId);
 
-    res = mCaptureProcessor->updateStream(l.mParameters);
+    res = mJpegProcessor->updateStream(l.mParameters);
     if (res != OK) {
         ALOGE("%s: Camera %d: Can't set up still image stream: %s (%d)",
                 __FUNCTION__, mCameraId, strerror(-res), res);
         return res;
     }
 
-    if (mCaptureRequest.entryCount() == 0) {
-        res = updateCaptureRequest(l.mParameters);
-        if (res != OK) {
-            ALOGE("%s: Camera %d: Can't create still image capture request: "
-                    "%s (%d)", __FUNCTION__, mCameraId, strerror(-res), res);
-            return res;
-        }
-    }
-
-    bool callbacksEnabled = l.mParameters.previewCallbackFlags &
-            CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK;
-    bool recordingEnabled = (l.mParameters.state == Parameters::RECORD);
-
-    int captureStreamId = mCaptureProcessor->getStreamId();
-
-    int streamSwitch = (callbacksEnabled ? 0x2 : 0x0) +
-            (recordingEnabled ? 0x1 : 0x0);
-    switch ( streamSwitch ) {
-        case 0: { // No recording, callbacks
-            uint8_t streamIds[2] = {
-                mPreviewStreamId,
-                captureStreamId
-            };
-            res = mCaptureRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS,
-                    streamIds, 2);
-            break;
-        }
-        case 1: { // Recording
-            uint8_t streamIds[3] = {
-                mPreviewStreamId,
-                mRecordingStreamId,
-                captureStreamId
-            };
-            res = mCaptureRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS,
-                    streamIds, 3);
-            break;
-        }
-        case 2: { // Callbacks
-            uint8_t streamIds[3] = {
-                mPreviewStreamId,
-                mCallbackProcessor->getStreamId(),
-                captureStreamId
-            };
-            res = mCaptureRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS,
-                    streamIds, 3);
-            break;
-        }
-        case 3: { // Both
-            uint8_t streamIds[4] = {
-                mPreviewStreamId,
-                mCallbackProcessor->getStreamId(),
-                mRecordingStreamId,
-                captureStreamId
-            };
-            res = mCaptureRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS,
-                    streamIds, 4);
-            break;
-        }
-    };
-    if (res != OK) {
-        ALOGE("%s: Camera %d: Unable to set up still image capture request: "
-                "%s (%d)",
-                __FUNCTION__, mCameraId, strerror(-res), res);
-        return res;
-    }
-    res = mCaptureRequest.sort();
-    if (res != OK) {
-        ALOGE("%s: Camera %d: Unable to sort capture request: %s (%d)",
-                __FUNCTION__, mCameraId, strerror(-res), res);
-        return res;
-    }
-
-    CameraMetadata captureCopy = mCaptureRequest;
-    if (captureCopy.entryCount() == 0) {
-        ALOGE("%s: Camera %d: Unable to copy capture request for HAL device",
-                __FUNCTION__, mCameraId);
-        return NO_MEMORY;
-    }
-
-    if (l.mParameters.state == Parameters::PREVIEW) {
-        res = mDevice->clearStreamingRequest();
-        if (res != OK) {
-            ALOGE("%s: Camera %d: Unable to stop preview for still capture: "
-                    "%s (%d)",
-                    __FUNCTION__, mCameraId, strerror(-res), res);
-            return res;
-        }
-    }
-    // TODO: Capture should be atomic with setStreamingRequest here
-    res = mDevice->capture(captureCopy);
+    res = mCaptureSequencer->startCapture();
     if (res != OK) {
-        ALOGE("%s: Camera %d: Unable to submit still image capture request: "
-                "%s (%d)",
+        ALOGE("%s: Camera %d: Unable to start capture: %s (%d)",
                 __FUNCTION__, mCameraId, strerror(-res), res);
-        return res;
-    }
-
-    switch (l.mParameters.state) {
-        case Parameters::PREVIEW:
-            l.mParameters.state = Parameters::STILL_CAPTURE;
-            res = commandStopFaceDetectionL(l.mParameters);
-            if (res != OK) {
-                ALOGE("%s: Camera %d: Unable to stop face detection for still capture",
-                        __FUNCTION__, mCameraId);
-                return res;
-            }
-            break;
-        case Parameters::RECORD:
-            l.mParameters.state = Parameters::VIDEO_SNAPSHOT;
-            break;
-        default:
-            ALOGE("%s: Camera %d: Unknown state for still capture!",
-                    __FUNCTION__, mCameraId);
-            return INVALID_OPERATION;
     }
 
-    return OK;
+    return res;
 }
 
 status_t Camera2Client::setParameters(const String8& params) {
@@ -1501,6 +1432,7 @@ void Camera2Client::notifyAutoFocus(uint8_t newState, int triggerId) {
 void Camera2Client::notifyAutoExposure(uint8_t newState, int triggerId) {
     ALOGV("%s: Autoexposure state now %d, last trigger %d",
             __FUNCTION__, newState, triggerId);
+    mCaptureSequencer->notifyAutoExposure(newState, triggerId);
 }
 
 void Camera2Client::notifyAutoWhitebalance(uint8_t newState, int triggerId) {
@@ -1508,7 +1440,7 @@ void Camera2Client::notifyAutoWhitebalance(uint8_t newState, int triggerId) {
             __FUNCTION__, newState, triggerId);
 }
 
-int Camera2Client::getCameraId() {
+int Camera2Client::getCameraId() const {
     return mCameraId;
 }
 
@@ -1520,6 +1452,35 @@ camera2::SharedParameters& Camera2Client::getParameters() {
     return mParameters;
 }
 
+int Camera2Client::getPreviewStreamId() const {
+    return mPreviewStreamId;
+}
+
+int Camera2Client::getCaptureStreamId() const {
+    return mJpegProcessor->getStreamId();
+}
+
+int Camera2Client::getCallbackStreamId() const {
+    return mCallbackProcessor->getStreamId();
+}
+
+int Camera2Client::getRecordingStreamId() const {
+    return mRecordingStreamId;
+}
+
+int Camera2Client::getZslStreamId() const {
+    return mZslProcessor->getStreamId();
+}
+
+status_t Camera2Client::registerFrameListener(int32_t id,
+        wp<camera2::FrameProcessor::FilteredListener> listener) {
+    return mFrameProcessor->registerListener(id, listener);
+}
+
+status_t Camera2Client::removeFrameListener(int32_t id) {
+    return mFrameProcessor->removeListener(id);
+}
+
 Camera2Client::SharedCameraClient::Lock::Lock(SharedCameraClient &client):
         mCameraClient(client.mCameraClient),
         mSharedClient(client) {
@@ -1546,6 +1507,10 @@ void Camera2Client::SharedCameraClient::clear() {
     mCameraClient.clear();
 }
 
+const int32_t Camera2Client::kPreviewRequestId;
+const int32_t Camera2Client::kRecordRequestId;
+const int32_t Camera2Client::kFirstCaptureRequestId;
+
 void Camera2Client::onRecordingFrameAvailable() {
     ATRACE_CALL();
     status_t res;
@@ -1656,13 +1621,6 @@ status_t Camera2Client::updateRequests(const Parameters &params) {
                 __FUNCTION__, mCameraId, strerror(-res), res);
         return res;
     }
-    res = updateCaptureRequest(params);
-    if (res != OK) {
-        ALOGE("%s: Camera %d: Unable to update capture request: %s (%d)",
-                __FUNCTION__, mCameraId, strerror(-res), res);
-        return res;
-    }
-
     res = updateRecordingRequest(params);
     if (res != OK) {
         ALOGE("%s: Camera %d: Unable to update recording request: %s (%d)",
@@ -1761,7 +1719,7 @@ status_t Camera2Client::updatePreviewRequest(const Parameters &params) {
         }
     }
 
-    res = updateRequestCommon(&mPreviewRequest, params);
+    res = params.updateRequest(&mPreviewRequest);
     if (res != OK) {
         ALOGE("%s: Camera %d: Unable to update common entries of preview "
                 "request: %s (%d)", __FUNCTION__, mCameraId,
@@ -1769,65 +1727,8 @@ status_t Camera2Client::updatePreviewRequest(const Parameters &params) {
         return res;
     }
 
-    return OK;
-}
-
-status_t Camera2Client::updateCaptureRequest(const Parameters &params) {
-    ATRACE_CALL();
-    status_t res;
-    if (mCaptureRequest.entryCount() == 0) {
-        res = mDevice->createDefaultRequest(CAMERA2_TEMPLATE_STILL_CAPTURE,
-                &mCaptureRequest);
-        if (res != OK) {
-            ALOGE("%s: Camera %d: Unable to create default still image request:"
-                    " %s (%d)", __FUNCTION__, mCameraId, strerror(-res), res);
-            return res;
-        }
-    }
-
-    res = updateRequestCommon(&mCaptureRequest, params);
-    if (res != OK) {
-        ALOGE("%s: Camera %d: Unable to update common entries of capture "
-                "request: %s (%d)", __FUNCTION__, mCameraId,
-                strerror(-res), res);
-        return res;
-    }
-
-    res = mCaptureRequest.update(ANDROID_JPEG_THUMBNAIL_SIZE,
-            params.jpegThumbSize, 2);
-    if (res != OK) return res;
-    res = mCaptureRequest.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
-            &params.jpegThumbQuality, 1);
-    if (res != OK) return res;
-    res = mCaptureRequest.update(ANDROID_JPEG_QUALITY,
-            &params.jpegQuality, 1);
-    if (res != OK) return res;
-    res = mCaptureRequest.update(
-            ANDROID_JPEG_ORIENTATION,
-            &params.jpegRotation, 1);
-    if (res != OK) return res;
-
-    if (params.gpsEnabled) {
-        res = mCaptureRequest.update(
-                ANDROID_JPEG_GPS_COORDINATES,
-                params.gpsCoordinates, 3);
-        if (res != OK) return res;
-        res = mCaptureRequest.update(
-                ANDROID_JPEG_GPS_TIMESTAMP,
-                &params.gpsTimestamp, 1);
-        if (res != OK) return res;
-        res = mCaptureRequest.update(
-                ANDROID_JPEG_GPS_PROCESSING_METHOD,
-                params.gpsProcessingMethod);
-        if (res != OK) return res;
-    } else {
-        res = mCaptureRequest.erase(ANDROID_JPEG_GPS_COORDINATES);
-        if (res != OK) return res;
-        res = mCaptureRequest.erase(ANDROID_JPEG_GPS_TIMESTAMP);
-        if (res != OK) return res;
-        res = mCaptureRequest.erase(ANDROID_JPEG_GPS_PROCESSING_METHOD);
-        if (res != OK) return res;
-    }
+    res = mPreviewRequest.update(ANDROID_REQUEST_ID,
+            &kPreviewRequestId, 1);
 
     return OK;
 }
@@ -1845,7 +1746,7 @@ status_t Camera2Client::updateRecordingRequest(const Parameters &params) {
         }
     }
 
-    res = updateRequestCommon(&mRecordingRequest, params);
+    res = params.updateRequest(&mRecordingRequest);
     if (res != OK) {
         ALOGE("%s: Camera %d: Unable to update common entries of recording "
                 "request: %s (%d)", __FUNCTION__, mCameraId,
@@ -1913,197 +1814,6 @@ status_t Camera2Client::updateRecordingStream(const Parameters &params) {
     return OK;
 }
 
-status_t Camera2Client::updateRequestCommon(CameraMetadata *request,
-        const Parameters &params) const {
-    ATRACE_CALL();
-    status_t res;
-    res = request->update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
-            params.previewFpsRange, 2);
-    if (res != OK) return res;
-
-    uint8_t wbMode = params.autoWhiteBalanceLock ?
-            (uint8_t)ANDROID_CONTROL_AWB_LOCKED : params.wbMode;
-    res = request->update(ANDROID_CONTROL_AWB_MODE,
-            &wbMode, 1);
-    if (res != OK) return res;
-    res = request->update(ANDROID_CONTROL_EFFECT_MODE,
-            &params.effectMode, 1);
-    if (res != OK) return res;
-    res = request->update(ANDROID_CONTROL_AE_ANTIBANDING_MODE,
-            &params.antibandingMode, 1);
-    if (res != OK) return res;
-
-    uint8_t controlMode =
-            (params.sceneMode == ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED) ?
-            ANDROID_CONTROL_AUTO : ANDROID_CONTROL_USE_SCENE_MODE;
-    res = request->update(ANDROID_CONTROL_MODE,
-            &controlMode, 1);
-    if (res != OK) return res;
-    if (controlMode == ANDROID_CONTROL_USE_SCENE_MODE) {
-        res = request->update(ANDROID_CONTROL_SCENE_MODE,
-                &params.sceneMode, 1);
-        if (res != OK) return res;
-    }
-
-    uint8_t flashMode = ANDROID_FLASH_OFF;
-    uint8_t aeMode;
-    switch (params.flashMode) {
-        case Parameters::FLASH_MODE_OFF:
-            aeMode = ANDROID_CONTROL_AE_ON; break;
-        case Parameters::FLASH_MODE_AUTO:
-            aeMode = ANDROID_CONTROL_AE_ON_AUTO_FLASH; break;
-        case Parameters::FLASH_MODE_ON:
-            aeMode = ANDROID_CONTROL_AE_ON_ALWAYS_FLASH; break;
-        case Parameters::FLASH_MODE_TORCH:
-            aeMode = ANDROID_CONTROL_AE_ON;
-            flashMode = ANDROID_FLASH_TORCH;
-            break;
-        case Parameters::FLASH_MODE_RED_EYE:
-            aeMode = ANDROID_CONTROL_AE_ON_AUTO_FLASH_REDEYE; break;
-        default:
-            ALOGE("%s: Camera %d: Unknown flash mode %d", __FUNCTION__,
-                    mCameraId, params.flashMode);
-            return BAD_VALUE;
-    }
-    if (params.autoExposureLock) aeMode = ANDROID_CONTROL_AE_LOCKED;
-
-    res = request->update(ANDROID_FLASH_MODE,
-            &flashMode, 1);
-    if (res != OK) return res;
-    res = request->update(ANDROID_CONTROL_AE_MODE,
-            &aeMode, 1);
-    if (res != OK) return res;
-
-    float focusDistance = 0; // infinity focus in diopters
-    uint8_t focusMode;
-    switch (params.focusMode) {
-        case Parameters::FOCUS_MODE_AUTO:
-        case Parameters::FOCUS_MODE_MACRO:
-        case Parameters::FOCUS_MODE_CONTINUOUS_VIDEO:
-        case Parameters::FOCUS_MODE_CONTINUOUS_PICTURE:
-        case Parameters::FOCUS_MODE_EDOF:
-            focusMode = params.focusMode;
-            break;
-        case Parameters::FOCUS_MODE_INFINITY:
-        case Parameters::FOCUS_MODE_FIXED:
-            focusMode = ANDROID_CONTROL_AF_OFF;
-            break;
-        default:
-            ALOGE("%s: Camera %d: Unknown focus mode %d", __FUNCTION__,
-                    mCameraId, params.focusMode);
-            return BAD_VALUE;
-    }
-    res = request->update(ANDROID_LENS_FOCUS_DISTANCE,
-            &focusDistance, 1);
-    if (res != OK) return res;
-    res = request->update(ANDROID_CONTROL_AF_MODE,
-            &focusMode, 1);
-    if (res != OK) return res;
-
-    size_t focusingAreasSize = params.focusingAreas.size() * 5;
-    int32_t *focusingAreas = new int32_t[focusingAreasSize];
-    for (size_t i = 0; i < focusingAreasSize; i += 5) {
-        if (params.focusingAreas[i].weight != 0) {
-            focusingAreas[i + 0] =
-                    params.normalizedXToArray(params.focusingAreas[i].left);
-            focusingAreas[i + 1] =
-                    params.normalizedYToArray(params.focusingAreas[i].top);
-            focusingAreas[i + 2] =
-                    params.normalizedXToArray(params.focusingAreas[i].right);
-            focusingAreas[i + 3] =
-                    params.normalizedYToArray(params.focusingAreas[i].bottom);
-        } else {
-            focusingAreas[i + 0] = 0;
-            focusingAreas[i + 1] = 0;
-            focusingAreas[i + 2] = 0;
-            focusingAreas[i + 3] = 0;
-        }
-        focusingAreas[i + 4] = params.focusingAreas[i].weight;
-    }
-    res = request->update(ANDROID_CONTROL_AF_REGIONS,
-            focusingAreas,focusingAreasSize);
-    if (res != OK) return res;
-    delete[] focusingAreas;
-
-    res = request->update(ANDROID_CONTROL_AE_EXP_COMPENSATION,
-            &params.exposureCompensation, 1);
-    if (res != OK) return res;
-
-    size_t meteringAreasSize = params.meteringAreas.size() * 5;
-    int32_t *meteringAreas = new int32_t[meteringAreasSize];
-    for (size_t i = 0; i < meteringAreasSize; i += 5) {
-        if (params.meteringAreas[i].weight != 0) {
-            meteringAreas[i + 0] =
-                params.normalizedXToArray(params.meteringAreas[i].left);
-            meteringAreas[i + 1] =
-                params.normalizedYToArray(params.meteringAreas[i].top);
-            meteringAreas[i + 2] =
-                params.normalizedXToArray(params.meteringAreas[i].right);
-            meteringAreas[i + 3] =
-                params.normalizedYToArray(params.meteringAreas[i].bottom);
-        } else {
-            meteringAreas[i + 0] = 0;
-            meteringAreas[i + 1] = 0;
-            meteringAreas[i + 2] = 0;
-            meteringAreas[i + 3] = 0;
-        }
-        meteringAreas[i + 4] = params.meteringAreas[i].weight;
-    }
-    res = request->update(ANDROID_CONTROL_AE_REGIONS,
-            meteringAreas, meteringAreasSize);
-    if (res != OK) return res;
-
-    res = request->update(ANDROID_CONTROL_AWB_REGIONS,
-            meteringAreas, meteringAreasSize);
-    if (res != OK) return res;
-    delete[] meteringAreas;
-
-    // Need to convert zoom index into a crop rectangle. The rectangle is
-    // chosen to maximize its area on the sensor
-
-    camera_metadata_ro_entry_t maxDigitalZoom =
-            mParameters.staticInfo(ANDROID_SCALER_AVAILABLE_MAX_ZOOM);
-    float zoomIncrement = (maxDigitalZoom.data.f[0] - 1) /
-            (params.NUM_ZOOM_STEPS-1);
-    float zoomRatio = 1 + zoomIncrement * params.zoom;
-
-    float zoomLeft, zoomTop, zoomWidth, zoomHeight;
-    if (params.previewWidth >= params.previewHeight) {
-        zoomWidth =  params.fastInfo.arrayWidth / zoomRatio;
-        zoomHeight = zoomWidth *
-                params.previewHeight / params.previewWidth;
-    } else {
-        zoomHeight = params.fastInfo.arrayHeight / zoomRatio;
-        zoomWidth = zoomHeight *
-                params.previewWidth / params.previewHeight;
-    }
-    zoomLeft = (params.fastInfo.arrayWidth - zoomWidth) / 2;
-    zoomTop = (params.fastInfo.arrayHeight - zoomHeight) / 2;
-
-    int32_t cropRegion[3] = { zoomLeft, zoomTop, zoomWidth };
-    res = request->update(ANDROID_SCALER_CROP_REGION,
-            cropRegion, 3);
-    if (res != OK) return res;
-
-    // TODO: Decide how to map recordingHint, or whether just to ignore it
-
-    uint8_t vstabMode = params.videoStabilization ?
-            ANDROID_CONTROL_VIDEO_STABILIZATION_ON :
-            ANDROID_CONTROL_VIDEO_STABILIZATION_OFF;
-    res = request->update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
-            &vstabMode, 1);
-    if (res != OK) return res;
-
-    uint8_t faceDetectMode = params.enableFaceDetect ?
-            params.fastInfo.bestFaceDetectMode :
-            (uint8_t)ANDROID_STATS_FACE_DETECTION_OFF;
-    res = request->update(ANDROID_STATS_FACE_DETECT_MODE,
-            &faceDetectMode, 1);
-    if (res != OK) return res;
-
-    return OK;
-}
-
 size_t Camera2Client::calculateBufferSize(int width, int height,
         int format, int stride) {
     switch (format) {
index b2fd636..df5dbf4 100644 (file)
@@ -21,7 +21,9 @@
 #include "CameraService.h"
 #include "camera2/Parameters.h"
 #include "camera2/FrameProcessor.h"
-#include "camera2/CaptureProcessor.h"
+#include "camera2/JpegProcessor.h"
+#include "camera2/ZslProcessor.h"
+#include "camera2/CaptureSequencer.h"
 #include "camera2/CallbackProcessor.h"
 #include <binder/MemoryBase.h>
 #include <binder/MemoryHeapBase.h>
@@ -95,10 +97,20 @@ public:
      * Interface used by independent components of Camera2Client.
      */
 
-    int getCameraId();
+    int getCameraId() const;
     const sp<Camera2Device>& getCameraDevice();
     camera2::SharedParameters& getParameters();
 
+    int getPreviewStreamId() const;
+    int getCaptureStreamId() const;
+    int getCallbackStreamId() const;
+    int getRecordingStreamId() const;
+    int getZslStreamId() const;
+
+    status_t registerFrameListener(int32_t id,
+            wp<camera2::FrameProcessor::FilteredListener> listener);
+    status_t removeFrameListener(int32_t id);
+
     // Simple class to ensure that access to ICameraClient is serialized by
     // requiring mCameraClientLock to be locked before access to mCameraClient
     // is possible.
@@ -123,6 +135,10 @@ public:
     static size_t calculateBufferSize(int width, int height,
             int format, int stride);
 
+    static const int32_t kPreviewRequestId = 1000;
+    static const int32_t kRecordRequestId  = 2000;
+    static const int32_t kFirstCaptureRequestId = 3000;
+
 private:
     /** ICamera interface-related private members */
 
@@ -183,9 +199,9 @@ private:
 
     /* Still image capture related members */
 
-    sp<camera2::CaptureProcessor> mCaptureProcessor;
-    CameraMetadata mCaptureRequest;
-    status_t updateCaptureRequest(const Parameters &params);
+    sp<camera2::CaptureSequencer> mCaptureSequencer;
+    sp<camera2::JpegProcessor> mJpegProcessor;
+    sp<camera2::ZslProcessor> mZslProcessor;
 
     /* Recording related members */
 
@@ -228,18 +244,6 @@ private:
 
     // Verify that caller is the owner of the camera
     status_t checkPid(const char *checkLocation) const;
-
-    // Update parameters all requests use, based on mParameters
-    status_t updateRequestCommon(CameraMetadata *request, const Parameters &params) const;
-
-    // Map from sensor active array pixel coordinates to normalized camera
-    // parameter coordinates. The former are (0,0)-(array width - 1, array height
-    // - 1), the latter from (-1000,-1000)-(1000,1000)
-    int normalizedXToArray(int x) const;
-    int normalizedYToArray(int y) const;
-    int arrayXToNormalized(int width) const;
-    int arrayYToNormalized(int height) const;
-
 };
 
 }; // namespace android
index daeeebb..a171c46 100644 (file)
@@ -206,6 +206,42 @@ status_t Camera2Device::createStream(sp<ANativeWindow> consumer,
     return OK;
 }
 
+status_t Camera2Device::createReprocessStreamFromStream(int outputId, int *id) {
+    status_t res;
+    ALOGV("%s: E", __FUNCTION__);
+
+    bool found = false;
+    StreamList::iterator streamI;
+    for (streamI = mStreams.begin();
+         streamI != mStreams.end(); streamI++) {
+        if ((*streamI)->getId() == outputId) {
+            found = true;
+            break;
+        }
+    }
+    if (!found) {
+        ALOGE("%s: Camera %d: Output stream %d doesn't exist; can't create "
+                "reprocess stream from it!", __FUNCTION__, mId, outputId);
+        return BAD_VALUE;
+    }
+
+    sp<ReprocessStreamAdapter> stream = new ReprocessStreamAdapter(mDevice);
+
+    res = stream->connectToDevice((*streamI));
+    if (res != OK) {
+        ALOGE("%s: Camera %d: Unable to create reprocessing stream from "\
+                "stream %d: %s (%d)", __FUNCTION__, mId, outputId,
+                strerror(-res), res);
+        return res;
+    }
+
+    *id = stream->getId();
+
+    mReprocessStreams.push_back(stream);
+    return OK;
+}
+
+
 status_t Camera2Device::getStreamInfo(int id,
         uint32_t *width, uint32_t *height, uint32_t *format) {
     ALOGV("%s: E", __FUNCTION__);
@@ -277,6 +313,33 @@ status_t Camera2Device::deleteStream(int id) {
     return OK;
 }
 
+status_t Camera2Device::deleteReprocessStream(int id) {
+    ALOGV("%s: E", __FUNCTION__);
+    bool found = false;
+    for (ReprocessStreamList::iterator streamI = mReprocessStreams.begin();
+         streamI != mReprocessStreams.end(); streamI++) {
+        if ((*streamI)->getId() == id) {
+            status_t res = (*streamI)->release();
+            if (res != OK) {
+                ALOGE("%s: Unable to release reprocess stream %d from "
+                        "HAL device: %s (%d)", __FUNCTION__, id,
+                        strerror(-res), res);
+                return res;
+            }
+            mReprocessStreams.erase(streamI);
+            found = true;
+            break;
+        }
+    }
+    if (!found) {
+        ALOGE("%s: Camera %d: Unable to find stream %d to delete",
+                __FUNCTION__, mId, id);
+        return BAD_VALUE;
+    }
+    return OK;
+}
+
+
 status_t Camera2Device::createDefaultRequest(int templateId,
         CameraMetadata *request) {
     status_t err;
@@ -405,6 +468,32 @@ status_t Camera2Device::triggerPrecaptureMetering(uint32_t id) {
     return res;
 }
 
+status_t Camera2Device::pushReprocessBuffer(int reprocessStreamId,
+        buffer_handle_t *buffer, wp<BufferReleasedListener> listener) {
+    ALOGV("%s: E", __FUNCTION__);
+    bool found = false;
+    status_t res = OK;
+    for (ReprocessStreamList::iterator streamI = mReprocessStreams.begin();
+         streamI != mReprocessStreams.end(); streamI++) {
+        if ((*streamI)->getId() == reprocessStreamId) {
+            res = (*streamI)->pushIntoStream(buffer, listener);
+            if (res != OK) {
+                ALOGE("%s: Unable to push buffer to reprocess stream %d: %s (%d)",
+                        __FUNCTION__, reprocessStreamId, strerror(-res), res);
+                return res;
+            }
+            found = true;
+            break;
+        }
+    }
+    if (!found) {
+        ALOGE("%s: Camera %d: Unable to find reprocess stream %d",
+                __FUNCTION__, mId, reprocessStreamId);
+        res = BAD_VALUE;
+    }
+    return res;
+}
+
 /**
  * Camera2Device::NotificationListener
  */
@@ -903,7 +992,7 @@ status_t Camera2Device::StreamAdapter::connectToDevice(
         }
 
         buffers[bufferIdx] = anwBuffers[bufferIdx]->handle;
-        ALOGV("%s: Buffer %p allocated", __FUNCTION__, (void*)(buffers[bufferIdx]));
+        ALOGV("%s: Buffer %p allocated", __FUNCTION__, (void*)buffers[bufferIdx]);
     }
 
     ALOGV("%s: Registering %d buffers with camera HAL", __FUNCTION__, mTotalBuffers);
@@ -1094,5 +1183,198 @@ int Camera2Device::StreamAdapter::set_crop(const camera2_stream_ops_t* w,
     return native_window_set_crop(a, &crop);
 }
 
+/**
+ * Camera2Device::ReprocessStreamAdapter
+ */
+
+#ifndef container_of
+#define container_of(ptr, type, member) \
+    (type *)((char*)(ptr) - offsetof(type, member))
+#endif
+
+Camera2Device::ReprocessStreamAdapter::ReprocessStreamAdapter(camera2_device_t *d):
+        mState(RELEASED),
+        mDevice(d),
+        mId(-1),
+        mWidth(0), mHeight(0), mFormat(0),
+        mActiveBuffers(0),
+        mFrameCount(0)
+{
+    camera2_stream_in_ops::acquire_buffer = acquire_buffer;
+    camera2_stream_in_ops::release_buffer = release_buffer;
+}
+
+Camera2Device::ReprocessStreamAdapter::~ReprocessStreamAdapter() {
+    if (mState != RELEASED) {
+        release();
+    }
+}
+
+status_t Camera2Device::ReprocessStreamAdapter::connectToDevice(
+        const sp<StreamAdapter> &outputStream) {
+    status_t res;
+    ALOGV("%s: E", __FUNCTION__);
+
+    if (mState != RELEASED) return INVALID_OPERATION;
+    if (outputStream == NULL) {
+        ALOGE("%s: Null base stream passed to reprocess stream adapter",
+                __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    mBaseStream = outputStream;
+    mWidth = outputStream->getWidth();
+    mHeight = outputStream->getHeight();
+    mFormat = outputStream->getFormat();
+
+    ALOGV("%s: New reprocess stream parameters %d x %d, format 0x%x",
+            __FUNCTION__, mWidth, mHeight, mFormat);
+
+    // Allocate device-side stream interface
+
+    uint32_t id;
+    res = mDevice->ops->allocate_reprocess_stream_from_stream(mDevice,
+            outputStream->getId(), getStreamOps(),
+            &id);
+    if (res != OK) {
+        ALOGE("%s: Device reprocess stream allocation failed: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
+
+    ALOGV("%s: Allocated reprocess stream id %d based on stream %d",
+            __FUNCTION__, id, outputStream->getId());
+
+    mId = id;
+
+    mState = ACTIVE;
+
+    return OK;
+}
+
+status_t Camera2Device::ReprocessStreamAdapter::release() {
+    status_t res;
+    ALOGV("%s: Releasing stream %d", __FUNCTION__, mId);
+    if (mState >= ACTIVE) {
+        res = mDevice->ops->release_reprocess_stream(mDevice, mId);
+        if (res != OK) {
+            ALOGE("%s: Unable to release stream %d",
+                    __FUNCTION__, mId);
+            return res;
+        }
+    }
+
+    List<QueueEntry>::iterator s;
+    for (s = mQueue.begin(); s != mQueue.end(); s++) {
+        sp<BufferReleasedListener> listener = s->releaseListener.promote();
+        if (listener != 0) listener->onBufferReleased(s->handle);
+    }
+    for (s = mInFlightQueue.begin(); s != mInFlightQueue.end(); s++) {
+        sp<BufferReleasedListener> listener = s->releaseListener.promote();
+        if (listener != 0) listener->onBufferReleased(s->handle);
+    }
+    mQueue.clear();
+    mInFlightQueue.clear();
+
+    mState = RELEASED;
+    return OK;
+}
+
+status_t Camera2Device::ReprocessStreamAdapter::pushIntoStream(
+    buffer_handle_t *handle, const wp<BufferReleasedListener> &releaseListener) {
+    // TODO: Some error checking here would be nice
+    ALOGV("%s: Pushing buffer %p to stream", __FUNCTION__, (void*)(*handle));
+
+    QueueEntry entry;
+    entry.handle = handle;
+    entry.releaseListener = releaseListener;
+    mQueue.push_back(entry);
+    return OK;
+}
+
+status_t Camera2Device::ReprocessStreamAdapter::dump(int fd,
+        const Vector<String16>& args) {
+    String8 result =
+            String8::format("      Reprocess stream %d: %d x %d, fmt 0x%x\n",
+                    mId, mWidth, mHeight, mFormat);
+    result.appendFormat("        acquired buffers: %d\n",
+            mActiveBuffers);
+    result.appendFormat("        frame count: %d\n",
+            mFrameCount);
+    write(fd, result.string(), result.size());
+    return OK;
+}
+
+const camera2_stream_in_ops *Camera2Device::ReprocessStreamAdapter::getStreamOps() {
+    return static_cast<camera2_stream_in_ops *>(this);
+}
+
+int Camera2Device::ReprocessStreamAdapter::acquire_buffer(
+    const camera2_stream_in_ops_t *w,
+        buffer_handle_t** buffer) {
+    int res;
+    ReprocessStreamAdapter* stream =
+            const_cast<ReprocessStreamAdapter*>(
+                static_cast<const ReprocessStreamAdapter*>(w));
+    if (stream->mState != ACTIVE) {
+        ALOGE("%s: Called when in bad state: %d", __FUNCTION__, stream->mState);
+        return INVALID_OPERATION;
+    }
+
+    if (stream->mQueue.empty()) {
+        *buffer = NULL;
+        return OK;
+    }
+
+    QueueEntry &entry = *(stream->mQueue.begin());
+
+    *buffer = entry.handle;
+
+    stream->mInFlightQueue.push_back(entry);
+    stream->mQueue.erase(stream->mQueue.begin());
+
+    stream->mActiveBuffers++;
+
+    ALOGV("Stream %d acquire: Buffer %p acquired", stream->mId,
+            (void*)(**buffer));
+    return OK;
+}
+
+int Camera2Device::ReprocessStreamAdapter::release_buffer(
+    const camera2_stream_in_ops_t* w,
+    buffer_handle_t* buffer) {
+    ReprocessStreamAdapter *stream =
+            const_cast<ReprocessStreamAdapter*>(
+                static_cast<const ReprocessStreamAdapter*>(w) );
+    stream->mFrameCount++;
+    ALOGV("Reprocess stream %d release: Frame %d (%p)",
+            stream->mId, stream->mFrameCount, (void*)*buffer);
+    int state = stream->mState;
+    if (state != ACTIVE) {
+        ALOGE("%s: Called when in bad state: %d", __FUNCTION__, state);
+        return INVALID_OPERATION;
+    }
+    stream->mActiveBuffers--;
+
+    List<QueueEntry>::iterator s;
+    for (s = stream->mInFlightQueue.begin(); s != stream->mInFlightQueue.end(); s++) {
+        if ( s->handle == buffer ) break;
+    }
+    if (s == stream->mInFlightQueue.end()) {
+        ALOGE("%s: Can't find buffer %p in in-flight list!", __FUNCTION__,
+                buffer);
+        return INVALID_OPERATION;
+    }
+
+    sp<BufferReleasedListener> listener = s->releaseListener.promote();
+    if (listener != 0) {
+        listener->onBufferReleased(s->handle);
+    } else {
+        ALOGE("%s: Can't free buffer - missing listener", __FUNCTION__);
+    }
+    stream->mInFlightQueue.erase(s);
+
+    return OK;
+}
 
 }; // namespace android
index 64f4608..a327d8d 100644 (file)
@@ -80,6 +80,12 @@ class Camera2Device : public virtual RefBase {
             int *id);
 
     /**
+     * Create an input reprocess stream that uses buffers from an existing
+     * output stream.
+     */
+    status_t createReprocessStreamFromStream(int outputId, int *id);
+
+    /**
      * Get information about a given stream.
      */
     status_t getStreamInfo(int id,
@@ -97,6 +103,12 @@ class Camera2Device : public virtual RefBase {
     status_t deleteStream(int id);
 
     /**
+     * Delete reprocess stream. Must not be called if there are requests in
+     * flight which reference that stream.
+     */
+    status_t deleteReprocessStream(int id);
+
+    /**
      * Create a metadata buffer with fields that the HAL device believes are
      * best for the given use case
      */
@@ -163,6 +175,21 @@ class Camera2Device : public virtual RefBase {
      */
     status_t triggerPrecaptureMetering(uint32_t id);
 
+    /**
+     * Abstract interface for clients that want to listen to reprocess buffer
+     * release events
+     */
+    struct BufferReleasedListener: public virtual RefBase {
+        virtual void onBufferReleased(buffer_handle_t *handle) = 0;
+    };
+
+    /**
+     * Push a buffer to be reprocessed into a reprocessing stream, and
+     * provide a listener to call once the buffer is returned by the HAL
+     */
+    status_t pushReprocessBuffer(int reprocessStreamId,
+            buffer_handle_t *buffer, wp<BufferReleasedListener> listener);
+
   private:
 
     const int mId;
@@ -343,6 +370,86 @@ class Camera2Device : public virtual RefBase {
     typedef List<sp<StreamAdapter> > StreamList;
     StreamList mStreams;
 
+    /**
+     * Adapter from an existing output stream to a camera2 device input
+     * (reprocess) stream; allocates/deallocates the stream in the device interface
+     */
+    class ReprocessStreamAdapter: public camera2_stream_in_ops, public virtual RefBase {
+      public:
+        ReprocessStreamAdapter(camera2_device_t *d);
+
+        ~ReprocessStreamAdapter();
+
+        /**
+         * Create a HAL device reprocess stream based on an existing output stream.
+         */
+        status_t connectToDevice(const sp<StreamAdapter> &outputStream);
+
+        status_t release();
+
+        /**
+         * Push buffer into stream for reprocessing. Takes ownership until it notifies
+         * that the buffer has been released
+         */
+        status_t pushIntoStream(buffer_handle_t *handle,
+                const wp<BufferReleasedListener> &releaseListener);
+
+        /**
+         * Get stream parameters.
+         * Only valid after a successful connectToDevice call.
+         */
+        int      getId() const     { return mId; }
+        uint32_t getWidth() const  { return mWidth; }
+        uint32_t getHeight() const { return mHeight; }
+        uint32_t getFormat() const { return mFormat; }
+
+        // Dump stream information
+        status_t dump(int fd, const Vector<String16>& args);
+
+      private:
+        enum {
+            ERROR = -1,
+            RELEASED = 0,
+            ACTIVE
+        } mState;
+
+        sp<ANativeWindow> mConsumerInterface;
+        wp<StreamAdapter> mBaseStream;
+
+        struct QueueEntry {
+            buffer_handle_t *handle;
+            wp<BufferReleasedListener> releaseListener;
+        };
+
+        List<QueueEntry> mQueue;
+
+        List<QueueEntry> mInFlightQueue;
+
+        camera2_device_t *mDevice;
+
+        uint32_t mId;
+        uint32_t mWidth;
+        uint32_t mHeight;
+        uint32_t mFormat;
+
+        /** Debugging information */
+        uint32_t mActiveBuffers;
+        uint32_t mFrameCount;
+        int64_t  mLastTimestamp;
+
+        const camera2_stream_in_ops *getStreamOps();
+
+        static int acquire_buffer(const camera2_stream_in_ops_t *w,
+                buffer_handle_t** buffer);
+
+        static int release_buffer(const camera2_stream_in_ops_t* w,
+                buffer_handle_t* buffer);
+
+    }; // class ReprocessStreamAdapter
+
+    typedef List<sp<ReprocessStreamAdapter> > ReprocessStreamList;
+    ReprocessStreamList mReprocessStreams;
+
     // Receives HAL notifications and routes them to the NotificationListener
     static void notificationCallback(int32_t msg_type,
             int32_t ext1,
index 854b890..bccb18e 100644 (file)
@@ -136,7 +136,7 @@ int CallbackProcessor::getStreamId() const {
     return mCallbackStreamId;
 }
 
-void CallbackProcessor::dump(int fd, const Vector<String16>& args) {
+void CallbackProcessor::dump(int fd, const Vector<String16>& args) const {
 }
 
 bool CallbackProcessor::threadLoop() {
index 36c51a3..c2a1372 100644 (file)
@@ -48,7 +48,7 @@ class CallbackProcessor:
     status_t deleteStream();
     int getStreamId() const;
 
-    void dump(int fd, const Vector<String16>& args);
+    void dump(int fd, const Vector<String16>& args) const;
   private:
     static const nsecs_t kWaitDuration = 10000000; // 10 ms
     wp<Camera2Client> mClient;
index 95377b2..8399e20 100644 (file)
@@ -84,6 +84,10 @@ size_t CameraMetadata::entryCount() const {
             get_camera_metadata_entry_count(mBuffer);
 }
 
+bool CameraMetadata::isEmpty() const {
+    return entryCount() == 0;
+}
+
 status_t CameraMetadata::sort() {
     return sort_camera_metadata(mBuffer);
 }
index 340414e..aee6cd7 100644 (file)
@@ -87,6 +87,11 @@ class CameraMetadata {
     size_t entryCount() const;
 
     /**
+     * Is the buffer empty (no entries)
+     */
+    bool isEmpty() const;
+
+    /**
      * Sort metadata buffer for faster find
      */
     status_t sort();
diff --git a/services/camera/libcameraservice/camera2/CaptureSequencer.cpp b/services/camera/libcameraservice/camera2/CaptureSequencer.cpp
new file mode 100644 (file)
index 0000000..532d2aa
--- /dev/null
@@ -0,0 +1,506 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Camera2Client::CaptureSequencer"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+
+#include <utils/Log.h>
+#include <utils/Trace.h>
+#include <utils/Vector.h>
+
+#include "CaptureSequencer.h"
+#include "../Camera2Device.h"
+#include "../Camera2Client.h"
+#include "Parameters.h"
+
+namespace android {
+namespace camera2 {
+
+/** Public members */
+
+CaptureSequencer::CaptureSequencer(wp<Camera2Client> client):
+        Thread(false),
+        mStartCapture(false),
+        mBusy(false),
+        mNewAEState(false),
+        mNewFrameReceived(false),
+        mNewCaptureReceived(false),
+        mClient(client),
+        mCaptureState(IDLE),
+        mTriggerId(0),
+        mTimeoutCount(0),
+        mCaptureId(Camera2Client::kFirstCaptureRequestId) {
+}
+
+CaptureSequencer::~CaptureSequencer() {
+    ALOGV("%s: Exit", __FUNCTION__);
+}
+
+void CaptureSequencer::setZslProcessor(wp<ZslProcessor> processor) {
+    Mutex::Autolock l(mInputMutex);
+    mZslProcessor = processor;
+}
+
+status_t CaptureSequencer::startCapture() {
+    ATRACE_CALL();
+    Mutex::Autolock l(mInputMutex);
+    if (mBusy) {
+        ALOGE("%s: Already busy capturing!", __FUNCTION__);
+        return INVALID_OPERATION;
+    }
+    if (!mStartCapture) {
+        mStartCapture = true;
+        mStartCaptureSignal.signal();
+    }
+    return OK;
+}
+
+void CaptureSequencer::notifyAutoExposure(uint8_t newState, int triggerId) {
+    ATRACE_CALL();
+    Mutex::Autolock l(mInputMutex);
+    mAEState = newState;
+    mAETriggerId = triggerId;
+    if (!mNewAEState) {
+        mNewAEState = true;
+        mNewNotifySignal.signal();
+    }
+}
+
+void CaptureSequencer::onFrameAvailable(int32_t frameId,
+        CameraMetadata &frame) {
+    ATRACE_CALL();
+    Mutex::Autolock l(mInputMutex);
+    mNewFrameId = frameId;
+    mNewFrame.acquire(frame);
+    if (!mNewFrameReceived) {
+        mNewFrameReceived = true;
+        mNewFrameSignal.signal();
+    }
+}
+
+void CaptureSequencer::onCaptureAvailable(nsecs_t timestamp) {
+    ATRACE_CALL();
+    Mutex::Autolock l(mInputMutex);
+    mCaptureTimestamp = timestamp;
+    if (!mNewCaptureReceived) {
+        mNewCaptureReceived = true;
+        mNewCaptureSignal.signal();
+    }
+}
+
+
+void CaptureSequencer::dump(int fd, const Vector<String16>& args) {
+    String8 result;
+    if (mCaptureRequest.entryCount() != 0) {
+        result = "    Capture request:\n";
+        write(fd, result.string(), result.size());
+        mCaptureRequest.dump(fd, 2, 6);
+    } else {
+        result = "    Capture request: undefined\n";
+        write(fd, result.string(), result.size());
+    }
+    result = String8::format("    Current capture state: %s\n",
+            kStateNames[mCaptureState]);
+    result.append("    Latest captured frame:\n");
+    write(fd, result.string(), result.size());
+    mNewFrame.dump(fd, 2, 6);
+}
+
+/** Private members */
+
+const char* CaptureSequencer::kStateNames[CaptureSequencer::NUM_CAPTURE_STATES+1] =
+{
+    "IDLE",
+    "START",
+    "ZSL_START",
+    "ZSL_WAITING",
+    "ZSL_REPROCESSING",
+    "STANDARD_START",
+    "STANDARD_PRECAPTURE",
+    "STANDARD_CAPTURING",
+    "DONE",
+    "ERROR",
+    "UNKNOWN"
+};
+
+const CaptureSequencer::StateManager
+        CaptureSequencer::kStateManagers[CaptureSequencer::NUM_CAPTURE_STATES-1] = {
+    &CaptureSequencer::manageIdle,
+    &CaptureSequencer::manageStart,
+    &CaptureSequencer::manageZslStart,
+    &CaptureSequencer::manageZslWaiting,
+    &CaptureSequencer::manageZslReprocessing,
+    &CaptureSequencer::manageStandardStart,
+    &CaptureSequencer::manageStandardPrecaptureWait,
+    &CaptureSequencer::manageStandardCapture,
+    &CaptureSequencer::manageStandardCaptureWait,
+    &CaptureSequencer::manageDone,
+};
+
+bool CaptureSequencer::threadLoop() {
+    status_t res;
+
+    sp<Camera2Client> client = mClient.promote();
+    if (client == 0) return false;
+
+    if (mCaptureState < ERROR) {
+        mCaptureState = (this->*kStateManagers[mCaptureState])(client);
+    } else {
+        ALOGE("%s: Bad capture state: %s",
+                __FUNCTION__, kStateNames[mCaptureState]);
+        return false;
+    }
+
+    return true;
+}
+
+CaptureSequencer::CaptureState CaptureSequencer::manageIdle(sp<Camera2Client> &client) {
+    status_t res;
+    ATRACE_CALL();
+    Mutex::Autolock l(mInputMutex);
+    while (!mStartCapture) {
+        res = mStartCaptureSignal.waitRelative(mInputMutex,
+                kWaitDuration);
+        if (res == TIMED_OUT) break;
+    }
+    if (mStartCapture) {
+        mStartCapture = false;
+        mBusy = true;
+        return START;
+    }
+    return IDLE;
+}
+
+CaptureSequencer::CaptureState CaptureSequencer::manageDone(sp<Camera2Client> &client) {
+    status_t res;
+    ATRACE_CALL();
+    mCaptureId++;
+
+    {
+        Mutex::Autolock l(mInputMutex);
+        mBusy = false;
+    }
+
+    SharedParameters::Lock l(client->getParameters());
+    switch (l.mParameters.state) {
+        case Parameters::STILL_CAPTURE:
+            l.mParameters.state = Parameters::STOPPED;
+            break;
+        case Parameters::VIDEO_SNAPSHOT:
+            l.mParameters.state = Parameters::RECORD;
+            break;
+        default:
+            ALOGE("%s: Camera %d: Still image produced unexpectedly "
+                    "in state %s!",
+                    __FUNCTION__, client->getCameraId(),
+                    Parameters::getStateName(l.mParameters.state));
+    }
+
+    return IDLE;
+}
+
+CaptureSequencer::CaptureState CaptureSequencer::manageStart(
+        sp<Camera2Client> &client) {
+    status_t res;
+    ATRACE_CALL();
+    SharedParameters::Lock l(client->getParameters());
+    CaptureState nextState = DONE;
+
+    res = updateCaptureRequest(l.mParameters, client);
+    if (res != OK ) {
+        ALOGE("%s: Camera %d: Can't update still image capture request: %s (%d)",
+                __FUNCTION__, client->getCameraId(), strerror(-res), res);
+        return DONE;
+    }
+
+    if (l.mParameters.zslMode &&
+            l.mParameters.state == Parameters::STILL_CAPTURE) {
+        nextState = ZSL_START;
+    } else {
+        nextState = STANDARD_START;
+    }
+
+    return nextState;
+}
+
+CaptureSequencer::CaptureState CaptureSequencer::manageZslStart(
+        sp<Camera2Client> &client) {
+    status_t res;
+    sp<ZslProcessor> processor = mZslProcessor.promote();
+    if (processor == 0) {
+        ALOGE("%s: No ZSL queue to use!", __FUNCTION__);
+        return DONE;
+    }
+
+    client->registerFrameListener(mCaptureId,
+            this);
+
+    res = client->getCameraDevice()->clearStreamingRequest();
+    if (res != OK) {
+        ALOGE("%s: Camera %d: Unable to stop preview for ZSL capture: "
+                "%s (%d)",
+                __FUNCTION__, client->getCameraId(), strerror(-res), res);
+        return DONE;
+    }
+    // TODO: Actually select the right thing here.
+    processor->pushToReprocess(mCaptureId);
+
+    mTimeoutCount = kMaxTimeoutsForCaptureEnd;
+    return STANDARD_CAPTURE_WAIT;
+}
+
+CaptureSequencer::CaptureState CaptureSequencer::manageZslWaiting(
+        sp<Camera2Client> &client) {
+    return DONE;
+}
+
+CaptureSequencer::CaptureState CaptureSequencer::manageZslReprocessing(
+        sp<Camera2Client> &client) {
+    return START;
+}
+
+CaptureSequencer::CaptureState CaptureSequencer::manageStandardStart(
+        sp<Camera2Client> &client) {
+    ATRACE_CALL();
+    client->registerFrameListener(mCaptureId,
+            this);
+    {
+        SharedParameters::Lock l(client->getParameters());
+        mTriggerId = l.mParameters.precaptureTriggerCounter++;
+    }
+    client->getCameraDevice()->triggerPrecaptureMetering(mTriggerId);
+
+    mAeInPrecapture = false;
+    mTimeoutCount = kMaxTimeoutsForPrecaptureStart;
+    return STANDARD_PRECAPTURE_WAIT;
+}
+
+CaptureSequencer::CaptureState CaptureSequencer::manageStandardPrecaptureWait(
+        sp<Camera2Client> &client) {
+    status_t res;
+    ATRACE_CALL();
+    Mutex::Autolock l(mInputMutex);
+    while (!mNewAEState) {
+        res = mNewNotifySignal.waitRelative(mInputMutex, kWaitDuration);
+        if (res == TIMED_OUT) {
+            mTimeoutCount--;
+            break;
+        }
+    }
+    if (mTimeoutCount <= 0) {
+        ALOGW("Timed out waiting for precapture %s",
+                mAeInPrecapture ? "end" : "start");
+        return STANDARD_CAPTURE;
+    }
+    if (mNewAEState) {
+        if (!mAeInPrecapture) {
+            // Waiting to see PRECAPTURE state
+            if (mAETriggerId == mTriggerId &&
+                    mAEState == ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
+                ALOGV("%s: Got precapture start", __FUNCTION__);
+                mAeInPrecapture = true;
+                mTimeoutCount = kMaxTimeoutsForPrecaptureEnd;
+            }
+        } else {
+            // Waiting to see PRECAPTURE state end
+            if (mAETriggerId == mTriggerId &&
+                    mAEState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
+                ALOGV("%s: Got precapture end", __FUNCTION__);
+                return STANDARD_CAPTURE;
+            }
+        }
+        mNewAEState = false;
+    }
+    return STANDARD_PRECAPTURE_WAIT;
+}
+
+CaptureSequencer::CaptureState CaptureSequencer::manageStandardCapture(
+        sp<Camera2Client> &client) {
+    status_t res;
+    ATRACE_CALL();
+    SharedParameters::Lock l(client->getParameters());
+    Vector<uint8_t> outputStreams;
+
+    outputStreams.push(client->getPreviewStreamId());
+    outputStreams.push(client->getCaptureStreamId());
+
+    if (l.mParameters.previewCallbackFlags &
+            CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) {
+        outputStreams.push(client->getCallbackStreamId());
+    }
+
+    if (l.mParameters.state == Parameters::VIDEO_SNAPSHOT) {
+        outputStreams.push(client->getRecordingStreamId());
+    }
+
+    res = mCaptureRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS,
+            outputStreams);
+    if (res == OK) {
+        res = mCaptureRequest.update(ANDROID_REQUEST_ID,
+                &mCaptureId, 1);
+    }
+    if (res == OK) {
+        res = mCaptureRequest.sort();
+    }
+
+    if (res != OK) {
+        ALOGE("%s: Camera %d: Unable to set up still capture request: %s (%d)",
+                __FUNCTION__, client->getCameraId(), strerror(-res), res);
+        return DONE;
+    }
+
+    CameraMetadata captureCopy = mCaptureRequest;
+    if (captureCopy.entryCount() == 0) {
+        ALOGE("%s: Camera %d: Unable to copy capture request for HAL device",
+                __FUNCTION__, client->getCameraId());
+        return DONE;
+    }
+
+    if (l.mParameters.state == Parameters::STILL_CAPTURE) {
+        res = client->getCameraDevice()->clearStreamingRequest();
+        if (res != OK) {
+            ALOGE("%s: Camera %d: Unable to stop preview for still capture: "
+                    "%s (%d)",
+                    __FUNCTION__, client->getCameraId(), strerror(-res), res);
+            return DONE;
+        }
+    }
+    // TODO: Capture should be atomic with setStreamingRequest here
+    res = client->getCameraDevice()->capture(captureCopy);
+    if (res != OK) {
+        ALOGE("%s: Camera %d: Unable to submit still image capture request: "
+                "%s (%d)",
+                __FUNCTION__, client->getCameraId(), strerror(-res), res);
+        return DONE;
+    }
+
+    mTimeoutCount = kMaxTimeoutsForCaptureEnd;
+    return STANDARD_CAPTURE_WAIT;
+}
+
+CaptureSequencer::CaptureState CaptureSequencer::manageStandardCaptureWait(
+        sp<Camera2Client> &client) {
+    status_t res;
+    ATRACE_CALL();
+    Mutex::Autolock l(mInputMutex);
+    while (!mNewFrameReceived) {
+        res = mNewFrameSignal.waitRelative(mInputMutex, kWaitDuration);
+        if (res == TIMED_OUT) {
+            mTimeoutCount--;
+            break;
+        }
+    }
+    while (!mNewCaptureReceived) {
+        res = mNewCaptureSignal.waitRelative(mInputMutex, kWaitDuration);
+        if (res == TIMED_OUT) {
+            mTimeoutCount--;
+            break;
+        }
+    }
+    if (mTimeoutCount <= 0) {
+        ALOGW("Timed out waiting for capture to complete");
+        return DONE;
+    }
+    if (mNewFrameReceived && mNewCaptureReceived) {
+        if (mNewFrameId != mCaptureId) {
+            ALOGW("Mismatched capture frame IDs: Expected %d, got %d",
+                    mCaptureId, mNewFrameId);
+        }
+        camera_metadata_entry_t entry;
+        entry = mNewFrame.find(ANDROID_SENSOR_TIMESTAMP);
+        if (entry.count == 0) {
+            ALOGE("No timestamp field in capture frame!");
+        }
+        if (entry.data.i64[0] != mCaptureTimestamp) {
+            ALOGW("Mismatched capture timestamps: Metadata frame %lld,"
+                    " captured buffer %lld", entry.data.i64[0], mCaptureTimestamp);
+        }
+        client->removeFrameListener(mCaptureId);
+
+        mNewFrameReceived = false;
+        mNewCaptureReceived = false;
+        return DONE;
+    }
+    return STANDARD_CAPTURE_WAIT;
+}
+
+status_t CaptureSequencer::updateCaptureRequest(const Parameters &params,
+        sp<Camera2Client> &client) {
+    ATRACE_CALL();
+    status_t res;
+    if (mCaptureRequest.entryCount() == 0) {
+        res = client->getCameraDevice()->createDefaultRequest(
+                CAMERA2_TEMPLATE_STILL_CAPTURE,
+                &mCaptureRequest);
+        if (res != OK) {
+            ALOGE("%s: Camera %d: Unable to create default still image request:"
+                    " %s (%d)", __FUNCTION__, client->getCameraId(),
+                    strerror(-res), res);
+            return res;
+        }
+    }
+
+    res = params.updateRequest(&mCaptureRequest);
+    if (res != OK) {
+        ALOGE("%s: Camera %d: Unable to update common entries of capture "
+                "request: %s (%d)", __FUNCTION__, client->getCameraId(),
+                strerror(-res), res);
+        return res;
+    }
+
+    res = mCaptureRequest.update(ANDROID_JPEG_THUMBNAIL_SIZE,
+            params.jpegThumbSize, 2);
+    if (res != OK) return res;
+    res = mCaptureRequest.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
+            &params.jpegThumbQuality, 1);
+    if (res != OK) return res;
+    res = mCaptureRequest.update(ANDROID_JPEG_QUALITY,
+            &params.jpegQuality, 1);
+    if (res != OK) return res;
+    res = mCaptureRequest.update(
+            ANDROID_JPEG_ORIENTATION,
+            &params.jpegRotation, 1);
+    if (res != OK) return res;
+
+    if (params.gpsEnabled) {
+        res = mCaptureRequest.update(
+                ANDROID_JPEG_GPS_COORDINATES,
+                params.gpsCoordinates, 3);
+        if (res != OK) return res;
+        res = mCaptureRequest.update(
+                ANDROID_JPEG_GPS_TIMESTAMP,
+                &params.gpsTimestamp, 1);
+        if (res != OK) return res;
+        res = mCaptureRequest.update(
+                ANDROID_JPEG_GPS_PROCESSING_METHOD,
+                params.gpsProcessingMethod);
+        if (res != OK) return res;
+    } else {
+        res = mCaptureRequest.erase(ANDROID_JPEG_GPS_COORDINATES);
+        if (res != OK) return res;
+        res = mCaptureRequest.erase(ANDROID_JPEG_GPS_TIMESTAMP);
+        if (res != OK) return res;
+        res = mCaptureRequest.erase(ANDROID_JPEG_GPS_PROCESSING_METHOD);
+        if (res != OK) return res;
+    }
+
+    return OK;
+}
+
+
+}; // namespace camera2
+}; // namespace android
diff --git a/services/camera/libcameraservice/camera2/CaptureSequencer.h b/services/camera/libcameraservice/camera2/CaptureSequencer.h
new file mode 100644 (file)
index 0000000..0492a43
--- /dev/null
@@ -0,0 +1,154 @@
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_CAPTURESEQUENCER_H
#define ANDROID_SERVERS_CAMERA_CAMERA2_CAPTURESEQUENCER_H

#include <utils/Thread.h>
#include <utils/String16.h>
#include <utils/Vector.h>
#include <utils/Mutex.h>
#include <utils/Condition.h>
#include "CameraMetadata.h"
#include "Parameters.h"
#include "FrameProcessor.h"

namespace android {

class Camera2Client;

namespace camera2 {

class ZslProcessor;

/**
 * Manages the still image capture process for
 * zero-shutter-lag, regular, and video snapshots.
 *
 * Runs a state machine on its own thread (see CaptureState below); the
 * thread is woken by notifications from the frame processor, the capture
 * (JPEG) processor, and AE state callbacks.
 */
class CaptureSequencer:
            virtual public Thread,
            virtual public FrameProcessor::FilteredListener {
  public:
    CaptureSequencer(wp<Camera2Client> client);
    ~CaptureSequencer();

    // Get reference to the ZslProcessor, which holds the ZSL buffers and frames
    void setZslProcessor(wp<ZslProcessor> processor);

    // Begin still image capture
    status_t startCapture();

    // Notifications about AE state changes
    void notifyAutoExposure(uint8_t newState, int triggerId);

    // Notifications from the frame processor
    virtual void onFrameAvailable(int32_t frameId, CameraMetadata &frame);

    // Notifications from the capture processor
    void onCaptureAvailable(nsecs_t timestamp);

    void dump(int fd, const Vector<String16>& args);

  private:
    /**
     * Accessed by other threads
     */
    Mutex mInputMutex;

    // Set by startCapture() to request a new sequence; mBusy is true
    // while a capture sequence is already in progress
    bool mStartCapture;
    bool mBusy;
    Condition mStartCaptureSignal;

    // Latest AE state delivered through notifyAutoExposure()
    bool mNewAEState;
    uint8_t mAEState;
    int mAETriggerId;
    Condition mNewNotifySignal;

    // Latest filtered result metadata delivered through onFrameAvailable()
    bool mNewFrameReceived;
    int32_t mNewFrameId;
    CameraMetadata mNewFrame;
    Condition mNewFrameSignal;

    // Latest still capture completion delivered through onCaptureAvailable()
    bool mNewCaptureReceived;
    nsecs_t mCaptureTimestamp;
    Condition mNewCaptureSignal;

    /**
     * Internal to CaptureSequencer
     */
    static const nsecs_t kWaitDuration = 100000000; // 100 ms
    static const int kMaxTimeoutsForPrecaptureStart = 2; // 200 ms
    static const int kMaxTimeoutsForPrecaptureEnd = 10;  // 1 sec
    static const int kMaxTimeoutsForCaptureEnd    = 20;  // 2 sec

    wp<Camera2Client> mClient;
    wp<ZslProcessor> mZslProcessor;

    // Capture state machine: threadLoop() repeatedly invokes the
    // kStateManagers entry for mCaptureState until DONE is reached
    enum CaptureState {
        IDLE,
        START,
        ZSL_START,
        ZSL_WAITING,
        ZSL_REPROCESSING,
        STANDARD_START,
        STANDARD_PRECAPTURE_WAIT,
        STANDARD_CAPTURE,
        STANDARD_CAPTURE_WAIT,
        DONE,
        ERROR,
        NUM_CAPTURE_STATES
    } mCaptureState;
    // Human-readable names for CaptureState values, for debug dumps
    static const char* kStateNames[];

    // Per-state handler; returns the next state to transition to
    typedef CaptureState (CaptureSequencer::*StateManager)(sp<Camera2Client> &client);
    static const StateManager kStateManagers[];

    // Request metadata used for the still capture, rebuilt from the
    // current Parameters by updateCaptureRequest()
    CameraMetadata mCaptureRequest;

    int mTriggerId;
    int mTimeoutCount;
    bool mAeInPrecapture;

    int32_t mCaptureId;

    // Main internal methods

    virtual bool threadLoop();

    CaptureState manageIdle(sp<Camera2Client> &client);
    CaptureState manageStart(sp<Camera2Client> &client);

    CaptureState manageZslStart(sp<Camera2Client> &client);
    CaptureState manageZslWaiting(sp<Camera2Client> &client);
    CaptureState manageZslReprocessing(sp<Camera2Client> &client);

    CaptureState manageStandardStart(sp<Camera2Client> &client);
    CaptureState manageStandardPrecaptureWait(sp<Camera2Client> &client);
    CaptureState manageStandardCapture(sp<Camera2Client> &client);
    CaptureState manageStandardCaptureWait(sp<Camera2Client> &client);

    CaptureState manageDone(sp<Camera2Client> &client);

    // Utility methods

    status_t updateCaptureRequest(const Parameters &params,
            sp<Camera2Client> &client);
};

}; // namespace camera2
}; // namespace android

#endif
index 5059754..e24db0b 100644 (file)
@@ -36,6 +36,19 @@ FrameProcessor::~FrameProcessor() {
     ALOGV("%s: Exit", __FUNCTION__);
 }
 
+status_t FrameProcessor::registerListener(int32_t id,
+        wp<FilteredListener> listener) {
+    Mutex::Autolock l(mInputMutex);
+    ALOGV("%s: Registering listener for frame id %d",
+            __FUNCTION__, id);
+    return mListeners.replaceValueFor(id, listener);
+}
+
+status_t FrameProcessor::removeListener(int32_t id) {
+    Mutex::Autolock l(mInputMutex);
+    return mListeners.removeItem(id);
+}
+
 void FrameProcessor::dump(int fd, const Vector<String16>& args) {
     String8 result("    Latest received frame:\n");
     write(fd, result.string(), result.size());
@@ -50,6 +63,7 @@ bool FrameProcessor::threadLoop() {
         sp<Camera2Client> client = mClient.promote();
         if (client == 0) return false;
         device = client->getCameraDevice();
+        if (device == 0) return false;
     }
 
     res = device->waitForNextFrame(kWaitDuration);
@@ -67,20 +81,28 @@ bool FrameProcessor::threadLoop() {
 
 void FrameProcessor::processNewFrames(sp<Camera2Client> &client) {
     status_t res;
+    ATRACE_CALL();
     CameraMetadata frame;
     while ( (res = client->getCameraDevice()->getNextFrame(&frame)) == OK) {
         camera_metadata_entry_t entry;
+
         entry = frame.find(ANDROID_REQUEST_FRAME_COUNT);
         if (entry.count == 0) {
-            ALOGE("%s: Camera %d: Error reading frame number: %s (%d)",
-                    __FUNCTION__, client->getCameraId(), strerror(-res), res);
+            ALOGE("%s: Camera %d: Error reading frame number",
+                    __FUNCTION__, client->getCameraId());
             break;
         }
 
         res = processFaceDetect(frame, client);
         if (res != OK) break;
 
-        mLastFrame.acquire(frame);
+        // Must be last - listener can take ownership of frame
+        res = processListener(frame, client);
+        if (res != OK) break;
+
+        if (!frame.isEmpty()) {
+            mLastFrame.acquire(frame);
+        }
     }
     if (res != NOT_ENOUGH_DATA) {
         ALOGE("%s: Camera %d: Error getting next frame: %s (%d)",
@@ -91,9 +113,43 @@ void FrameProcessor::processNewFrames(sp<Camera2Client> &client) {
     return;
 }
 
-status_t FrameProcessor::processFaceDetect(
-    const CameraMetadata &frame, sp<Camera2Client> &client) {
+status_t FrameProcessor::processListener(CameraMetadata &frame,
+        sp<Camera2Client> &client) {
+    status_t res;
+    ATRACE_CALL();
+    camera_metadata_entry_t entry;
+
+    entry = frame.find(ANDROID_REQUEST_ID);
+    if (entry.count == 0) {
+        ALOGE("%s: Camera %d: Error reading frame id",
+                __FUNCTION__, client->getCameraId());
+        return BAD_VALUE;
+    }
+    int32_t frameId = entry.data.i32[0];
+    ALOGV("%s: Got frame with ID %d", __FUNCTION__, frameId);
+
+    sp<FilteredListener> listener;
+    {
+        Mutex::Autolock l(mInputMutex);
+        ssize_t listenerIndex = mListeners.indexOfKey(frameId);
+        if (listenerIndex != NAME_NOT_FOUND) {
+            listener = mListeners[listenerIndex].promote();
+            if (listener == 0) {
+                mListeners.removeItemsAt(listenerIndex, 1);
+            }
+        }
+    }
+
+    if (listener != 0) {
+        listener->onFrameAvailable(frameId, frame);
+    }
+    return OK;
+}
+
+status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame,
+        sp<Camera2Client> &client) {
     status_t res;
+    ATRACE_CALL();
     camera_metadata_ro_entry_t entry;
     bool enableFaceDetect;
     int maxFaces;
@@ -209,6 +265,5 @@ status_t FrameProcessor::processFaceDetect(
     return OK;
 }
 
-
 }; // namespace camera2
 }; // namespace android
index 2cdf7f0..25d489a 100644 (file)
@@ -20,6 +20,7 @@
 #include <utils/Thread.h>
 #include <utils/String16.h>
 #include <utils/Vector.h>
+#include <utils/KeyedVector.h>
 #include "CameraMetadata.h"
 
 namespace android {
@@ -36,6 +37,17 @@ class FrameProcessor: public Thread {
     FrameProcessor(wp<Camera2Client> client);
     ~FrameProcessor();
 
+    struct FilteredListener: virtual public RefBase {
+        // Listener may take ownership of frame
+        virtual void onFrameAvailable(int32_t frameId, CameraMetadata &frame) = 0;
+    };
+
+    // Register a listener for a specific frame ID (android.request.id).
+    // De-registers any existing listeners for that ID
+    status_t registerListener(int32_t id, wp<FilteredListener> listener);
+
+    status_t removeListener(int32_t id);
+
     void dump(int fd, const Vector<String16>& args);
   private:
     static const nsecs_t kWaitDuration = 10000000; // 10 ms
@@ -43,10 +55,17 @@ class FrameProcessor: public Thread {
 
     virtual bool threadLoop();
 
+    Mutex mInputMutex;
+    KeyedVector<int32_t, wp<FilteredListener> > mListeners;
+
     void processNewFrames(sp<Camera2Client> &client);
+
     status_t processFaceDetect(const CameraMetadata &frame,
             sp<Camera2Client> &client);
 
+    status_t processListener(CameraMetadata &frame,
+            sp<Camera2Client> &client);
+
     CameraMetadata mLastFrame;
 };
 
  * limitations under the License.
  */
 
-#define LOG_TAG "Camera2Client::CaptureProcessor"
+#define LOG_TAG "Camera2Client::JpegProcessor"
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
 
 #include <utils/Log.h>
 #include <utils/Trace.h>
 
-#include "CaptureProcessor.h"
+#include "JpegProcessor.h"
 #include <gui/SurfaceTextureClient.h>
 #include "../Camera2Device.h"
 #include "../Camera2Client.h"
 namespace android {
 namespace camera2 {
 
-CaptureProcessor::CaptureProcessor(wp<Camera2Client> client):
+JpegProcessor::JpegProcessor(
+    wp<Camera2Client> client,
+    wp<CaptureSequencer> sequencer):
         Thread(false),
         mClient(client),
+        mSequencer(sequencer),
         mCaptureAvailable(false),
         mCaptureStreamId(NO_STREAM) {
 }
 
-CaptureProcessor::~CaptureProcessor() {
+JpegProcessor::~JpegProcessor() {
     ALOGV("%s: Exit", __FUNCTION__);
 }
 
-void CaptureProcessor::onFrameAvailable() {
+void JpegProcessor::onFrameAvailable() {
     Mutex::Autolock l(mInputMutex);
     if (!mCaptureAvailable) {
         mCaptureAvailable = true;
@@ -49,7 +52,7 @@ void CaptureProcessor::onFrameAvailable() {
     }
 }
 
-status_t CaptureProcessor::updateStream(const Parameters &params) {
+status_t JpegProcessor::updateStream(const Parameters &params) {
     ATRACE_CALL();
     ALOGV("%s", __FUNCTION__);
     status_t res;
@@ -127,7 +130,7 @@ status_t CaptureProcessor::updateStream(const Parameters &params) {
     return OK;
 }
 
-status_t CaptureProcessor::deleteStream() {
+status_t JpegProcessor::deleteStream() {
     ATRACE_CALL();
     status_t res;
 
@@ -144,15 +147,15 @@ status_t CaptureProcessor::deleteStream() {
     return OK;
 }
 
-int CaptureProcessor::getStreamId() const {
+int JpegProcessor::getStreamId() const {
     Mutex::Autolock l(mInputMutex);
     return mCaptureStreamId;
 }
 
-void CaptureProcessor::dump(int fd, const Vector<String16>& args) {
+void JpegProcessor::dump(int fd, const Vector<String16>& args) const {
 }
 
-bool CaptureProcessor::threadLoop() {
+bool JpegProcessor::threadLoop() {
     status_t res;
 
     {
@@ -174,7 +177,7 @@ bool CaptureProcessor::threadLoop() {
     return true;
 }
 
-status_t CaptureProcessor::processNewCapture(sp<Camera2Client> &client) {
+status_t JpegProcessor::processNewCapture(sp<Camera2Client> &client) {
     ATRACE_CALL();
     status_t res;
     sp<Camera2Heap> captureHeap;
@@ -200,10 +203,7 @@ status_t CaptureProcessor::processNewCapture(sp<Camera2Client> &client) {
 
         switch (l.mParameters.state) {
             case Parameters::STILL_CAPTURE:
-                l.mParameters.state = Parameters::STOPPED;
-                break;
             case Parameters::VIDEO_SNAPSHOT:
-                l.mParameters.state = Parameters::RECORD;
                 break;
             default:
                 ALOGE("%s: Camera %d: Still image produced unexpectedly "
@@ -224,6 +224,11 @@ status_t CaptureProcessor::processNewCapture(sp<Camera2Client> &client) {
         return OK;
     }
 
+    sp<CaptureSequencer> sequencer = mSequencer.promote();
+    if (sequencer != 0) {
+        sequencer->onCaptureAvailable(imgBuffer.timestamp);
+    }
+
     // TODO: Optimize this to avoid memcopy
     void* captureMemory = mCaptureHeap->mHeap->getBase();
     size_t size = mCaptureHeap->mHeap->getSize();
@@ -14,8 +14,8 @@
  * limitations under the License.
  */
 
-#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_CAPTUREPROCESSOR_H
-#define ANDROID_SERVERS_CAMERA_CAMERA2_CAPTUREPROCESSOR_H
+#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_JPEGPROCESSOR_H
+#define ANDROID_SERVERS_CAMERA_CAMERA2_JPEGPROCESSOR_H
 
 #include <utils/Thread.h>
 #include <utils/String16.h>
@@ -33,14 +33,16 @@ class Camera2Client;
 
 namespace camera2 {
 
+class CaptureSequencer;
+
 /***
  * Still image capture output image processing
  */
-class CaptureProcessor:
+class JpegProcessor:
             public Thread, public CpuConsumer::FrameAvailableListener {
   public:
-    CaptureProcessor(wp<Camera2Client> client);
-    ~CaptureProcessor();
+    JpegProcessor(wp<Camera2Client> client, wp<CaptureSequencer> sequencer);
+    ~JpegProcessor();
 
     void onFrameAvailable();
 
@@ -48,10 +50,11 @@ class CaptureProcessor:
     status_t deleteStream();
     int getStreamId() const;
 
-    void dump(int fd, const Vector<String16>& args);
+    void dump(int fd, const Vector<String16>& args) const;
   private:
     static const nsecs_t kWaitDuration = 10000000; // 10 ms
     wp<Camera2Client> mClient;
+    wp<CaptureSequencer> mSequencer;
 
     mutable Mutex mInputMutex;
     bool mCaptureAvailable;
index 2f7d023..1cad2ae 100644 (file)
@@ -18,6 +18,9 @@
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
 
+#include <utils/Log.h>
+#include <utils/Trace.h>
+
 #include <math.h>
 #include <stdlib.h>
 
@@ -738,9 +741,11 @@ status_t Parameters::initialize(const CameraMetadata *info) {
     enableFaceDetect = false;
 
     enableFocusMoveMessages = false;
-    afTriggerCounter = 0;
+    afTriggerCounter = 1;
     currentAfTriggerId = -1;
 
+    precaptureTriggerCounter = 1;
+
     previewCallbackFlags = 0;
 
     state = STOPPED;
@@ -1318,6 +1323,202 @@ status_t Parameters::set(const String8& params) {
     return OK;
 }
 
+status_t Parameters::updateRequest(CameraMetadata *request) const {
+    ATRACE_CALL();
+    status_t res;
+
+    uint8_t metadataMode = ANDROID_REQUEST_METADATA_FULL;
+    res = request->update(ANDROID_REQUEST_METADATA_MODE,
+            &metadataMode, 1);
+    if (res != OK) return res;
+
+    res = request->update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+            previewFpsRange, 2);
+    if (res != OK) return res;
+
+    uint8_t reqWbMode = autoWhiteBalanceLock ?
+            (uint8_t)ANDROID_CONTROL_AWB_LOCKED : wbMode;
+    res = request->update(ANDROID_CONTROL_AWB_MODE,
+            &reqWbMode, 1);
+    if (res != OK) return res;
+    res = request->update(ANDROID_CONTROL_EFFECT_MODE,
+            &effectMode, 1);
+    if (res != OK) return res;
+    res = request->update(ANDROID_CONTROL_AE_ANTIBANDING_MODE,
+            &antibandingMode, 1);
+    if (res != OK) return res;
+
+    uint8_t reqControlMode =
+            (sceneMode == ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED) ?
+            ANDROID_CONTROL_AUTO : ANDROID_CONTROL_USE_SCENE_MODE;
+    res = request->update(ANDROID_CONTROL_MODE,
+            &reqControlMode, 1);
+    if (res != OK) return res;
+    if (reqControlMode == ANDROID_CONTROL_USE_SCENE_MODE) {
+        res = request->update(ANDROID_CONTROL_SCENE_MODE,
+                &sceneMode, 1);
+        if (res != OK) return res;
+    }
+
+    uint8_t reqFlashMode = ANDROID_FLASH_OFF;
+    uint8_t reqAeMode;
+    switch (flashMode) {
+        case Parameters::FLASH_MODE_OFF:
+            reqAeMode = ANDROID_CONTROL_AE_ON; break;
+        case Parameters::FLASH_MODE_AUTO:
+            reqAeMode = ANDROID_CONTROL_AE_ON_AUTO_FLASH; break;
+        case Parameters::FLASH_MODE_ON:
+            reqAeMode = ANDROID_CONTROL_AE_ON_ALWAYS_FLASH; break;
+        case Parameters::FLASH_MODE_TORCH:
+            reqAeMode = ANDROID_CONTROL_AE_ON;
+            reqFlashMode = ANDROID_FLASH_TORCH;
+            break;
+        case Parameters::FLASH_MODE_RED_EYE:
+            reqAeMode = ANDROID_CONTROL_AE_ON_AUTO_FLASH_REDEYE; break;
+        default:
+            ALOGE("%s: Camera %d: Unknown flash mode %d", __FUNCTION__,
+                    cameraId, flashMode);
+            return BAD_VALUE;
+    }
+    if (autoExposureLock) reqAeMode = ANDROID_CONTROL_AE_LOCKED;
+
+    res = request->update(ANDROID_FLASH_MODE,
+            &reqFlashMode, 1);
+    if (res != OK) return res;
+    res = request->update(ANDROID_CONTROL_AE_MODE,
+            &reqAeMode, 1);
+    if (res != OK) return res;
+
+    float reqFocusDistance = 0; // infinity focus in diopters
+    uint8_t reqFocusMode;
+    switch (focusMode) {
+        case Parameters::FOCUS_MODE_AUTO:
+        case Parameters::FOCUS_MODE_MACRO:
+        case Parameters::FOCUS_MODE_CONTINUOUS_VIDEO:
+        case Parameters::FOCUS_MODE_CONTINUOUS_PICTURE:
+        case Parameters::FOCUS_MODE_EDOF:
+            reqFocusMode = focusMode;
+            break;
+        case Parameters::FOCUS_MODE_INFINITY:
+        case Parameters::FOCUS_MODE_FIXED:
+            reqFocusMode = ANDROID_CONTROL_AF_OFF;
+            break;
+        default:
+            ALOGE("%s: Camera %d: Unknown focus mode %d", __FUNCTION__,
+                    cameraId, focusMode);
+            return BAD_VALUE;
+    }
+    res = request->update(ANDROID_LENS_FOCUS_DISTANCE,
+            &reqFocusDistance, 1);
+    if (res != OK) return res;
+    res = request->update(ANDROID_CONTROL_AF_MODE,
+            &reqFocusMode, 1);
+    if (res != OK) return res;
+
+    size_t reqFocusingAreasSize = focusingAreas.size() * 5;
+    int32_t *reqFocusingAreas = new int32_t[reqFocusingAreasSize];
+    for (size_t i = 0; i < reqFocusingAreasSize; i += 5) {
+        if (focusingAreas[i].weight != 0) {
+            reqFocusingAreas[i + 0] =
+                    normalizedXToArray(focusingAreas[i].left);
+            reqFocusingAreas[i + 1] =
+                    normalizedYToArray(focusingAreas[i].top);
+            reqFocusingAreas[i + 2] =
+                    normalizedXToArray(focusingAreas[i].right);
+            reqFocusingAreas[i + 3] =
+                    normalizedYToArray(focusingAreas[i].bottom);
+        } else {
+            reqFocusingAreas[i + 0] = 0;
+            reqFocusingAreas[i + 1] = 0;
+            reqFocusingAreas[i + 2] = 0;
+            reqFocusingAreas[i + 3] = 0;
+        }
+        reqFocusingAreas[i + 4] = focusingAreas[i].weight;
+    }
+    res = request->update(ANDROID_CONTROL_AF_REGIONS,
+            reqFocusingAreas, reqFocusingAreasSize);
+    if (res != OK) return res;
+    delete[] reqFocusingAreas;
+
+    res = request->update(ANDROID_CONTROL_AE_EXP_COMPENSATION,
+            &exposureCompensation, 1);
+    if (res != OK) return res;
+
+    size_t reqMeteringAreasSize = meteringAreas.size() * 5;
+    int32_t *reqMeteringAreas = new int32_t[reqMeteringAreasSize];
+    for (size_t i = 0; i < reqMeteringAreasSize; i += 5) {
+        if (meteringAreas[i].weight != 0) {
+            reqMeteringAreas[i + 0] =
+                normalizedXToArray(meteringAreas[i].left);
+            reqMeteringAreas[i + 1] =
+                normalizedYToArray(meteringAreas[i].top);
+            reqMeteringAreas[i + 2] =
+                normalizedXToArray(meteringAreas[i].right);
+            reqMeteringAreas[i + 3] =
+                normalizedYToArray(meteringAreas[i].bottom);
+        } else {
+            reqMeteringAreas[i + 0] = 0;
+            reqMeteringAreas[i + 1] = 0;
+            reqMeteringAreas[i + 2] = 0;
+            reqMeteringAreas[i + 3] = 0;
+        }
+        reqMeteringAreas[i + 4] = meteringAreas[i].weight;
+    }
+    res = request->update(ANDROID_CONTROL_AE_REGIONS,
+            reqMeteringAreas, reqMeteringAreasSize);
+    if (res != OK) return res;
+
+    res = request->update(ANDROID_CONTROL_AWB_REGIONS,
+            reqMeteringAreas, reqMeteringAreasSize);
+    if (res != OK) return res;
+    delete[] reqMeteringAreas;
+
+    // Need to convert zoom index into a crop rectangle. The rectangle is
+    // chosen to maximize its area on the sensor
+
+    camera_metadata_ro_entry_t maxDigitalZoom =
+            staticInfo(ANDROID_SCALER_AVAILABLE_MAX_ZOOM);
+    float zoomIncrement = (maxDigitalZoom.data.f[0] - 1) /
+            (NUM_ZOOM_STEPS-1);
+    float zoomRatio = 1 + zoomIncrement * zoom;
+
+    float zoomLeft, zoomTop, zoomWidth, zoomHeight;
+    if (previewWidth >= previewHeight) {
+        zoomWidth =  fastInfo.arrayWidth / zoomRatio;
+        zoomHeight = zoomWidth *
+                previewHeight / previewWidth;
+    } else {
+        zoomHeight = fastInfo.arrayHeight / zoomRatio;
+        zoomWidth = zoomHeight *
+                previewWidth / previewHeight;
+    }
+    zoomLeft = (fastInfo.arrayWidth - zoomWidth) / 2;
+    zoomTop = (fastInfo.arrayHeight - zoomHeight) / 2;
+
+    int32_t reqCropRegion[3] = { zoomLeft, zoomTop, zoomWidth };
+    res = request->update(ANDROID_SCALER_CROP_REGION,
+            reqCropRegion, 3);
+    if (res != OK) return res;
+
+    // TODO: Decide how to map recordingHint, or whether just to ignore it
+
+    uint8_t reqVstabMode = videoStabilization ?
+            ANDROID_CONTROL_VIDEO_STABILIZATION_ON :
+            ANDROID_CONTROL_VIDEO_STABILIZATION_OFF;
+    res = request->update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
+            &reqVstabMode, 1);
+    if (res != OK) return res;
+
+    uint8_t reqFaceDetectMode = enableFaceDetect ?
+            fastInfo.bestFaceDetectMode :
+            (uint8_t)ANDROID_STATS_FACE_DETECTION_OFF;
+    res = request->update(ANDROID_STATS_FACE_DETECT_MODE,
+            &reqFaceDetectMode, 1);
+    if (res != OK) return res;
+
+    return OK;
+}
+
 const char* Parameters::getStateName(State state) {
 #define CASE_ENUM_TO_CHAR(x) case x: return(#x); break;
     switch(state) {
index 817d001..e71d086 100644 (file)
 namespace android {
 namespace camera2 {
 
-// Current camera state; this is the full state of the Camera under the old
-// camera API (contents of the CameraParameters object in a more-efficient
-// format, plus other state). The enum values are mostly based off the
-// corresponding camera2 enums, not the camera1 strings. A few are defined here
-// if they don't cleanly map to camera2 values.
+/**
+ * Current camera state; this is the full state of the Camera under the old
+ * camera API (contents of the CameraParameters object in a more-efficient
+ * format, plus other state). The enum values are mostly based off the
+ * corresponding camera2 enums, not the camera1 strings. A few are defined here
+ * if they don't cleanly map to camera2 values.
+ */
 struct Parameters {
+    /**
+     * Parameters and other state
+     */
     int cameraId;
     int cameraFacing;
 
@@ -117,9 +122,13 @@ struct Parameters {
     int currentAfTriggerId;
     bool afInMotion;
 
+    int precaptureTriggerCounter;
+
     uint32_t previewCallbackFlags;
     bool previewCallbackOneShot;
 
+    bool zslMode;
+
     // Overall camera state
     enum State {
         DISCONNECTED,
@@ -149,7 +158,9 @@ struct Parameters {
         int32_t maxFaces;
     } fastInfo;
 
-    // Parameter manipulation and setup methods
+    /**
+     * Parameter manipulation and setup methods
+     */
 
     Parameters(int cameraId, int cameraFacing);
     ~Parameters();
@@ -170,6 +181,9 @@ struct Parameters {
     // Validate and update camera parameters based on new settings
     status_t set(const String8 &params);
 
+    // Update passed-in request for common parameters
+    status_t updateRequest(CameraMetadata *request) const;
+
     // Static methods for debugging and converting between camera1 and camera2
     // parameters
 
diff --git a/services/camera/libcameraservice/camera2/ZslProcessor.cpp b/services/camera/libcameraservice/camera2/ZslProcessor.cpp
new file mode 100644 (file)
index 0000000..a39585e
--- /dev/null
@@ -0,0 +1,378 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Camera2Client::ZslProcessor"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+//#define LOG_NNDEBUG 0
+
+#ifdef LOG_NNDEBUG
+#define ALOGVV(...) ALOGV(__VA_ARGS__)
+#else
+#define ALOGVV(...) ((void)0)
+#endif
+
+#include <utils/Log.h>
+#include <utils/Trace.h>
+
+#include "ZslProcessor.h"
+#include <gui/SurfaceTextureClient.h>
+#include "../Camera2Device.h"
+#include "../Camera2Client.h"
+
+
+namespace android {
+namespace camera2 {
+
// Construct the ZSL processor; pre-sizes the ZSL buffer queue and the
// preview result-metadata ring, and hooks itself up to the capture
// sequencer so ZSL captures can be triggered.
ZslProcessor::ZslProcessor(
    wp<Camera2Client> client,
    wp<CaptureSequencer> sequencer):
        Thread(false),
        mState(RUNNING),
        mClient(client),
        mSequencer(sequencer),
        mZslBufferAvailable(false),
        mZslStreamId(NO_STREAM),
        mZslReprocessStreamId(NO_STREAM),
        mFrameListHead(0),
        mZslQueueHead(0),
        mZslQueueTail(0) {
    // Pre-allocate fixed-depth ring buffers for ZSL buffers and frames
    mZslQueue.insertAt(0, kZslBufferDepth);
    mFrameList.insertAt(0, kFrameListDepth);
    // NOTE(review): passing `this` as a wp from inside the constructor
    // assumes the sequencer only promotes it after the caller holds an
    // sp to this object — confirm against RefBase semantics.
    sp<CaptureSequencer> captureSequencer = mSequencer.promote();
    if (captureSequencer != 0) captureSequencer->setZslProcessor(this);
}
+
+ZslProcessor::~ZslProcessor() {
+    ALOGV("%s: Exit", __FUNCTION__);
+}
+
+void ZslProcessor::onFrameAvailable() {
+    Mutex::Autolock l(mInputMutex);
+    if (!mZslBufferAvailable) {
+        mZslBufferAvailable = true;
+        mZslBufferAvailableSignal.signal();
+    }
+}
+
+void ZslProcessor::onFrameAvailable(int32_t frameId, CameraMetadata &frame) {
+    Mutex::Autolock l(mInputMutex);
+    camera_metadata_entry_t entry;
+    entry = frame.find(ANDROID_SENSOR_TIMESTAMP);
+    nsecs_t timestamp = entry.data.i64[0];
+    ALOGVV("Got preview frame for timestamp %lld", timestamp);
+
+    if (mState != RUNNING) return;
+
+    mFrameList.editItemAt(mFrameListHead).acquire(frame);
+    mFrameListHead = (mFrameListHead + 1) % kFrameListDepth;
+
+    findMatchesLocked();
+}
+
// Called by the device when the reprocess stream releases a ZSL buffer
// back to us after the reprocess request has consumed it.
void ZslProcessor::onBufferReleased(buffer_handle_t *handle) {
    Mutex::Autolock l(mInputMutex);

    // NOTE(review): assumes buffers are released in queue order, i.e. the
    // queue tail is always the buffer pushed for reprocessing — confirm
    buffer_handle_t *expectedHandle =
            &(mZslQueue[mZslQueueTail].buffer.mGraphicBuffer->handle);

    if (handle != expectedHandle) {
        ALOGE("%s: Expected buffer %p, got buffer %p",
                __FUNCTION__, expectedHandle, handle);
    }

    // Reprocess submission complete; resume accepting new preview frames
    mState = RUNNING;
}
+
+status_t ZslProcessor::updateStream(const Parameters &params) {
+    ATRACE_CALL();
+    ALOGV("%s: Configuring ZSL streams", __FUNCTION__);
+    status_t res;
+
+    Mutex::Autolock l(mInputMutex);
+
+    sp<Camera2Client> client = mClient.promote();
+    if (client == 0) return OK;
+    sp<Camera2Device> device = client->getCameraDevice();
+
+    if (mZslConsumer == 0) {
+        // Create CPU buffer queue endpoint
+        mZslConsumer = new BufferItemConsumer(
+            GRALLOC_USAGE_HW_CAMERA_ZSL,
+            kZslBufferDepth,
+            true);
+        mZslConsumer->setFrameAvailableListener(this);
+        mZslConsumer->setName(String8("Camera2Client::ZslConsumer"));
+        mZslWindow = new SurfaceTextureClient(
+            mZslConsumer->getProducerInterface());
+    }
+
+    if (mZslStreamId != NO_STREAM) {
+        // Check if stream parameters have to change
+        uint32_t currentWidth, currentHeight;
+        res = device->getStreamInfo(mZslStreamId,
+                &currentWidth, &currentHeight, 0);
+        if (res != OK) {
+            ALOGE("%s: Camera %d: Error querying capture output stream info: "
+                    "%s (%d)", __FUNCTION__,
+                    client->getCameraId(), strerror(-res), res);
+            return res;
+        }
+        if (currentWidth != (uint32_t)params.pictureWidth ||
+                currentHeight != (uint32_t)params.pictureHeight) {
+            res = device->deleteStream(mZslReprocessStreamId);
+            if (res != OK) {
+                ALOGE("%s: Camera %d: Unable to delete old reprocess stream "
+                        "for ZSL: %s (%d)", __FUNCTION__,
+                        client->getCameraId(), strerror(-res), res);
+                return res;
+            }
+            res = device->deleteStream(mZslStreamId);
+            if (res != OK) {
+                ALOGE("%s: Camera %d: Unable to delete old output stream "
+                        "for ZSL: %s (%d)", __FUNCTION__,
+                        client->getCameraId(), strerror(-res), res);
+                return res;
+            }
+            mZslStreamId = NO_STREAM;
+        }
+    }
+
+    if (mZslStreamId == NO_STREAM) {
+        // Create stream for HAL production
+        res = device->createStream(mZslWindow,
+                params.pictureWidth, params.pictureHeight,
+                HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 0,
+                &mZslStreamId);
+        if (res != OK) {
+            ALOGE("%s: Camera %d: Can't create output stream for ZSL: "
+                    "%s (%d)", __FUNCTION__, client->getCameraId(),
+                    strerror(-res), res);
+            return res;
+        }
+        res = device->createReprocessStreamFromStream(mZslStreamId,
+                &mZslReprocessStreamId);
+        if (res != OK) {
+            ALOGE("%s: Camera %d: Can't create reprocess stream for ZSL: "
+                    "%s (%d)", __FUNCTION__, client->getCameraId(),
+                    strerror(-res), res);
+            return res;
+        }
+    }
+    client->registerFrameListener(Camera2Client::kPreviewRequestId, this);
+
+    return OK;
+}
+
+status_t ZslProcessor::deleteStream() {
+    ATRACE_CALL();
+    status_t res;
+
+    Mutex::Autolock l(mInputMutex);
+
+    if (mZslStreamId != NO_STREAM) {
+        sp<Camera2Client> client = mClient.promote();
+        if (client == 0) return OK;
+        sp<Camera2Device> device = client->getCameraDevice();
+
+        device->deleteStream(mZslReprocessStreamId);
+        mZslReprocessStreamId = NO_STREAM;
+        device->deleteStream(mZslStreamId);
+        mZslStreamId = NO_STREAM;
+    }
+    return OK;
+}
+
+int ZslProcessor::getStreamId() const {
+    Mutex::Autolock l(mInputMutex);
+    return mZslStreamId;
+}
+
+int ZslProcessor::getReprocessStreamId() const {
+    Mutex::Autolock l(mInputMutex);
+    return mZslReprocessStreamId;
+}
+
+status_t ZslProcessor::pushToReprocess(int32_t requestId) {
+    ALOGV("%s: Send in reprocess request with id %d",
+            __FUNCTION__, requestId);
+    Mutex::Autolock l(mInputMutex);
+    status_t res;
+    sp<Camera2Client> client = mClient.promote();
+
+    if (client == 0) return false;
+
+    if (mZslQueueTail != mZslQueueHead) {
+        buffer_handle_t *handle =
+            &(mZslQueue[mZslQueueTail].buffer.mGraphicBuffer->handle);
+        CameraMetadata request = mZslQueue[mZslQueueTail].frame;
+        uint8_t requestType = ANDROID_REQUEST_TYPE_REPROCESS;
+        res = request.update(ANDROID_REQUEST_TYPE,
+                &requestType, 1);
+        uint8_t inputStreams[1] = { mZslReprocessStreamId };
+        if (res == OK) request.update(ANDROID_REQUEST_INPUT_STREAMS,
+                inputStreams, 1);
+        uint8_t outputStreams[1] = { client->getCaptureStreamId() };
+        if (res == OK) request.update(ANDROID_REQUEST_OUTPUT_STREAMS,
+                outputStreams, 1);
+        res = request.update(ANDROID_REQUEST_ID,
+                &requestId, 1);
+
+        if (res != OK ) {
+            ALOGE("%s: Unable to update frame to a reprocess request", __FUNCTION__);
+            return INVALID_OPERATION;
+        }
+
+        res = client->getCameraDevice()->pushReprocessBuffer(mZslReprocessStreamId,
+                handle, this);
+        if (res != OK) {
+            ALOGE("%s: Unable to push buffer for reprocessing: %s (%d)",
+                    __FUNCTION__, strerror(-res), res);
+            return res;
+        }
+
+        res = client->getCameraDevice()->capture(request);
+        if (res != OK ) {
+            ALOGE("%s: Unable to send ZSL reprocess request to capture: %s (%d)",
+                    __FUNCTION__, strerror(-res), res);
+            return res;
+        }
+
+        mState = LOCKED;
+    } else {
+        ALOGE("%s: Nothing to push", __FUNCTION__);
+        return BAD_VALUE;
+    }
+    return OK;
+}
+
+void ZslProcessor::dump(int fd, const Vector<String16>& args) const {
+    // TODO: dump ZSL queue / frame-list state for debugging; currently a stub.
+}
+
+bool ZslProcessor::threadLoop() {
+    status_t res;
+
+    {
+        // Wait (bounded by kWaitDuration) for onFrameAvailable to flag new
+        // data; the timeout lets the loop notice thread-exit requests.
+        Mutex::Autolock l(mInputMutex);
+        while (!mZslBufferAvailable) {
+            res = mZslBufferAvailableSignal.waitRelative(mInputMutex,
+                    kWaitDuration);
+            if (res == TIMED_OUT) return true;
+        }
+        mZslBufferAvailable = false;
+    }
+
+    // Drain every buffer currently available from the ZSL consumer;
+    // stop the thread (return false) if the client has gone away.
+    do {
+        sp<Camera2Client> client = mClient.promote();
+        if (client == 0) return false;
+        res = processNewZslBuffer(client);
+    } while (res == OK);
+
+    return true;
+}
+
+// Pull one buffer from the ZSL consumer into the ring queue (or drop it if a
+// reprocess capture is in flight), then try to pair queued buffers with
+// capture-result metadata. Returns non-OK when no buffer was available.
+status_t ZslProcessor::processNewZslBuffer(sp<Camera2Client> &client) {
+    ATRACE_CALL();
+    status_t res;
+    Mutex::Autolock l(mInputMutex);
+
+    // While LOCKED (reprocess in flight), keep the queue frozen: acquire and
+    // immediately release incoming buffers so the consumer doesn't stall.
+    if (mState == LOCKED) {
+        BufferItemConsumer::BufferItem item;
+        res = mZslConsumer->acquireBuffer(&item);
+        if (res != OK) {
+            if (res != BufferItemConsumer::NO_BUFFER_AVAILABLE) {
+                ALOGE("%s: Camera %d: Error receiving ZSL image buffer: "
+                        "%s (%d)", __FUNCTION__,
+                        client->getCameraId(), strerror(-res), res);
+            }
+            return res;
+        }
+        mZslConsumer->releaseBuffer(item);
+        return OK;
+    }
+
+    ALOGVV("Got ZSL buffer: head: %d, tail: %d", mZslQueueHead, mZslQueueTail);
+
+    // Ring buffer full: evict the oldest entry (release its buffer back to
+    // the consumer, reset its slot) to make room at the head.
+    if ( (mZslQueueHead + 1) % kZslBufferDepth == mZslQueueTail) {
+        mZslConsumer->releaseBuffer(mZslQueue[mZslQueueTail].buffer);
+        mZslQueue.replaceAt(mZslQueueTail);
+        mZslQueueTail = (mZslQueueTail + 1) % kZslBufferDepth;
+    }
+
+    ZslPair &queueHead = mZslQueue.editItemAt(mZslQueueHead);
+
+    res = mZslConsumer->acquireBuffer(&(queueHead.buffer));
+    if (res != OK) {
+        if (res != BufferItemConsumer::NO_BUFFER_AVAILABLE) {
+            ALOGE("%s: Camera %d: Error receiving ZSL image buffer: "
+                    "%s (%d)", __FUNCTION__,
+                    client->getCameraId(), strerror(-res), res);
+        }
+        return res;
+    }
+    // Clear any stale metadata left over from this slot's previous occupant
+    queueHead.frame.release();
+
+    mZslQueueHead = (mZslQueueHead + 1) % kZslBufferDepth;
+
+    ALOGVV("  Added buffer, timestamp %lld", queueHead.buffer.mTimestamp);
+
+    // Pair up queued buffers with received result metadata by timestamp
+    findMatchesLocked();
+
+    return OK;
+}
+
+void ZslProcessor::findMatchesLocked() {
+    for (size_t i = 0; i < mZslQueue.size(); i++) {
+        ZslPair &queueEntry = mZslQueue.editItemAt(i);
+        nsecs_t bufferTimestamp = queueEntry.buffer.mTimestamp;
+        if (queueEntry.frame.isEmpty() && bufferTimestamp != 0) {
+            // Have buffer, no matching frame. Look for one
+            for (size_t j = 0; j < mFrameList.size(); j++) {
+                bool match = false;
+                CameraMetadata &frame = mFrameList.editItemAt(j);
+                if (!frame.isEmpty()) {
+                    camera_metadata_entry_t entry;
+                    entry = frame.find(ANDROID_SENSOR_TIMESTAMP);
+                    if (entry.count == 0) {
+                        ALOGE("%s: Can't find timestamp in frame!",
+                                __FUNCTION__);
+                        continue;
+                    }
+                    nsecs_t frameTimestamp = entry.data.i64[0];
+                    if (bufferTimestamp == frameTimestamp) {
+                        ALOGVV("%s: Found match %lld", __FUNCTION__,
+                                frameTimestamp);
+                        match = true;
+                    } else {
+                        int64_t delta = abs(bufferTimestamp - frameTimestamp);
+                        if ( delta < 1000000) {
+                            ALOGVV("%s: Found close match %lld (delta %lld)",
+                                    __FUNCTION__, bufferTimestamp, delta);
+                            match = true;
+                        }
+                    }
+                }
+                if (match) {
+                    queueEntry.frame.acquire(frame);
+                    break;
+                }
+            }
+        }
+    }
+}
+
+}; // namespace camera2
+}; // namespace android
diff --git a/services/camera/libcameraservice/camera2/ZslProcessor.h b/services/camera/libcameraservice/camera2/ZslProcessor.h
new file mode 100644 (file)
index 0000000..74921a3
--- /dev/null
@@ -0,0 +1,119 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_CAMERA2_ZSLPROCESSOR_H
+#define ANDROID_SERVERS_CAMERA_CAMERA2_ZSLPROCESSOR_H
+
+#include <utils/Thread.h>
+#include <utils/String16.h>
+#include <utils/Vector.h>
+#include <utils/Mutex.h>
+#include <utils/Condition.h>
+#include <gui/BufferItemConsumer.h>
+#include "Parameters.h"
+#include "FrameProcessor.h"
+#include "CameraMetadata.h"
+#include "Camera2Heap.h"
+#include "../Camera2Device.h"
+
+namespace android {
+
+class Camera2Client;
+
+namespace camera2 {
+
+class CaptureSequencer;
+
+/***
+ * ZSL queue processing
+ */
+class ZslProcessor:
+            virtual public Thread,
+            virtual public BufferItemConsumer::FrameAvailableListener,
+            virtual public FrameProcessor::FilteredListener,
+            virtual public Camera2Device::BufferReleasedListener {
+  public:
+    ZslProcessor(wp<Camera2Client> client, wp<CaptureSequencer> sequencer);
+    ~ZslProcessor();
+
+    // From mZslConsumer
+    virtual void onFrameAvailable();
+    // From FrameProcessor
+    virtual void onFrameAvailable(int32_t frameId, CameraMetadata &frame);
+
+    // From Camera2Device::BufferReleasedListener; called when the device is
+    // done with a reprocess input buffer pushed by pushToReprocess().
+    virtual void onBufferReleased(buffer_handle_t *handle);
+
+    // Create (or recreate) the ZSL output stream and the reprocess stream
+    // derived from it, sized per params.
+    status_t updateStream(const Parameters &params);
+    // Tear down the ZSL and reprocess streams, if present.
+    status_t deleteStream();
+    int getStreamId() const;
+    int getReprocessStreamId() const;
+
+    // Submit the queued ZSL buffer/metadata pair at the queue tail as a
+    // reprocess capture with the given request ID; locks the queue until
+    // the capture completes.
+    status_t pushToReprocess(int32_t requestId);
+
+    void dump(int fd, const Vector<String16>& args) const;
+  private:
+    // Bounded condition wait in threadLoop, so the thread can notice exit
+    // requests even when no buffers arrive.
+    static const nsecs_t kWaitDuration = 10000000; // 10 ms
+
+    enum {
+        RUNNING,  // normal operation: queue incoming ZSL buffers
+        LOCKED    // reprocess capture in flight: drop incoming buffers
+    } mState;
+
+    wp<Camera2Client> mClient;
+    wp<CaptureSequencer> mSequencer;
+
+    // Guards all mutable state below
+    mutable Mutex mInputMutex;
+    bool mZslBufferAvailable;
+    Condition mZslBufferAvailableSignal;
+
+    enum {
+        NO_STREAM = -1
+    };
+
+    int mZslStreamId;
+    int mZslReprocessStreamId;
+    sp<BufferItemConsumer> mZslConsumer;
+    sp<ANativeWindow>      mZslWindow;
+
+    // A ZSL image buffer paired with its capture-result metadata
+    struct ZslPair {
+        BufferItemConsumer::BufferItem buffer;
+        CameraMetadata frame;
+    };
+
+    static const size_t kZslBufferDepth = 3;
+    static const size_t kFrameListDepth = kZslBufferDepth * 2;
+    // Recent capture-result metadata, kept for matching against buffers
+    Vector<CameraMetadata> mFrameList;
+    size_t mFrameListHead;
+
+    ZslPair mNextPair;
+
+    // Ring buffer of buffer/frame pairs; head is the insertion point,
+    // tail is the oldest entry.
+    Vector<ZslPair> mZslQueue;
+    size_t mZslQueueHead;
+    size_t mZslQueueTail;
+
+    virtual bool threadLoop();
+
+    status_t processNewZslBuffer(sp<Camera2Client> &client);
+
+    // Match up entries from frame list to buffers in ZSL queue
+    void findMatchesLocked();
+};
+
+
+}; //namespace camera2
+}; //namespace android
+
+#endif