Camera: Support BufferQueue between Camera and StageFright

Use a BufferQueue between Camera and StageFright to pass video
buffers for Camera HALv3 devices.

CameraSource in StageFright will try to use "buffer queue" mode
if it is supported by the camera device. In "buffer queue" mode,
CameraSource creates a buffer queue and a listener thread to receive
video buffers from the camera device. CameraSource then wraps each
ANWBuffer in a MediaBuffer. If the camera device doesn't support
"buffer queue" mode, it falls back to "metadata in video buffer"
mode or "real YUV data" mode.

"Metadata in video buffer" mode is removed from Camera2Client and
only "buffer queue" mode is supported.

Bug: 24511454

Change-Id: Ice833b57bcd8d91852d6415402013f56f3e3970a
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index c5fe69f..18b97a3 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -293,7 +293,7 @@
         virtual status_t      startPreview() = 0;
         virtual void          stopPreview() = 0;
         virtual bool          previewEnabled() = 0;
-        virtual status_t      storeMetaDataInBuffers(bool enabled) = 0;
+        virtual status_t      setVideoBufferMode(int32_t videoBufferMode) = 0;
         virtual status_t      startRecording() = 0;
         virtual void          stopRecording() = 0;
         virtual bool          recordingEnabled() = 0;
@@ -304,6 +304,7 @@
         virtual status_t      setParameters(const String8& params) = 0;
         virtual String8       getParameters() const = 0;
         virtual status_t      sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) = 0;
+        virtual status_t      setVideoTarget(const sp<IGraphicBufferProducer>& bufferProducer) = 0;
 
         // Interface used by CameraService
         Client(const sp<CameraService>& cameraService,
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 175920f..6722512 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -93,7 +93,6 @@
     mStreamingProcessor = new StreamingProcessor(this);
     threadName = String8::format("C2-%d-StreamProc",
             mCameraId);
-    mStreamingProcessor->run(threadName.string());
 
     mFrameProcessor = new FrameProcessor(mDevice, this);
     threadName = String8::format("C2-%d-FrameProc",
@@ -390,7 +389,6 @@
         l.mParameters.state = Parameters::DISCONNECTED;
     }
 
-    mStreamingProcessor->requestExit();
     mFrameProcessor->requestExit();
     mCaptureSequencer->requestExit();
     mJpegProcessor->requestExit();
@@ -404,7 +402,6 @@
         // complete callbacks that re-enter Camera2Client
         mBinderSerializationLock.unlock();
 
-        mStreamingProcessor->join();
         mFrameProcessor->join();
         mCaptureSequencer->join();
         mJpegProcessor->join();
@@ -944,7 +941,7 @@
     return l.mParameters.state == Parameters::PREVIEW;
 }
 
-status_t Camera2Client::storeMetaDataInBuffers(bool enabled) {
+status_t Camera2Client::setVideoBufferMode(int32_t videoBufferMode) {
     ATRACE_CALL();
     Mutex::Autolock icl(mBinderSerializationLock);
     status_t res;
@@ -963,7 +960,12 @@
             break;
     }
 
-    l.mParameters.storeMetadataInBuffers = enabled;
+    if (videoBufferMode != VIDEO_BUFFER_MODE_BUFFER_QUEUE) {
+        ALOGE("%s: %d: Only video buffer queue is supported", __FUNCTION__, __LINE__);
+        return BAD_VALUE;
+    }
+
+    l.mParameters.videoBufferMode = videoBufferMode;
 
     return OK;
 }
@@ -1009,10 +1011,14 @@
             return INVALID_OPERATION;
     };
 
-    if (!params.storeMetadataInBuffers) {
-        ALOGE("%s: Camera %d: Recording only supported in metadata mode, but "
-                "non-metadata recording mode requested!", __FUNCTION__,
-                mCameraId);
+    if (params.videoBufferMode != VIDEO_BUFFER_MODE_BUFFER_QUEUE) {
+        ALOGE("%s: Camera %d: Recording only supported buffer queue mode, but "
+                "mode %d is requested!", __FUNCTION__, mCameraId, params.videoBufferMode);
+        return INVALID_OPERATION;
+    }
+
+    if (!mStreamingProcessor->haveValidRecordingWindow()) {
+        ALOGE("%s: No valid recording window", __FUNCTION__);
         return INVALID_OPERATION;
     }
 
@@ -1176,28 +1182,28 @@
 
     mCameraService->playSound(CameraService::SOUND_RECORDING_STOP);
 
-    // Remove recording stream to prevent it from slowing down takePicture later
-    if (!l.mParameters.recordingHint && l.mParameters.isJpegSizeOverridden()) {
-        res = stopStream();
-        if (res != OK) {
-            ALOGE("%s: Camera %d: Can't stop streaming: %s (%d)",
-                    __FUNCTION__, mCameraId, strerror(-res), res);
-        }
-        res = mDevice->waitUntilDrained();
-        if (res != OK) {
-            ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
-                    __FUNCTION__, mCameraId, strerror(-res), res);
-        }
-        // Clean up recording stream
-        res = mStreamingProcessor->deleteRecordingStream();
-        if (res != OK) {
-            ALOGE("%s: Camera %d: Unable to delete recording stream before "
-                    "stop preview: %s (%d)",
-                    __FUNCTION__, mCameraId, strerror(-res), res);
-        }
-        l.mParameters.recoverOverriddenJpegSize();
+    // Remove recording stream because the video target may be abandoned soon.
+    res = stopStream();
+    if (res != OK) {
+        ALOGE("%s: Camera %d: Can't stop streaming: %s (%d)",
+                __FUNCTION__, mCameraId, strerror(-res), res);
     }
 
+    res = mDevice->waitUntilDrained();
+    if (res != OK) {
+        ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)",
+                __FUNCTION__, mCameraId, strerror(-res), res);
+    }
+    // Clean up recording stream
+    res = mStreamingProcessor->deleteRecordingStream();
+    if (res != OK) {
+        ALOGE("%s: Camera %d: Unable to delete recording stream before "
+                "stop preview: %s (%d)",
+                __FUNCTION__, mCameraId, strerror(-res), res);
+    }
+    l.mParameters.recoverOverriddenJpegSize();
+
+    // Restart preview
     res = startPreviewL(l.mParameters, true);
     if (res != OK) {
         ALOGE("%s: Camera %d: Unable to return to preview",
@@ -1224,10 +1230,7 @@
 
 void Camera2Client::releaseRecordingFrame(const sp<IMemory>& mem) {
     ATRACE_CALL();
-    Mutex::Autolock icl(mBinderSerializationLock);
-    if ( checkPid(__FUNCTION__) != OK) return;
-
-    mStreamingProcessor->releaseRecordingFrame(mem);
+    ALOGW("%s: Not supported in buffer queue mode.", __FUNCTION__);
 }
 
 status_t Camera2Client::autoFocus() {
@@ -1529,10 +1532,10 @@
         case CAMERA_CMD_PING:
             return commandPingL();
         case CAMERA_CMD_SET_VIDEO_BUFFER_COUNT:
-            return commandSetVideoBufferCountL(arg1);
         case CAMERA_CMD_SET_VIDEO_FORMAT:
-            return commandSetVideoFormatL(arg1,
-                    static_cast<android_dataspace>(arg2));
+            ALOGE("%s: command %d (arguments %d, %d) is not supported.",
+                    __FUNCTION__, cmd, arg1, arg2);
+            return BAD_VALUE;
         default:
             ALOGE("%s: Unknown command %d (arguments %d, %d)",
                     __FUNCTION__, cmd, arg1, arg2);
@@ -1674,27 +1677,6 @@
     }
 }
 
-status_t Camera2Client::commandSetVideoBufferCountL(size_t count) {
-    if (recordingEnabledL()) {
-        ALOGE("%s: Camera %d: Error setting video buffer count after "
-                "recording was started", __FUNCTION__, mCameraId);
-        return INVALID_OPERATION;
-    }
-
-    return mStreamingProcessor->setRecordingBufferCount(count);
-}
-
-status_t Camera2Client::commandSetVideoFormatL(int format,
-        android_dataspace dataspace) {
-    if (recordingEnabledL()) {
-        ALOGE("%s: Camera %d: Error setting video format after "
-                "recording was started", __FUNCTION__, mCameraId);
-        return INVALID_OPERATION;
-    }
-
-    return mStreamingProcessor->setRecordingFormat(format, dataspace);
-}
-
 void Camera2Client::notifyError(ICameraDeviceCallbacks::CameraErrorCode errorCode,
         const CaptureResultExtras& resultExtras) {
     int32_t err = CAMERA_ERROR_UNKNOWN;
@@ -2118,6 +2100,84 @@
     return res;
 }
 
+status_t Camera2Client::setVideoTarget(const sp<IGraphicBufferProducer>& bufferProducer) {
+    ATRACE_CALL();
+    ALOGV("%s: E", __FUNCTION__);
+    Mutex::Autolock icl(mBinderSerializationLock);
+    status_t res;
+    if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
+
+    sp<IBinder> binder = IInterface::asBinder(bufferProducer);
+    if (binder == mVideoSurface) {
+        ALOGV("%s: Camera %d: New video window is same as old video window",
+                __FUNCTION__, mCameraId);
+        return NO_ERROR;
+    }
+
+    sp<Surface> window;
+    int format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+    android_dataspace dataSpace = HAL_DATASPACE_BT709;
+
+    if (bufferProducer != nullptr) {
+        // Using controlledByApp flag to ensure that the buffer queue remains in
+        // async mode for the old camera API, where many applications depend
+        // on that behavior.
+        window = new Surface(bufferProducer, /*controlledByApp*/ true);
+
+        ANativeWindow *anw = window.get();
+
+        if ((res = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
+            ALOGE("%s: Failed to query Surface format", __FUNCTION__);
+            return res;
+        }
+
+        if ((res = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE,
+                                reinterpret_cast<int*>(&dataSpace))) != OK) {
+            ALOGE("%s: Failed to query Surface dataSpace", __FUNCTION__);
+            return res;
+        }
+    }
+
+    Parameters::State state;
+    {
+        SharedParameters::Lock l(mParameters);
+        state = l.mParameters.state;
+    }
+
+    switch (state) {
+        case Parameters::STOPPED:
+        case Parameters::WAITING_FOR_PREVIEW_WINDOW:
+        case Parameters::PREVIEW:
+            // OK
+            break;
+        case Parameters::DISCONNECTED:
+        case Parameters::RECORD:
+        case Parameters::STILL_CAPTURE:
+        case Parameters::VIDEO_SNAPSHOT:
+        default:
+            ALOGE("%s: Camera %d: Cannot set video target while in state %s",
+                    __FUNCTION__, mCameraId,
+                    Parameters::getStateName(state));
+            return INVALID_OPERATION;
+    }
+
+    mVideoSurface = binder;
+    res = mStreamingProcessor->setRecordingWindow(window);
+    if (res != OK) {
+        ALOGE("%s: Unable to set new recording window: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
+
+    {
+        SharedParameters::Lock l(mParameters);
+        l.mParameters.videoFormat = format;
+        l.mParameters.videoDataSpace = dataSpace;
+    }
+
+    return OK;
+}
+
 const char* Camera2Client::kAutofocusLabel = "autofocus";
 const char* Camera2Client::kTakepictureLabel = "take_picture";
 
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index e1e18c9..428dca1 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -66,7 +66,7 @@
     virtual status_t        startPreview();
     virtual void            stopPreview();
     virtual bool            previewEnabled();
-    virtual status_t        storeMetaDataInBuffers(bool enabled);
+    virtual status_t        setVideoBufferMode(int32_t videoBufferMode);
     virtual status_t        startRecording();
     virtual void            stopRecording();
     virtual bool            recordingEnabled();
@@ -79,6 +79,7 @@
     virtual status_t        sendCommand(int32_t cmd, int32_t arg1, int32_t arg2);
     virtual void            notifyError(ICameraDeviceCallbacks::CameraErrorCode errorCode,
                                         const CaptureResultExtras& resultExtras);
+    virtual status_t        setVideoTarget(const sp<IGraphicBufferProducer>& bufferProducer);
 
     /**
      * Interface used by CameraService
@@ -194,6 +195,7 @@
     /* Preview/Recording related members */
 
     sp<IBinder> mPreviewSurface;
+    sp<IBinder> mVideoSurface;
     sp<camera2::StreamingProcessor> mStreamingProcessor;
 
     /** Preview callback related members */
diff --git a/services/camera/libcameraservice/api1/CameraClient.cpp b/services/camera/libcameraservice/api1/CameraClient.cpp
index 30b462b..ced9d8c 100644
--- a/services/camera/libcameraservice/api1/CameraClient.cpp
+++ b/services/camera/libcameraservice/api1/CameraClient.cpp
@@ -482,14 +482,24 @@
     mHardware->releaseRecordingFrame(mem);
 }
 
-status_t CameraClient::storeMetaDataInBuffers(bool enabled)
-{
-    LOG1("storeMetaDataInBuffers: %s", enabled? "true": "false");
+status_t CameraClient::setVideoBufferMode(int32_t videoBufferMode) {
+    LOG1("setVideoBufferMode: %d", videoBufferMode);
+    bool enableMetadataInBuffers = false;
+
+    if (videoBufferMode == VIDEO_BUFFER_MODE_DATA_CALLBACK_METADATA) {
+        enableMetadataInBuffers = true;
+    } else if (videoBufferMode != VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV) {
+        ALOGE("%s: %d: videoBufferMode %d is not supported.", __FUNCTION__, __LINE__,
+                videoBufferMode);
+        return BAD_VALUE;
+    }
+
     Mutex::Autolock lock(mLock);
     if (checkPidAndHardware() != NO_ERROR) {
         return UNKNOWN_ERROR;
     }
-    return mHardware->storeMetaDataInBuffers(enabled);
+
+    return mHardware->storeMetaDataInBuffers(enableMetadataInBuffers);
 }
 
 bool CameraClient::previewEnabled() {
@@ -991,4 +1001,9 @@
     return -1;
 }
 
+status_t CameraClient::setVideoTarget(const sp<IGraphicBufferProducer>& bufferProducer) {
+    ALOGE("%s: %d: CameraClient doesn't support setting a video target.", __FUNCTION__, __LINE__);
+    return INVALID_OPERATION;
+}
+
 }; // namespace android
diff --git a/services/camera/libcameraservice/api1/CameraClient.h b/services/camera/libcameraservice/api1/CameraClient.h
index 95616b2..66e57d5 100644
--- a/services/camera/libcameraservice/api1/CameraClient.h
+++ b/services/camera/libcameraservice/api1/CameraClient.h
@@ -44,7 +44,7 @@
     virtual status_t        startPreview();
     virtual void            stopPreview();
     virtual bool            previewEnabled();
-    virtual status_t        storeMetaDataInBuffers(bool enabled);
+    virtual status_t        setVideoBufferMode(int32_t videoBufferMode);
     virtual status_t        startRecording();
     virtual void            stopRecording();
     virtual bool            recordingEnabled();
@@ -55,6 +55,7 @@
     virtual status_t        setParameters(const String8& params);
     virtual String8         getParameters() const;
     virtual status_t        sendCommand(int32_t cmd, int32_t arg1, int32_t arg2);
+    virtual status_t        setVideoTarget(const sp<IGraphicBufferProducer>& bufferProducer);
 
     // Interface used by CameraService
     CameraClient(const sp<CameraService>& cameraService,
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index f901dda..7a97396 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -30,6 +30,7 @@
 #include "Parameters.h"
 #include "system/camera.h"
 #include "hardware/camera_common.h"
+#include <camera/ICamera.h>
 #include <media/MediaProfiles.h>
 #include <media/mediarecorder.h>
 
@@ -870,8 +871,9 @@
     }
 
     // Set up initial state for non-Camera.Parameters state variables
-
-    storeMetadataInBuffers = true;
+    videoFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+    videoDataSpace = HAL_DATASPACE_BT709;
+    videoBufferMode = ICamera::VIDEO_BUFFER_MODE_DATA_CALLBACK_YUV;
     playShutterSound = true;
     enableFaceDetect = false;
 
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.h b/services/camera/libcameraservice/api1/client2/Parameters.h
index c5bbf63..c437722 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.h
+++ b/services/camera/libcameraservice/api1/client2/Parameters.h
@@ -131,7 +131,8 @@
 
     int zoom;
 
-    int videoWidth, videoHeight;
+    int videoWidth, videoHeight, videoFormat;
+    android_dataspace videoDataSpace;
 
     bool recordingHint;
     bool videoStabilization;
@@ -141,7 +142,8 @@
 
     // These parameters are also part of the camera API-visible state, but not
     // directly listed in Camera.Parameters
-    bool storeMetadataInBuffers;
+    // One of ICamera::VIDEO_BUFFER_MODE_*
+    int32_t videoBufferMode;
     bool playShutterSound;
     bool enableFaceDetect;
 
diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
index 88a0f50..211bdae 100644
--- a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
@@ -49,13 +49,7 @@
         mPreviewRequestId(Camera2Client::kPreviewRequestIdStart),
         mPreviewStreamId(NO_STREAM),
         mRecordingRequestId(Camera2Client::kRecordingRequestIdStart),
-        mRecordingStreamId(NO_STREAM),
-        mRecordingFrameAvailable(false),
-        mRecordingHeapCount(kDefaultRecordingHeapCount),
-        mRecordingHeapFree(kDefaultRecordingHeapCount),
-        mRecordingFormat(kDefaultRecordingFormat),
-        mRecordingDataSpace(kDefaultRecordingDataSpace),
-        mRecordingGrallocUsage(kDefaultRecordingGrallocUsage)
+        mRecordingStreamId(NO_STREAM)
 {
 }
 
@@ -78,11 +72,30 @@
     return OK;
 }
 
+status_t StreamingProcessor::setRecordingWindow(sp<Surface> window) {
+    ATRACE_CALL();
+    status_t res;
+
+    res = deleteRecordingStream();
+    if (res != OK) return res;
+
+    Mutex::Autolock m(mMutex);
+
+    mRecordingWindow = window;
+
+    return OK;
+}
+
 bool StreamingProcessor::haveValidPreviewWindow() const {
     Mutex::Autolock m(mMutex);
     return mPreviewWindow != 0;
 }
 
+bool StreamingProcessor::haveValidRecordingWindow() const {
+    Mutex::Autolock m(mMutex);
+    return mRecordingWindow != nullptr;
+}
+
 status_t StreamingProcessor::updatePreviewRequest(const Parameters &params) {
     ATRACE_CALL();
     status_t res;
@@ -244,86 +257,6 @@
     return mPreviewStreamId;
 }
 
-status_t StreamingProcessor::setRecordingBufferCount(size_t count) {
-    ATRACE_CALL();
-    // Make sure we can support this many buffer slots
-    if (count > BufferQueue::NUM_BUFFER_SLOTS) {
-        ALOGE("%s: Camera %d: Too many recording buffers requested: %zu, max %d",
-                __FUNCTION__, mId, count, BufferQueue::NUM_BUFFER_SLOTS);
-        return BAD_VALUE;
-    }
-
-    Mutex::Autolock m(mMutex);
-
-    ALOGV("%s: Camera %d: New recording buffer count from encoder: %zu",
-            __FUNCTION__, mId, count);
-
-    // Need to re-size consumer and heap
-    if (mRecordingHeapCount != count) {
-        ALOGV("%s: Camera %d: Resetting recording heap and consumer",
-            __FUNCTION__, mId);
-
-        if (isStreamActive(mActiveStreamIds, mRecordingStreamId)) {
-            ALOGE("%s: Camera %d: Setting recording buffer count when "
-                    "recording stream is already active!", __FUNCTION__,
-                    mId);
-            return INVALID_OPERATION;
-        }
-
-        releaseAllRecordingFramesLocked();
-
-        if (mRecordingHeap != 0) {
-            mRecordingHeap.clear();
-        }
-        mRecordingHeapCount = count;
-        mRecordingHeapFree = count;
-
-        mRecordingConsumer.clear();
-    }
-
-    return OK;
-}
-
-status_t StreamingProcessor::setRecordingFormat(int format,
-        android_dataspace dataSpace) {
-    ATRACE_CALL();
-
-    Mutex::Autolock m(mMutex);
-
-    ALOGV("%s: Camera %d: New recording format/dataspace from encoder: %X, %X",
-            __FUNCTION__, mId, format, dataSpace);
-
-    mRecordingFormat = format;
-    mRecordingDataSpace = dataSpace;
-    int prevGrallocUsage = mRecordingGrallocUsage;
-    if (mRecordingFormat == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
-        mRecordingGrallocUsage = GRALLOC_USAGE_HW_VIDEO_ENCODER;
-    } else {
-        mRecordingGrallocUsage = GRALLOC_USAGE_SW_READ_OFTEN;
-    }
-
-    ALOGV("%s: Camera %d: New recording gralloc usage: %08X", __FUNCTION__, mId,
-            mRecordingGrallocUsage);
-
-    if (prevGrallocUsage != mRecordingGrallocUsage) {
-        ALOGV("%s: Camera %d: Resetting recording consumer for new usage",
-            __FUNCTION__, mId);
-
-        if (isStreamActive(mActiveStreamIds, mRecordingStreamId)) {
-            ALOGE("%s: Camera %d: Changing recording format when "
-                    "recording stream is already active!", __FUNCTION__,
-                    mId);
-            return INVALID_OPERATION;
-        }
-
-        releaseAllRecordingFramesLocked();
-
-        mRecordingConsumer.clear();
-    }
-
-    return OK;
-}
-
 status_t StreamingProcessor::updateRecordingRequest(const Parameters &params) {
     ATRACE_CALL();
     status_t res;
@@ -395,11 +328,11 @@
         return res;
     }
 
-    if (mRecordingConsumer == 0 ||
+    if (mRecordingWindow == nullptr ||
             currentWidth != (uint32_t)params.videoWidth ||
             currentHeight != (uint32_t)params.videoHeight ||
-            currentFormat != (uint32_t)mRecordingFormat ||
-            currentDataSpace != mRecordingDataSpace) {
+            currentFormat != (uint32_t)params.videoFormat ||
+            currentDataSpace != params.videoDataSpace) {
         *needsUpdate = true;
     }
     *needsUpdate = false;
@@ -417,26 +350,6 @@
         return INVALID_OPERATION;
     }
 
-    bool newConsumer = false;
-    if (mRecordingConsumer == 0) {
-        ALOGV("%s: Camera %d: Creating recording consumer with %zu + 1 "
-                "consumer-side buffers", __FUNCTION__, mId, mRecordingHeapCount);
-        // Create CPU buffer queue endpoint. We need one more buffer here so that we can
-        // always acquire and free a buffer when the heap is full; otherwise the consumer
-        // will have buffers in flight we'll never clear out.
-        sp<IGraphicBufferProducer> producer;
-        sp<IGraphicBufferConsumer> consumer;
-        BufferQueue::createBufferQueue(&producer, &consumer);
-        mRecordingConsumer = new BufferItemConsumer(consumer,
-                mRecordingGrallocUsage,
-                mRecordingHeapCount + 1);
-        mRecordingConsumer->setFrameAvailableListener(this);
-        mRecordingConsumer->setName(String8("Camera2-RecordingConsumer"));
-        mRecordingWindow = new Surface(producer);
-        newConsumer = true;
-        // Allocate memory later, since we don't know buffer size until receipt
-    }
-
     if (mRecordingStreamId != NO_STREAM) {
         // Check if stream parameters have to change
         uint32_t currentWidth, currentHeight;
@@ -453,9 +366,8 @@
         }
         if (currentWidth != (uint32_t)params.videoWidth ||
                 currentHeight != (uint32_t)params.videoHeight ||
-                currentFormat != (uint32_t)mRecordingFormat ||
-                currentDataSpace != mRecordingDataSpace ||
-                newConsumer) {
+                currentFormat != (uint32_t)params.videoFormat ||
+                currentDataSpace != params.videoDataSpace) {
             // TODO: Should wait to be sure previous recording has finished
             res = device->deleteStream(mRecordingStreamId);
 
@@ -475,10 +387,9 @@
     }
 
     if (mRecordingStreamId == NO_STREAM) {
-        mRecordingFrameCount = 0;
         res = device->createStream(mRecordingWindow,
                 params.videoWidth, params.videoHeight,
-                mRecordingFormat, mRecordingDataSpace,
+                params.videoFormat, params.videoDataSpace,
                 CAMERA3_STREAM_ROTATION_0, &mRecordingStreamId);
         if (res != OK) {
             ALOGE("%s: Camera %d: Can't create output stream for recording: "
@@ -542,20 +453,6 @@
 
     Mutex::Autolock m(mMutex);
 
-    // If a recording stream is being started up and no recording
-    // stream is active yet, free up any outstanding buffers left
-    // from the previous recording session. There should never be
-    // any, so if there are, warn about it.
-    bool isRecordingStreamIdle = !isStreamActive(mActiveStreamIds, mRecordingStreamId);
-    bool startRecordingStream = isStreamActive(outputStreams, mRecordingStreamId);
-    if (startRecordingStream && isRecordingStreamIdle) {
-        releaseAllRecordingFramesLocked();
-    }
-
-    ALOGV("%s: Camera %d: %s started, recording heap has %zu free of %zu",
-            __FUNCTION__, mId, (type == PREVIEW) ? "preview" : "recording",
-            mRecordingHeapFree, mRecordingHeapCount);
-
     CameraMetadata &request = (type == PREVIEW) ?
             mPreviewRequest : mRecordingRequest;
 
@@ -692,272 +589,6 @@
     return OK;
 }
 
-void StreamingProcessor::onFrameAvailable(const BufferItem& /*item*/) {
-    ATRACE_CALL();
-    Mutex::Autolock l(mMutex);
-    if (!mRecordingFrameAvailable) {
-        mRecordingFrameAvailable = true;
-        mRecordingFrameAvailableSignal.signal();
-    }
-
-}
-
-bool StreamingProcessor::threadLoop() {
-    status_t res;
-
-    {
-        Mutex::Autolock l(mMutex);
-        while (!mRecordingFrameAvailable) {
-            res = mRecordingFrameAvailableSignal.waitRelative(
-                mMutex, kWaitDuration);
-            if (res == TIMED_OUT) return true;
-        }
-        mRecordingFrameAvailable = false;
-    }
-
-    do {
-        res = processRecordingFrame();
-    } while (res == OK);
-
-    return true;
-}
-
-status_t StreamingProcessor::processRecordingFrame() {
-    ATRACE_CALL();
-    status_t res;
-    sp<Camera2Heap> recordingHeap;
-    size_t heapIdx = 0;
-    nsecs_t timestamp;
-
-    sp<Camera2Client> client = mClient.promote();
-    if (client == 0) {
-        // Discard frames during shutdown
-        BufferItem imgBuffer;
-        res = mRecordingConsumer->acquireBuffer(&imgBuffer, 0);
-        if (res != OK) {
-            if (res != BufferItemConsumer::NO_BUFFER_AVAILABLE) {
-                ALOGE("%s: Camera %d: Can't acquire recording buffer: %s (%d)",
-                        __FUNCTION__, mId, strerror(-res), res);
-            }
-            return res;
-        }
-        mRecordingConsumer->releaseBuffer(imgBuffer);
-        return OK;
-    }
-
-    {
-        /* acquire SharedParameters before mMutex so we don't dead lock
-            with Camera2Client code calling into StreamingProcessor */
-        SharedParameters::Lock l(client->getParameters());
-        Mutex::Autolock m(mMutex);
-        BufferItem imgBuffer;
-        res = mRecordingConsumer->acquireBuffer(&imgBuffer, 0);
-        if (res != OK) {
-            if (res != BufferItemConsumer::NO_BUFFER_AVAILABLE) {
-                ALOGE("%s: Camera %d: Can't acquire recording buffer: %s (%d)",
-                        __FUNCTION__, mId, strerror(-res), res);
-            }
-            return res;
-        }
-        timestamp = imgBuffer.mTimestamp;
-
-        mRecordingFrameCount++;
-        ALOGVV("OnRecordingFrame: Frame %d", mRecordingFrameCount);
-
-        if (l.mParameters.state != Parameters::RECORD &&
-                l.mParameters.state != Parameters::VIDEO_SNAPSHOT) {
-            ALOGV("%s: Camera %d: Discarding recording image buffers "
-                    "received after recording done", __FUNCTION__,
-                    mId);
-            mRecordingConsumer->releaseBuffer(imgBuffer);
-            return INVALID_OPERATION;
-        }
-
-        if (mRecordingHeap == 0) {
-            size_t payloadSize = sizeof(VideoNativeMetadata);
-            ALOGV("%s: Camera %d: Creating recording heap with %zu buffers of "
-                    "size %zu bytes", __FUNCTION__, mId,
-                    mRecordingHeapCount, payloadSize);
-
-            mRecordingHeap = new Camera2Heap(payloadSize, mRecordingHeapCount,
-                    "Camera2Client::RecordingHeap");
-            if (mRecordingHeap->mHeap->getSize() == 0) {
-                ALOGE("%s: Camera %d: Unable to allocate memory for recording",
-                        __FUNCTION__, mId);
-                mRecordingConsumer->releaseBuffer(imgBuffer);
-                return NO_MEMORY;
-            }
-            for (size_t i = 0; i < mRecordingBuffers.size(); i++) {
-                if (mRecordingBuffers[i].mSlot !=
-                        BufferItemConsumer::INVALID_BUFFER_SLOT) {
-                    ALOGE("%s: Camera %d: Non-empty recording buffers list!",
-                            __FUNCTION__, mId);
-                }
-            }
-            mRecordingBuffers.clear();
-            mRecordingBuffers.setCapacity(mRecordingHeapCount);
-            mRecordingBuffers.insertAt(0, mRecordingHeapCount);
-
-            mRecordingHeapHead = 0;
-            mRecordingHeapFree = mRecordingHeapCount;
-        }
-
-        if (mRecordingHeapFree == 0) {
-            ALOGE("%s: Camera %d: No free recording buffers, dropping frame",
-                    __FUNCTION__, mId);
-            mRecordingConsumer->releaseBuffer(imgBuffer);
-            return NO_MEMORY;
-        }
-
-        heapIdx = mRecordingHeapHead;
-        mRecordingHeapHead = (mRecordingHeapHead + 1) % mRecordingHeapCount;
-        mRecordingHeapFree--;
-
-        ALOGVV("%s: Camera %d: Timestamp %lld",
-                __FUNCTION__, mId, timestamp);
-
-        ssize_t offset;
-        size_t size;
-        sp<IMemoryHeap> heap =
-                mRecordingHeap->mBuffers[heapIdx]->getMemory(&offset,
-                        &size);
-
-        VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>(
-            (uint8_t*)heap->getBase() + offset);
-        payload->eType = kMetadataBufferTypeANWBuffer;
-        payload->pBuffer = imgBuffer.mGraphicBuffer->getNativeBuffer();
-        payload->nFenceFd = -1;
-
-        ALOGVV("%s: Camera %d: Sending out ANWBuffer %p",
-                __FUNCTION__, mId, payload->pBuffer);
-
-        mRecordingBuffers.replaceAt(imgBuffer, heapIdx);
-        recordingHeap = mRecordingHeap;
-    }
-
-    // Call outside locked parameters to allow re-entrancy from notification
-    Camera2Client::SharedCameraCallbacks::Lock l(client->mSharedCameraCallbacks);
-    if (l.mRemoteCallback != 0) {
-        l.mRemoteCallback->dataCallbackTimestamp(timestamp,
-                CAMERA_MSG_VIDEO_FRAME,
-                recordingHeap->mBuffers[heapIdx]);
-    } else {
-        ALOGW("%s: Camera %d: Remote callback gone", __FUNCTION__, mId);
-    }
-
-    return OK;
-}
-
-void StreamingProcessor::releaseRecordingFrame(const sp<IMemory>& mem) {
-    ATRACE_CALL();
-    status_t res;
-
-    Mutex::Autolock m(mMutex);
-    // Make sure this is for the current heap
-    ssize_t offset;
-    size_t size;
-    sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
-    if (heap->getHeapID() != mRecordingHeap->mHeap->getHeapID()) {
-        ALOGW("%s: Camera %d: Mismatched heap ID, ignoring release "
-                "(got %x, expected %x)", __FUNCTION__, mId,
-                heap->getHeapID(), mRecordingHeap->mHeap->getHeapID());
-        return;
-    }
-
-    VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>(
-        (uint8_t*)heap->getBase() + offset);
-
-    if (payload->eType != kMetadataBufferTypeANWBuffer) {
-        ALOGE("%s: Camera %d: Recording frame type invalid (got %x, expected %x)",
-                __FUNCTION__, mId, payload->eType,
-                kMetadataBufferTypeANWBuffer);
-        return;
-    }
-
-    // Release the buffer back to the recording queue
-    size_t itemIndex;
-    for (itemIndex = 0; itemIndex < mRecordingBuffers.size(); itemIndex++) {
-        const BufferItem item = mRecordingBuffers[itemIndex];
-        if (item.mSlot != BufferItemConsumer::INVALID_BUFFER_SLOT &&
-                item.mGraphicBuffer->getNativeBuffer() == payload->pBuffer) {
-                break;
-        }
-    }
-
-    if (itemIndex == mRecordingBuffers.size()) {
-        ALOGE("%s: Camera %d: Can't find returned ANW Buffer %p in list of "
-                "outstanding buffers", __FUNCTION__, mId,
-                payload->pBuffer);
-        return;
-    }
-
-    ALOGVV("%s: Camera %d: Freeing returned ANW buffer %p index %d", __FUNCTION__,
-            mId, payload->pBuffer, itemIndex);
-
-    res = mRecordingConsumer->releaseBuffer(mRecordingBuffers[itemIndex]);
-    if (res != OK) {
-        ALOGE("%s: Camera %d: Unable to free recording frame "
-                "(Returned ANW buffer: %p): %s (%d)", __FUNCTION__,
-                mId, payload->pBuffer, strerror(-res), res);
-        return;
-    }
-    mRecordingBuffers.replaceAt(itemIndex);
-
-    mRecordingHeapFree++;
-    ALOGV_IF(mRecordingHeapFree == mRecordingHeapCount,
-            "%s: Camera %d: All %d recording buffers returned",
-            __FUNCTION__, mId, mRecordingHeapCount);
-}
-
-void StreamingProcessor::releaseAllRecordingFramesLocked() {
-    ATRACE_CALL();
-    status_t res;
-
-    if (mRecordingConsumer == 0) {
-        return;
-    }
-
-    ALOGV("%s: Camera %d: Releasing all recording buffers", __FUNCTION__,
-            mId);
-
-    size_t releasedCount = 0;
-    for (size_t itemIndex = 0; itemIndex < mRecordingBuffers.size(); itemIndex++) {
-        const BufferItem item = mRecordingBuffers[itemIndex];
-        if (item.mSlot != BufferItemConsumer::INVALID_BUFFER_SLOT) {
-            res = mRecordingConsumer->releaseBuffer(mRecordingBuffers[itemIndex]);
-            if (res != OK) {
-                ALOGE("%s: Camera %d: Unable to free recording frame "
-                        "(buffer_handle_t: %p): %s (%d)", __FUNCTION__,
-                        mId, item.mGraphicBuffer->handle, strerror(-res), res);
-            }
-            mRecordingBuffers.replaceAt(itemIndex);
-            releasedCount++;
-        }
-    }
-
-    if (releasedCount > 0) {
-        ALOGW("%s: Camera %d: Force-freed %zu outstanding buffers "
-                "from previous recording session", __FUNCTION__, mId, releasedCount);
-        ALOGE_IF(releasedCount != mRecordingHeapCount - mRecordingHeapFree,
-            "%s: Camera %d: Force-freed %zu buffers, but expected %zu",
-            __FUNCTION__, mId, releasedCount, mRecordingHeapCount - mRecordingHeapFree);
-    }
-
-    mRecordingHeapHead = 0;
-    mRecordingHeapFree = mRecordingHeapCount;
-}
-
-bool StreamingProcessor::isStreamActive(const Vector<int32_t> &streams,
-        int32_t recordingStreamId) {
-    for (size_t i = 0; i < streams.size(); i++) {
-        if (streams[i] == recordingStreamId) {
-            return true;
-        }
-    }
-    return false;
-}
-
-
 status_t StreamingProcessor::dump(int fd, const Vector<String16>& /*args*/) {
     String8 result;
 
diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.h b/services/camera/libcameraservice/api1/client2/StreamingProcessor.h
index 0b17eae..57e6389 100644
--- a/services/camera/libcameraservice/api1/client2/StreamingProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.h
@@ -37,24 +37,22 @@
 /**
  * Management and processing for preview and recording streams
  */
-class StreamingProcessor:
-            public Thread, public BufferItemConsumer::FrameAvailableListener {
+class StreamingProcessor : public virtual VirtualLightRefBase {
   public:
     StreamingProcessor(sp<Camera2Client> client);
     ~StreamingProcessor();
 
     status_t setPreviewWindow(sp<Surface> window);
+    status_t setRecordingWindow(sp<Surface> window);
 
     bool haveValidPreviewWindow() const;
+    bool haveValidRecordingWindow() const;
 
     status_t updatePreviewRequest(const Parameters &params);
     status_t updatePreviewStream(const Parameters &params);
     status_t deletePreviewStream();
     int getPreviewStreamId() const;
 
-    status_t setRecordingBufferCount(size_t count);
-    status_t setRecordingFormat(int format, android_dataspace_t dataspace);
-
     status_t updateRecordingRequest(const Parameters &params);
     // If needsUpdate is set to true, a updateRecordingStream call with params will recreate
     // recording stream
@@ -81,11 +79,6 @@
     status_t getActiveRequestId() const;
     status_t incrementStreamingIds();
 
-    // Callback for new recording frames from HAL
-    virtual void onFrameAvailable(const BufferItem& item);
-    // Callback from stagefright which returns used recording frames
-    void releaseRecordingFrame(const sp<IMemory>& mem);
-
     status_t dump(int fd, const Vector<String16>& args);
 
   private:
@@ -110,47 +103,10 @@
     CameraMetadata mPreviewRequest;
     sp<Surface> mPreviewWindow;
 
-    // Recording-related members
-    static const nsecs_t kWaitDuration = 50000000; // 50 ms
-
     int32_t mRecordingRequestId;
     int mRecordingStreamId;
-    int mRecordingFrameCount;
-    sp<BufferItemConsumer> mRecordingConsumer;
     sp<Surface>  mRecordingWindow;
     CameraMetadata mRecordingRequest;
-    sp<camera2::Camera2Heap> mRecordingHeap;
-
-    bool mRecordingFrameAvailable;
-    Condition mRecordingFrameAvailableSignal;
-
-    static const size_t kDefaultRecordingHeapCount = 8;
-    size_t mRecordingHeapCount;
-    Vector<BufferItem> mRecordingBuffers;
-    size_t mRecordingHeapHead, mRecordingHeapFree;
-
-    static const int kDefaultRecordingFormat =
-            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
-    int mRecordingFormat;
-
-    static const android_dataspace kDefaultRecordingDataSpace =
-            HAL_DATASPACE_BT709;
-    android_dataspace mRecordingDataSpace;
-
-    static const int kDefaultRecordingGrallocUsage =
-            GRALLOC_USAGE_HW_VIDEO_ENCODER;
-    int mRecordingGrallocUsage;
-
-    virtual bool threadLoop();
-
-    status_t processRecordingFrame();
-
-    // Unilaterally free any buffers still outstanding to stagefright
-    void releaseAllRecordingFramesLocked();
-
-    // Determine if the specified stream is currently in use
-    static bool isStreamActive(const Vector<int32_t> &streams,
-            int32_t recordingStreamId);
 };