Camera1 API: Support SW encoders for new camera HALs

- Support new set video format/dataspace command in camera service
  - HALv3: Select gralloc usage flags based on format
  - HALv1: Pass format command directly to HAL layer
- Use format/dataspace command in CameraSource (caller-side sketch below)
- Switch all API1 recording to use metadata mode
- Switch all >= HALv2 API1 recording to use kMetadataBufferTypeANWBuffer
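
For reference, a caller such as CameraSource is expected to send the new
command before recording starts. A rough caller-side sketch, assuming the
usual camera/ICamera.h and system/graphics.h definitions (the helper name
requestSwEncoderFormat is hypothetical; the real CameraSource plumbing is
not part of this diff):

    // Sketch only: request a CPU-readable recording format so a SW
    // encoder can map the frames; the camera service picks gralloc
    // usage flags accordingly.
    status_t requestSwEncoderFormat(const sp<ICamera>& camera) {
        return camera->sendCommand(CAMERA_CMD_SET_VIDEO_FORMAT,
                HAL_PIXEL_FORMAT_YCbCr_420_888,  // example non-opaque format
                HAL_DATASPACE_BT709);            // matching dataspace
    }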

Bug: 13222807
Change-Id: I2e609b92c65792611bb1dab09e0c41c363ebbc42
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index f2d6ab2..ca14cdb 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -1559,6 +1559,9 @@
             return commandPingL();
         case CAMERA_CMD_SET_VIDEO_BUFFER_COUNT:
             return commandSetVideoBufferCountL(arg1);
+        case CAMERA_CMD_SET_VIDEO_FORMAT:
+            return commandSetVideoFormatL(arg1,
+                    static_cast<android_dataspace>(arg2));
         default:
             ALOGE("%s: Unknown command %d (arguments %d, %d)",
                     __FUNCTION__, cmd, arg1, arg2);
@@ -1710,6 +1713,17 @@
     return mStreamingProcessor->setRecordingBufferCount(count);
 }
 
+status_t Camera2Client::commandSetVideoFormatL(int format,
+        android_dataspace dataSpace) {
+    if (recordingEnabledL()) {
+        ALOGE("%s: Camera %d: Error setting video format after "
+                "recording was started", __FUNCTION__, mCameraId);
+        return INVALID_OPERATION;
+    }
+
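+    // Hand the encoder's preferred format/dataspace to the streaming
+    // processor so the recording stream can be configured before start.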
+    return mStreamingProcessor->setRecordingFormat(format, dataSpace);
+}
+
 void Camera2Client::notifyError(ICameraDeviceCallbacks::CameraErrorCode errorCode,
         const CaptureResultExtras& resultExtras) {
     int32_t err = CAMERA_ERROR_UNKNOWN;
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index 3784aab..c6df228 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -167,6 +167,7 @@
     status_t commandEnableFocusMoveMsgL(bool enable);
     status_t commandPingL();
     status_t commandSetVideoBufferCountL(size_t count);
+    status_t commandSetVideoFormatL(int format, android_dataspace dataSpace);
 
     // Current camera device configuration
     camera2::SharedParameters mParameters;
diff --git a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
index 88c5811..143cc61 100644
--- a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
@@ -123,7 +123,7 @@
         // Check if stream parameters have to change
         uint32_t currentWidth, currentHeight, currentFormat;
         res = device->getStreamInfo(mCallbackStreamId,
-                &currentWidth, &currentHeight, &currentFormat);
+                &currentWidth, &currentHeight, &currentFormat, 0);
         if (res != OK) {
             ALOGE("%s: Camera %d: Error querying callback output stream info: "
                     "%s (%d)", __FUNCTION__, mId,
diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
index 34798bf..88987f9 100644
--- a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
@@ -115,7 +115,7 @@
         // Check if stream parameters have to change
         uint32_t currentWidth, currentHeight;
         res = device->getStreamInfo(mCaptureStreamId,
-                &currentWidth, &currentHeight, 0);
+                &currentWidth, &currentHeight, 0, 0);
         if (res != OK) {
             ALOGE("%s: Camera %d: Error querying capture output stream info: "
                     "%s (%d)", __FUNCTION__,
diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
index b6071f6..36d143b 100644
--- a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
@@ -25,11 +25,12 @@
 #define ALOGVV(...) ((void)0)
 #endif
 
+#include <cutils/properties.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
 #include <gui/BufferItem.h>
 #include <gui/Surface.h>
-#include <media/hardware/MetadataBufferType.h>
+#include <media/hardware/HardwareAPI.h>
 
 #include "common/CameraDeviceBase.h"
 #include "api1/Camera2Client.h"
@@ -51,7 +52,10 @@
         mRecordingStreamId(NO_STREAM),
         mRecordingFrameAvailable(false),
         mRecordingHeapCount(kDefaultRecordingHeapCount),
-        mRecordingHeapFree(kDefaultRecordingHeapCount)
+        mRecordingHeapFree(kDefaultRecordingHeapCount),
+        mRecordingFormat(kDefaultRecordingFormat),
+        mRecordingDataSpace(kDefaultRecordingDataSpace),
+        mRecordingGrallocUsage(kDefaultRecordingGrallocUsage)
 {
 }
 
@@ -151,7 +155,7 @@
         // Check if stream parameters have to change
         uint32_t currentWidth, currentHeight;
         res = device->getStreamInfo(mPreviewStreamId,
-                &currentWidth, &currentHeight, 0);
+                &currentWidth, &currentHeight, 0, 0);
         if (res != OK) {
             ALOGE("%s: Camera %d: Error querying preview stream info: "
                     "%s (%d)", __FUNCTION__, mId, strerror(-res), res);
@@ -280,6 +284,46 @@
     return OK;
 }
 
+status_t StreamingProcessor::setRecordingFormat(int format,
+        android_dataspace dataSpace) {
+    ATRACE_CALL();
+
+    Mutex::Autolock m(mMutex);
+
+    ALOGV("%s: Camera %d: New recording format/dataspace from encoder: %X, %X",
+            __FUNCTION__, mId, format, dataSpace);
+
+    mRecordingFormat = format;
+    mRecordingDataSpace = dataSpace;
+    int prevGrallocUsage = mRecordingGrallocUsage;
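+    // Pick gralloc usage to match the consumer of the recording buffers:
+    // the HW video encoder for opaque buffers, CPU-readable memory
+    // otherwise so a SW encoder can map the frames.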
+    if (mRecordingFormat == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
+        mRecordingGrallocUsage = GRALLOC_USAGE_HW_VIDEO_ENCODER;
+    } else {
+        mRecordingGrallocUsage = GRALLOC_USAGE_SW_READ_OFTEN;
+    }
+
+    ALOGV("%s: Camera %d: New recording gralloc usage: %08X", __FUNCTION__, mId,
+            mRecordingGrallocUsage);
+
+    if (prevGrallocUsage != mRecordingGrallocUsage) {
+        ALOGV("%s: Camera %d: Resetting recording consumer for new usage",
+            __FUNCTION__, mId);
+
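+        // A usage change means the BufferItemConsumer must be recreated;
+        // refuse to do that while the recording stream is still active.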
+        if (isStreamActive(mActiveStreamIds, mRecordingStreamId)) {
+            ALOGE("%s: Camera %d: Changing recording format when "
+                    "recording stream is already active!", __FUNCTION__,
+                    mId);
+            return INVALID_OPERATION;
+        }
+
+        releaseAllRecordingFramesLocked();
+
+        mRecordingConsumer.clear();
+    }
+
+    return OK;
+}
+
 status_t StreamingProcessor::updateRecordingRequest(const Parameters &params) {
     ATRACE_CALL();
     status_t res;
@@ -340,9 +384,10 @@
         return INVALID_OPERATION;
     }
 
-    uint32_t currentWidth, currentHeight;
+    uint32_t currentWidth, currentHeight, currentFormat;
+    android_dataspace currentDataSpace;
     res = device->getStreamInfo(mRecordingStreamId,
-            &currentWidth, &currentHeight, 0);
+            &currentWidth, &currentHeight, &currentFormat, &currentDataSpace);
     if (res != OK) {
         ALOGE("%s: Camera %d: Error querying recording output stream info: "
                 "%s (%d)", __FUNCTION__, mId,
@@ -350,8 +395,11 @@
         return res;
     }
 
-    if (mRecordingConsumer == 0 || currentWidth != (uint32_t)params.videoWidth ||
-            currentHeight != (uint32_t)params.videoHeight) {
+    if (mRecordingConsumer == 0 ||
+            currentWidth != (uint32_t)params.videoWidth ||
+            currentHeight != (uint32_t)params.videoHeight ||
+            currentFormat != (uint32_t)mRecordingFormat ||
+            currentDataSpace != mRecordingDataSpace) {
         *needsUpdate = true;
+        return res;
     }
     *needsUpdate = false;
@@ -380,7 +428,7 @@
         sp<IGraphicBufferConsumer> consumer;
         BufferQueue::createBufferQueue(&producer, &consumer);
         mRecordingConsumer = new BufferItemConsumer(consumer,
-                GRALLOC_USAGE_HW_VIDEO_ENCODER,
+                mRecordingGrallocUsage,
                 mRecordingHeapCount + 1);
         mRecordingConsumer->setFrameAvailableListener(this);
         mRecordingConsumer->setName(String8("Camera2-RecordingConsumer"));
@@ -392,8 +440,11 @@
     if (mRecordingStreamId != NO_STREAM) {
         // Check if stream parameters have to change
         uint32_t currentWidth, currentHeight;
+        uint32_t currentFormat;
+        android_dataspace currentDataSpace;
         res = device->getStreamInfo(mRecordingStreamId,
-                &currentWidth, &currentHeight, 0);
+                &currentWidth, &currentHeight,
+                &currentFormat, &currentDataSpace);
         if (res != OK) {
             ALOGE("%s: Camera %d: Error querying recording output stream info: "
                     "%s (%d)", __FUNCTION__, mId,
@@ -401,7 +452,10 @@
             return res;
         }
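+        // Recreate the stream if the size, format, or dataspace no longer
+        // match what the encoder requested, or if the consumer was reset.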
         if (currentWidth != (uint32_t)params.videoWidth ||
-                currentHeight != (uint32_t)params.videoHeight || newConsumer) {
+                currentHeight != (uint32_t)params.videoHeight ||
+                currentFormat != (uint32_t)mRecordingFormat ||
+                currentDataSpace != mRecordingDataSpace ||
+                newConsumer) {
             // TODO: Should wait to be sure previous recording has finished
             res = device->deleteStream(mRecordingStreamId);
 
@@ -422,11 +476,9 @@
 
     if (mRecordingStreamId == NO_STREAM) {
         mRecordingFrameCount = 0;
-        // Selecting BT.709 colorspace by default
-        // TODO: Wire this in from encoder side
         res = device->createStream(mRecordingWindow,
                 params.videoWidth, params.videoHeight,
-                CAMERA2_HAL_PIXEL_FORMAT_OPAQUE, HAL_DATASPACE_BT709,
+                mRecordingFormat, mRecordingDataSpace,
                 CAMERA3_STREAM_ROTATION_0, &mRecordingStreamId);
         if (res != OK) {
             ALOGE("%s: Camera %d: Can't create output stream for recording: "
@@ -722,12 +774,12 @@
         }
 
         if (mRecordingHeap == 0) {
-            const size_t bufferSize = 4 + sizeof(buffer_handle_t);
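+            // Each slot in the recording heap holds one VideoNativeMetadata
+            // payload for the encoder.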
+            size_t payloadSize = sizeof(VideoNativeMetadata);
             ALOGV("%s: Camera %d: Creating recording heap with %zu buffers of "
                     "size %zu bytes", __FUNCTION__, mId,
-                    mRecordingHeapCount, bufferSize);
+                    mRecordingHeapCount, payloadSize);
 
-            mRecordingHeap = new Camera2Heap(bufferSize, mRecordingHeapCount,
+            mRecordingHeap = new Camera2Heap(payloadSize, mRecordingHeapCount,
                     "Camera2Client::RecordingHeap");
             if (mRecordingHeap->mHeap->getSize() == 0) {
                 ALOGE("%s: Camera %d: Unable to allocate memory for recording",
@@ -750,7 +802,7 @@
             mRecordingHeapFree = mRecordingHeapCount;
         }
 
-        if ( mRecordingHeapFree == 0) {
+        if (mRecordingHeapFree == 0) {
             ALOGE("%s: Camera %d: No free recording buffers, dropping frame",
                     __FUNCTION__, mId);
             mRecordingConsumer->releaseBuffer(imgBuffer);
@@ -770,13 +822,15 @@
                 mRecordingHeap->mBuffers[heapIdx]->getMemory(&offset,
                         &size);
 
-        uint8_t *data = (uint8_t*)heap->getBase() + offset;
-        uint32_t type = kMetadataBufferTypeGrallocSource;
-        *((uint32_t*)data) = type;
-        *((buffer_handle_t*)(data + 4)) = imgBuffer.mGraphicBuffer->handle;
-        ALOGVV("%s: Camera %d: Sending out buffer_handle_t %p",
-                __FUNCTION__, mId,
-                imgBuffer.mGraphicBuffer->handle);
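+        // Pack the frame as ANativeWindowBuffer metadata
+        // (kMetadataBufferTypeANWBuffer); no acquire fence is attached.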
+        VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>(
+            (uint8_t*)heap->getBase() + offset);
+        payload->eType = kMetadataBufferTypeANWBuffer;
+        payload->pBuffer = imgBuffer.mGraphicBuffer->getNativeBuffer();
+        payload->nFenceFd = -1;
+
+        ALOGVV("%s: Camera %d: Sending out ANWBuffer %p",
+                __FUNCTION__, mId, payload->pBuffer);
+
         mRecordingBuffers.replaceAt(imgBuffer, heapIdx);
         recordingHeap = mRecordingHeap;
     }
@@ -809,42 +863,42 @@
                 heap->getHeapID(), mRecordingHeap->mHeap->getHeapID());
         return;
     }
-    uint8_t *data = (uint8_t*)heap->getBase() + offset;
-    uint32_t type = *(uint32_t*)data;
-    if (type != kMetadataBufferTypeGrallocSource) {
+
+    VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>(
+        (uint8_t*)heap->getBase() + offset);
+
+    if (payload->eType != kMetadataBufferTypeANWBuffer) {
         ALOGE("%s: Camera %d: Recording frame type invalid (got %x, expected %x)",
-                __FUNCTION__, mId, type,
-                kMetadataBufferTypeGrallocSource);
+                __FUNCTION__, mId, payload->eType,
+                kMetadataBufferTypeANWBuffer);
         return;
     }
 
     // Release the buffer back to the recording queue
-
-    buffer_handle_t imgHandle = *(buffer_handle_t*)(data + 4);
-
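+    // Find the outstanding BufferItem whose ANativeWindowBuffer matches the
+    // returned payload so the right slot can be released to the consumer.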
     size_t itemIndex;
     for (itemIndex = 0; itemIndex < mRecordingBuffers.size(); itemIndex++) {
         const BufferItem item = mRecordingBuffers[itemIndex];
         if (item.mBuf != BufferItemConsumer::INVALID_BUFFER_SLOT &&
-                item.mGraphicBuffer->handle == imgHandle) {
+                item.mGraphicBuffer->getNativeBuffer() == payload->pBuffer) {
             break;
         }
     }
+
     if (itemIndex == mRecordingBuffers.size()) {
-        ALOGE("%s: Camera %d: Can't find buffer_handle_t %p in list of "
+        ALOGE("%s: Camera %d: Can't find returned ANW Buffer %p in list of "
                 "outstanding buffers", __FUNCTION__, mId,
-                imgHandle);
+                payload->pBuffer);
         return;
     }
 
-    ALOGVV("%s: Camera %d: Freeing buffer_handle_t %p", __FUNCTION__,
-            mId, imgHandle);
+    ALOGVV("%s: Camera %d: Freeing returned ANW buffer %p index %zu", __FUNCTION__,
+            mId, payload->pBuffer, itemIndex);
 
     res = mRecordingConsumer->releaseBuffer(mRecordingBuffers[itemIndex]);
     if (res != OK) {
         ALOGE("%s: Camera %d: Unable to free recording frame "
-                "(buffer_handle_t: %p): %s (%d)", __FUNCTION__,
-                mId, imgHandle, strerror(-res), res);
+                "(Returned ANW buffer: %p): %s (%d)", __FUNCTION__,
+                mId, payload->pBuffer, strerror(-res), res);
         return;
     }
     mRecordingBuffers.replaceAt(itemIndex);
diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.h b/services/camera/libcameraservice/api1/client2/StreamingProcessor.h
index 2474062..42e9e7a 100644
--- a/services/camera/libcameraservice/api1/client2/StreamingProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.h
@@ -53,6 +53,8 @@
     int getPreviewStreamId() const;
 
     status_t setRecordingBufferCount(size_t count);
+    status_t setRecordingFormat(int format, android_dataspace dataSpace);
+
     status_t updateRecordingRequest(const Parameters &params);
     // If needsUpdate is set to true, a updateRecordingStream call with params will recreate
     // recording stream
@@ -127,6 +129,18 @@
     Vector<BufferItem> mRecordingBuffers;
     size_t mRecordingHeapHead, mRecordingHeapFree;
 
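+    // Defaults preserve the previous recording behavior: opaque format,
+    // BT.709 dataspace, and HW video encoder gralloc usage.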
+    static const int kDefaultRecordingFormat =
+            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+    int mRecordingFormat;
+
+    static const android_dataspace kDefaultRecordingDataSpace =
+            HAL_DATASPACE_BT709;
+    android_dataspace mRecordingDataSpace;
+
+    static const int kDefaultRecordingGrallocUsage =
+            GRALLOC_USAGE_HW_VIDEO_ENCODER;
+    int mRecordingGrallocUsage;
+
     virtual bool threadLoop();
 
     status_t processRecordingFrame();
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
index a03f9c7..d8500df 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
@@ -147,7 +147,7 @@
         // Check if stream parameters have to change
         uint32_t currentWidth, currentHeight;
         res = device->getStreamInfo(mZslStreamId,
-                &currentWidth, &currentHeight, 0);
+                &currentWidth, &currentHeight, 0, 0);
         if (res != OK) {
             ALOGE("%s: Camera %d: Error querying capture output stream info: "
                     "%s (%d)", __FUNCTION__,
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp
index 470a6d6..69620ac 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp
@@ -150,7 +150,7 @@
         // Check if stream parameters have to change
         uint32_t currentWidth, currentHeight;
         res = device->getStreamInfo(mZslStreamId,
-                &currentWidth, &currentHeight, 0);
+                &currentWidth, &currentHeight, 0, 0);
         if (res != OK) {
             ALOGE("%s: Camera %d: Error querying capture output stream info: "
                     "%s (%d)", __FUNCTION__,