Camera: Add HEIC support
- Derive HEIC capabilities from camera HAL and media framework.
- Add HeicCompositeStream to encode camera buffers to HEIC buffers.
- Add ExifUtils to overwrite JPEG APP segments and send to media codec.
- Add NDK enums and corresponding format support.
Test: Camera CTS
Bug: 79465976
Change-Id: I0a885e76335f3eba4be0fd42241edb0b7349f284
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 9e203da..b512f2b 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -34,6 +34,7 @@
#include <camera_metadata_hidden.h>
#include "DepthCompositeStream.h"
+#include "HeicCompositeStream.h"
// Convenience methods for constructing binder::Status objects for error returns
@@ -711,21 +712,35 @@
return res;
if (!isStreamInfoValid) {
- if (camera3::DepthCompositeStream::isDepthCompositeStream(surface)) {
+ bool isDepthCompositeStream =
+ camera3::DepthCompositeStream::isDepthCompositeStream(surface);
+ bool isHeicCompositeStream =
+ camera3::HeicCompositeStream::isHeicCompositeStream(surface);
+ if (isDepthCompositeStream || isHeicCompositeStream) {
// We need to take in to account that composite streams can have
// additional internal camera streams.
std::vector<OutputStreamInfo> compositeStreams;
- ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
+ if (isDepthCompositeStream) {
+ ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
+ mDevice->info(), &compositeStreams);
+ } else {
+ ret = camera3::HeicCompositeStream::getCompositeStreamInfo(streamInfo,
mDevice->info(), &compositeStreams);
+ }
if (ret != OK) {
String8 msg = String8::format(
- "Camera %s: Failed adding depth composite streams: %s (%d)",
+ "Camera %s: Failed adding composite streams: %s (%d)",
mCameraIdStr.string(), strerror(-ret), ret);
ALOGE("%s: %s", __FUNCTION__, msg.string());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
}
- if (compositeStreams.size() > 1) {
+ if (compositeStreams.size() == 0) {
+ // No internal streams means composite stream not
+ // supported.
+ *status = false;
+ return binder::Status::ok();
+ } else if (compositeStreams.size() > 1) {
streamCount += compositeStreams.size() - 1;
streamConfiguration.streams.resize(streamCount);
}
@@ -937,15 +952,16 @@
int streamId = camera3::CAMERA3_STREAM_ID_INVALID;
std::vector<int> surfaceIds;
- if (!camera3::DepthCompositeStream::isDepthCompositeStream(surfaces[0])) {
- err = mDevice->createStream(surfaces, deferredConsumer, streamInfo.width,
- streamInfo.height, streamInfo.format, streamInfo.dataSpace,
- static_cast<camera3_stream_rotation_t>(outputConfiguration.getRotation()),
- &streamId, physicalCameraId, &surfaceIds, outputConfiguration.getSurfaceSetID(),
- isShared);
- } else {
- sp<CompositeStream> compositeStream = new camera3::DepthCompositeStream(mDevice,
- getRemoteCallback());
+ bool isDepthCompositeStream = camera3::DepthCompositeStream::isDepthCompositeStream(surfaces[0]);
+ bool isHeicCompositeStream = camera3::HeicCompositeStream::isHeicCompositeStream(surfaces[0]);
+ if (isDepthCompositeStream || isHeicCompositeStream) {
+ sp<CompositeStream> compositeStream;
+ if (isDepthCompositeStream) {
+ compositeStream = new camera3::DepthCompositeStream(mDevice, getRemoteCallback());
+ } else {
+ compositeStream = new camera3::HeicCompositeStream(mDevice, getRemoteCallback());
+ }
+
err = compositeStream->createStream(surfaces, deferredConsumer, streamInfo.width,
streamInfo.height, streamInfo.format,
static_cast<camera3_stream_rotation_t>(outputConfiguration.getRotation()),
@@ -955,6 +971,12 @@
mCompositeStreamMap.add(IInterface::asBinder(surfaces[0]->getIGraphicBufferProducer()),
compositeStream);
}
+ } else {
+ err = mDevice->createStream(surfaces, deferredConsumer, streamInfo.width,
+ streamInfo.height, streamInfo.format, streamInfo.dataSpace,
+ static_cast<camera3_stream_rotation_t>(outputConfiguration.getRotation()),
+ &streamId, physicalCameraId, &surfaceIds, outputConfiguration.getSurfaceSetID(),
+ isShared);
}
if (err != OK) {
@@ -1437,6 +1459,8 @@
camera_metadata_ro_entry streamConfigs =
(dataSpace == HAL_DATASPACE_DEPTH) ?
info.find(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS) :
+ (dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) ?
+ info.find(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS) :
info.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
int32_t bestWidth = -1;
@@ -1930,6 +1954,10 @@
remoteCb->onCaptureStarted(resultExtras, timestamp);
}
Camera2ClientBase::notifyShutter(resultExtras, timestamp);
+
+ for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
+ mCompositeStreamMap.valueAt(i)->onShutter(resultExtras, timestamp);
+ }
}
void CameraDeviceClient::notifyPrepared(int streamId) {
diff --git a/services/camera/libcameraservice/api2/CompositeStream.cpp b/services/camera/libcameraservice/api2/CompositeStream.cpp
index 796bf42..354eaf9 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/CompositeStream.cpp
@@ -82,7 +82,8 @@
return deleteInternalStreams();
}
-void CompositeStream::onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId) {
+void CompositeStream::onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId,
+ const CameraMetadata& /*settings*/) {
Mutex::Autolock l(mMutex);
if (!mErrorState && (streamId == getStreamId())) {
mPendingCaptureResults.emplace(frameNumber, CameraMetadata());
diff --git a/services/camera/libcameraservice/api2/CompositeStream.h b/services/camera/libcameraservice/api2/CompositeStream.h
index 5837745..a401a82 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.h
+++ b/services/camera/libcameraservice/api2/CompositeStream.h
@@ -23,6 +23,7 @@
#include <android/hardware/camera2/ICameraDeviceCallbacks.h>
#include <camera/CameraMetadata.h>
#include <camera/camera2/OutputConfiguration.h>
+#include <gui/IProducerListener.h>
#include "common/CameraDeviceBase.h"
#include "device3/Camera3StreamInterface.h"
@@ -66,15 +67,24 @@
// Return composite stream id.
virtual int getStreamId() = 0;
+ // Notify when shutter notify is triggered
+ virtual void onShutter(const CaptureResultExtras& /*resultExtras*/, nsecs_t /*timestamp*/) {}
+
void onResultAvailable(const CaptureResult& result);
bool onError(int32_t errorCode, const CaptureResultExtras& resultExtras);
// Camera3StreamBufferListener implementation
void onBufferAcquired(const BufferInfo& /*bufferInfo*/) override { /*Empty for now */ }
void onBufferReleased(const BufferInfo& bufferInfo) override;
- void onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId) override;
+ void onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId,
+ const CameraMetadata& settings) override;
protected:
+ struct ProducerListener : public BnProducerListener {
+ // ProducerListener implementation
+ void onBufferReleased() override { /*No impl. for now*/ };
+ };
+
status_t registerCompositeStreamListener(int32_t streamId);
void eraseResult(int64_t frameNumber);
void flagAnErrorFrameNumber(int64_t frameNumber);
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.h b/services/camera/libcameraservice/api2/DepthCompositeStream.h
index e8fe517..1bf31f4 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.h
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.h
@@ -21,7 +21,6 @@
#include <dynamic_depth/imaging_model.h>
#include <dynamic_depth/depth_map.h>
-#include <gui/IProducerListener.h>
#include <gui/CpuConsumer.h>
#include "CompositeStream.h"
@@ -116,11 +115,6 @@
static const auto kDepthMapDataSpace = HAL_DATASPACE_DEPTH;
static const auto kJpegDataSpace = HAL_DATASPACE_V0_JFIF;
- struct ProducerListener : public BnProducerListener {
- // ProducerListener implementation
- void onBufferReleased() override { /*No impl. for now*/ };
- };
-
int mBlobStreamId, mBlobSurfaceId, mDepthStreamId, mDepthSurfaceId;
size_t mBlobWidth, mBlobHeight;
sp<CpuConsumer> mBlobConsumer, mDepthConsumer;
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
new file mode 100644
index 0000000..3eba863
--- /dev/null
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -0,0 +1,1606 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Camera3-HeicCompositeStream"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+
+#include <linux/memfd.h>
+#include <pthread.h>
+#include <sys/syscall.h>
+
+#include <android/hardware/camera/device/3.5/types.h>
+#include <gui/Surface.h>
+#include <utils/Log.h>
+#include <utils/Trace.h>
+
+#include <media/ICrypto.h>
+#include <media/MediaCodecBuffer.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/MediaDefs.h>
+#include <media/stagefright/MediaCodecConstants.h>
+
+#include "common/CameraDeviceBase.h"
+#include "utils/ExifUtils.h"
+#include "HeicEncoderInfoManager.h"
+#include "HeicCompositeStream.h"
+
+using android::hardware::camera::device::V3_5::CameraBlob;
+using android::hardware::camera::device::V3_5::CameraBlobId;
+
+namespace android {
+namespace camera3 {
+
+HeicCompositeStream::HeicCompositeStream(wp<CameraDeviceBase> device,
+ wp<hardware::camera2::ICameraDeviceCallbacks> cb) :
+ CompositeStream(device, cb),
+ mUseHeic(false),
+ mNumOutputTiles(1),
+ mOutputWidth(0),
+ mOutputHeight(0),
+ mMaxHeicBufferSize(0),
+ mGridWidth(HeicEncoderInfoManager::kGridWidth),
+ mGridHeight(HeicEncoderInfoManager::kGridHeight),
+ mGridRows(1),
+ mGridCols(1),
+ mUseGrid(false),
+ mAppSegmentStreamId(-1),
+ mAppSegmentSurfaceId(-1),
+ mAppSegmentBufferAcquired(false),
+ mMainImageStreamId(-1),
+ mMainImageSurfaceId(-1),
+ mYuvBufferAcquired(false),
+ mProducerListener(new ProducerListener()),
+ mOutputBufferCounter(0),
+ mGridTimestampUs(0) {
+}
+
+HeicCompositeStream::~HeicCompositeStream() {
+ // Call deinitCodec in case stream hasn't been deleted yet to avoid any
+ // memory/resource leak.
+ deinitCodec();
+
+ mInputAppSegmentBuffers.clear();
+ mCodecOutputBuffers.clear();
+
+ mAppSegmentStreamId = -1;
+ mAppSegmentSurfaceId = -1;
+ mAppSegmentConsumer.clear();
+ mAppSegmentSurface.clear();
+
+ mMainImageStreamId = -1;
+ mMainImageSurfaceId = -1;
+ mMainImageConsumer.clear();
+ mMainImageSurface.clear();
+}
+
+bool HeicCompositeStream::isHeicCompositeStream(const sp<Surface> &surface) {
+ ANativeWindow *anw = surface.get();
+ status_t err;
+ int format;
+ if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
+ String8 msg = String8::format("Failed to query Surface format: %s (%d)", strerror(-err),
+ err);
+ ALOGE("%s: %s", __FUNCTION__, msg.string());
+ return false;
+ }
+
+ int dataspace;
+ if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE, &dataspace)) != OK) {
+ String8 msg = String8::format("Failed to query Surface dataspace: %s (%d)", strerror(-err),
+ err);
+ ALOGE("%s: %s", __FUNCTION__, msg.string());
+ return false;
+ }
+
+ return ((format == HAL_PIXEL_FORMAT_BLOB) && (dataspace == HAL_DATASPACE_HEIF));
+}
+
+status_t HeicCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
+ bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
+ camera3_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
+ std::vector<int> *surfaceIds, int /*streamSetId*/, bool /*isShared*/) {
+
+ sp<CameraDeviceBase> device = mDevice.promote();
+ if (!device.get()) {
+ ALOGE("%s: Invalid camera device!", __FUNCTION__);
+ return NO_INIT;
+ }
+
+ status_t res = initializeCodec(width, height, device);
+ if (res != OK) {
+ ALOGE("%s: Failed to initialize HEIC/HEVC codec: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return NO_INIT;
+ }
+
+ sp<IGraphicBufferProducer> producer;
+ sp<IGraphicBufferConsumer> consumer;
+ BufferQueue::createBufferQueue(&producer, &consumer);
+ mAppSegmentConsumer = new CpuConsumer(consumer, 1);
+ mAppSegmentConsumer->setFrameAvailableListener(this);
+ mAppSegmentConsumer->setName(String8("Camera3-HeicComposite-AppSegmentStream"));
+ mAppSegmentSurface = new Surface(producer);
+
+ res = device->createStream(mAppSegmentSurface, mAppSegmentMaxSize, 1, format,
+ kAppSegmentDataSpace, rotation, &mAppSegmentStreamId, physicalCameraId, surfaceIds);
+ if (res == OK) {
+ mAppSegmentSurfaceId = (*surfaceIds)[0];
+ } else {
+ ALOGE("%s: Failed to create JPEG App segment stream: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
+
+ if (!mUseGrid) {
+ res = mCodec->createInputSurface(&producer);
+ if (res != OK) {
+ ALOGE("%s: Failed to create input surface for Heic codec: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+ } else {
+ BufferQueue::createBufferQueue(&producer, &consumer);
+ mMainImageConsumer = new CpuConsumer(consumer, 1);
+ mMainImageConsumer->setFrameAvailableListener(this);
+ mMainImageConsumer->setName(String8("Camera3-HeicComposite-HevcInputYUVStream"));
+ }
+ mMainImageSurface = new Surface(producer);
+
+ res = mCodec->start();
+ if (res != OK) {
+ ALOGE("%s: Failed to start codec: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
+
+ std::vector<int> sourceSurfaceId;
+ //Use YUV_888 format if framework tiling is needed.
+ int srcStreamFmt = mUseGrid ? HAL_PIXEL_FORMAT_YCbCr_420_888 :
+ HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+ res = device->createStream(mMainImageSurface, width, height, srcStreamFmt, kHeifDataSpace,
+ rotation, id, physicalCameraId, &sourceSurfaceId);
+ if (res == OK) {
+ mMainImageSurfaceId = sourceSurfaceId[0];
+ mMainImageStreamId = *id;
+ } else {
+ ALOGE("%s: Failed to create main image stream: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
+
+ mOutputSurface = consumers[0];
+ res = registerCompositeStreamListener(getStreamId());
+ if (res != OK) {
+ ALOGE("%s: Failed to register HAL main image stream", __FUNCTION__);
+ return res;
+ }
+
+ return res;
+}
+
+status_t HeicCompositeStream::deleteInternalStreams() {
+ requestExit();
+ auto res = join();
+ if (res != OK) {
+ ALOGE("%s: Failed to join with the main processing thread: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ }
+
+ deinitCodec();
+
+ if (mAppSegmentStreamId >= 0) {
+ sp<CameraDeviceBase> device = mDevice.promote();
+ if (!device.get()) {
+ ALOGE("%s: Invalid camera device!", __FUNCTION__);
+ return NO_INIT;
+ }
+
+ res = device->deleteStream(mAppSegmentStreamId);
+ mAppSegmentStreamId = -1;
+ }
+
+ return res;
+}
+
+void HeicCompositeStream::onBufferReleased(const BufferInfo& bufferInfo) {
+ Mutex::Autolock l(mMutex);
+
+ if (bufferInfo.mError) return;
+
+ mCodecOutputBufferTimestamps.push(bufferInfo.mTimestamp);
+}
+
+// We need to get the settings early to handle the case where the codec output
+// arrives earlier than result metadata.
+void HeicCompositeStream::onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId,
+ const CameraMetadata& settings) {
+ ATRACE_ASYNC_BEGIN("HEIC capture", frameNumber);
+
+ Mutex::Autolock l(mMutex);
+ if (mErrorState || (streamId != getStreamId())) {
+ return;
+ }
+
+ mPendingCaptureResults.emplace(frameNumber, CameraMetadata());
+
+ camera_metadata_ro_entry entry;
+
+ int32_t orientation = 0;
+ entry = settings.find(ANDROID_JPEG_ORIENTATION);
+ if (entry.count == 1) {
+ orientation = entry.data.i32[0];
+ }
+
+ int32_t quality = kDefaultJpegQuality;
+ entry = settings.find(ANDROID_JPEG_QUALITY);
+ if (entry.count == 1) {
+ quality = entry.data.i32[0];
+ }
+
+ mSettingsByFrameNumber[frameNumber] = std::make_pair(orientation, quality);
+}
+
+void HeicCompositeStream::onFrameAvailable(const BufferItem& item) {
+ if (item.mDataSpace == static_cast<android_dataspace>(kAppSegmentDataSpace)) {
+ ALOGV("%s: JPEG APP segments buffer with ts: %" PRIu64 " ms. arrived!",
+ __func__, ns2ms(item.mTimestamp));
+
+ Mutex::Autolock l(mMutex);
+ if (!mErrorState) {
+ mInputAppSegmentBuffers.push_back(item.mTimestamp);
+ mInputReadyCondition.signal();
+ }
+ } else if (item.mDataSpace == kHeifDataSpace) {
+ ALOGV("%s: YUV_888 buffer with ts: %" PRIu64 " ms. arrived!",
+ __func__, ns2ms(item.mTimestamp));
+
+ Mutex::Autolock l(mMutex);
+ if (!mUseGrid) {
+ ALOGE("%s: YUV_888 internal stream is only supported for HEVC tiling",
+ __FUNCTION__);
+ return;
+ }
+ if (!mErrorState) {
+ mInputYuvBuffers.push_back(item.mTimestamp);
+ mInputReadyCondition.signal();
+ }
+ } else {
+ ALOGE("%s: Unexpected data space: 0x%x", __FUNCTION__, item.mDataSpace);
+ }
+}
+
+status_t HeicCompositeStream::getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
+ const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/) {
+ if (compositeOutput == nullptr) {
+ return BAD_VALUE;
+ }
+
+ compositeOutput->clear();
+
+ bool useGrid, useHeic;
+ bool isSizeSupported = isSizeSupportedByHeifEncoder(
+ streamInfo.width, streamInfo.height, &useHeic, &useGrid, nullptr);
+ if (!isSizeSupported) {
+ // Size is not supported by either encoder.
+ return OK;
+ }
+
+ compositeOutput->insert(compositeOutput->end(), 2, streamInfo);
+
+ // JPEG APPS segments Blob stream info
+ (*compositeOutput)[0].width = calcAppSegmentMaxSize(ch);
+ (*compositeOutput)[0].height = 1;
+ (*compositeOutput)[0].format = HAL_PIXEL_FORMAT_BLOB;
+ (*compositeOutput)[0].dataSpace = kAppSegmentDataSpace;
+ (*compositeOutput)[0].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
+
+ // YUV/IMPLEMENTATION_DEFINED stream info
+ (*compositeOutput)[1].width = streamInfo.width;
+ (*compositeOutput)[1].height = streamInfo.height;
+ (*compositeOutput)[1].format = useGrid ? HAL_PIXEL_FORMAT_YCbCr_420_888 :
+ HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+ (*compositeOutput)[1].dataSpace = kHeifDataSpace;
+ (*compositeOutput)[1].consumerUsage = useHeic ? GRALLOC_USAGE_HW_IMAGE_ENCODER :
+ useGrid ? GRALLOC_USAGE_SW_READ_OFTEN : GRALLOC_USAGE_HW_VIDEO_ENCODER;
+
+ return NO_ERROR;
+}
+
+bool HeicCompositeStream::isSizeSupportedByHeifEncoder(int32_t width, int32_t height,
+ bool* useHeic, bool* useGrid, int64_t* stall) {
+ static HeicEncoderInfoManager& heicManager = HeicEncoderInfoManager::getInstance();
+ return heicManager.isSizeSupported(width, height, useHeic, useGrid, stall);
+}
+
+bool HeicCompositeStream::isInMemoryTempFileSupported() {
+ int memfd = syscall(__NR_memfd_create, "HEIF-try-memfd", MFD_CLOEXEC);
+ if (memfd == -1) {
+ if (errno != ENOSYS) {
+ ALOGE("%s: Failed to create tmpfs file. errno %d", __FUNCTION__, errno);
+ }
+ return false;
+ }
+ close(memfd);
+ return true;
+}
+
+void HeicCompositeStream::onHeicOutputFrameAvailable(
+ const CodecOutputBufferInfo& outputBufferInfo) {
+ Mutex::Autolock l(mMutex);
+
+ ALOGV("%s: index %d, offset %d, size %d, time %" PRId64 ", flags 0x%x",
+ __FUNCTION__, outputBufferInfo.index, outputBufferInfo.offset,
+ outputBufferInfo.size, outputBufferInfo.timeUs, outputBufferInfo.flags);
+
+ if (!mErrorState) {
+ if ((outputBufferInfo.size > 0) &&
+ ((outputBufferInfo.flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) == 0)) {
+ mCodecOutputBuffers.push_back(outputBufferInfo);
+ mInputReadyCondition.signal();
+ } else {
+ mCodec->releaseOutputBuffer(outputBufferInfo.index);
+ }
+ } else {
+ mCodec->releaseOutputBuffer(outputBufferInfo.index);
+ }
+}
+
+void HeicCompositeStream::onHeicInputFrameAvailable(int32_t index) {
+ Mutex::Autolock l(mMutex);
+
+ if (!mUseGrid) {
+ ALOGE("%s: Codec YUV input mode must only be used for Hevc tiling mode", __FUNCTION__);
+ return;
+ }
+
+ mCodecInputBuffers.push_back(index);
+ mInputReadyCondition.signal();
+}
+
+void HeicCompositeStream::onHeicFormatChanged(sp<AMessage>& newFormat) {
+ if (newFormat == nullptr) {
+ ALOGE("%s: newFormat must not be null!", __FUNCTION__);
+ return;
+ }
+
+ Mutex::Autolock l(mMutex);
+
+ AString mime;
+ AString mimeHeic(MIMETYPE_IMAGE_ANDROID_HEIC);
+ newFormat->findString(KEY_MIME, &mime);
+ if (mime != mimeHeic) {
+ // For HEVC codec, below keys need to be filled out or overwritten so that the
+ // muxer can handle them as HEIC output image.
+ newFormat->setString(KEY_MIME, mimeHeic);
+ newFormat->setInt32(KEY_WIDTH, mOutputWidth);
+ newFormat->setInt32(KEY_HEIGHT, mOutputHeight);
+ if (mUseGrid) {
+ newFormat->setInt32(KEY_TILE_WIDTH, mGridWidth);
+ newFormat->setInt32(KEY_TILE_HEIGHT, mGridHeight);
+ newFormat->setInt32(KEY_GRID_ROWS, mGridRows);
+ newFormat->setInt32(KEY_GRID_COLUMNS, mGridCols);
+ }
+ }
+ newFormat->setInt32(KEY_IS_DEFAULT, 1 /*isPrimary*/);
+
+ int32_t gridRows, gridCols;
+ if (newFormat->findInt32(KEY_GRID_ROWS, &gridRows) &&
+ newFormat->findInt32(KEY_GRID_COLUMNS, &gridCols)) {
+ mNumOutputTiles = gridRows * gridCols;
+ } else {
+ mNumOutputTiles = 1;
+ }
+
+ ALOGV("%s: mNumOutputTiles is %zu", __FUNCTION__, mNumOutputTiles);
+ mFormat = newFormat;
+}
+
+void HeicCompositeStream::onHeicCodecError() {
+ Mutex::Autolock l(mMutex);
+ mErrorState = true;
+}
+
+status_t HeicCompositeStream::configureStream() {
+ if (isRunning()) {
+ // Processing thread is already running, nothing more to do.
+ return NO_ERROR;
+ }
+
+ if (mOutputSurface.get() == nullptr) {
+ ALOGE("%s: No valid output surface set!", __FUNCTION__);
+ return NO_INIT;
+ }
+
+ auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mProducerListener);
+ if (res != OK) {
+ ALOGE("%s: Unable to connect to native window for stream %d",
+ __FUNCTION__, mMainImageStreamId);
+ return res;
+ }
+
+ if ((res = native_window_set_buffers_format(mOutputSurface.get(), HAL_PIXEL_FORMAT_BLOB))
+ != OK) {
+ ALOGE("%s: Unable to configure stream buffer format for stream %d", __FUNCTION__,
+ mMainImageStreamId);
+ return res;
+ }
+
+ ANativeWindow *anwConsumer = mOutputSurface.get();
+ int maxConsumerBuffers;
+ if ((res = anwConsumer->query(anwConsumer, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
+ &maxConsumerBuffers)) != OK) {
+ ALOGE("%s: Unable to query consumer undequeued"
+ " buffer count for stream %d", __FUNCTION__, mMainImageStreamId);
+ return res;
+ }
+
+ // Cannot use SourceSurface buffer count since it could be codec's 512*512 tile
+ // buffer count.
+ int maxProducerBuffers = 1;
+ if ((res = native_window_set_buffer_count(
+ anwConsumer, maxProducerBuffers + maxConsumerBuffers)) != OK) {
+ ALOGE("%s: Unable to set buffer count for stream %d", __FUNCTION__, mMainImageStreamId);
+ return res;
+ }
+
+ if ((res = native_window_set_buffers_dimensions(anwConsumer, mMaxHeicBufferSize, 1)) != OK) {
+ ALOGE("%s: Unable to set buffer dimension %zu x 1 for stream %d: %s (%d)",
+ __FUNCTION__, mMaxHeicBufferSize, mMainImageStreamId, strerror(-res), res);
+ return res;
+ }
+
+ run("HeicCompositeStreamProc");
+
+ return NO_ERROR;
+}
+
+status_t HeicCompositeStream::insertGbp(SurfaceMap* /*out*/outSurfaceMap,
+ Vector<int32_t>* /*out*/outputStreamIds, int32_t* /*out*/currentStreamId) {
+ if (outSurfaceMap->find(mAppSegmentStreamId) == outSurfaceMap->end()) {
+ (*outSurfaceMap)[mAppSegmentStreamId] = std::vector<size_t>();
+ outputStreamIds->push_back(mAppSegmentStreamId);
+ }
+ (*outSurfaceMap)[mAppSegmentStreamId].push_back(mAppSegmentSurfaceId);
+
+ if (outSurfaceMap->find(mMainImageStreamId) == outSurfaceMap->end()) {
+ (*outSurfaceMap)[mMainImageStreamId] = std::vector<size_t>();
+ outputStreamIds->push_back(mMainImageStreamId);
+ }
+ (*outSurfaceMap)[mMainImageStreamId].push_back(mMainImageSurfaceId);
+
+ if (currentStreamId != nullptr) {
+ *currentStreamId = mMainImageStreamId;
+ }
+
+ return NO_ERROR;
+}
+
+void HeicCompositeStream::onShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp) {
+ Mutex::Autolock l(mMutex);
+ if (mErrorState) {
+ return;
+ }
+
+ if (mSettingsByFrameNumber.find(resultExtras.frameNumber) != mSettingsByFrameNumber.end()) {
+ mFrameNumberMap.emplace(resultExtras.frameNumber, timestamp);
+ mSettingsByTimestamp[timestamp] = mSettingsByFrameNumber[resultExtras.frameNumber];
+ mSettingsByFrameNumber.erase(resultExtras.frameNumber);
+ mInputReadyCondition.signal();
+ }
+}
+
+void HeicCompositeStream::compilePendingInputLocked() {
+ while (!mSettingsByTimestamp.empty()) {
+ auto it = mSettingsByTimestamp.begin();
+ mPendingInputFrames[it->first].orientation = it->second.first;
+ mPendingInputFrames[it->first].quality = it->second.second;
+ mSettingsByTimestamp.erase(it);
+ }
+
+ while (!mInputAppSegmentBuffers.empty() && !mAppSegmentBufferAcquired) {
+ CpuConsumer::LockedBuffer imgBuffer;
+ auto it = mInputAppSegmentBuffers.begin();
+ auto res = mAppSegmentConsumer->lockNextBuffer(&imgBuffer);
+ if (res == NOT_ENOUGH_DATA) {
+ // Cannot lock any more buffers.
+ break;
+ } else if ((res != OK) || (*it != imgBuffer.timestamp)) {
+ if (res != OK) {
+ ALOGE("%s: Error locking JPEG_APP_SEGMENTS image buffer: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ } else {
+ ALOGE("%s: Expecting JPEG_APP_SEGMENTS buffer with time stamp: %" PRId64
+ " received buffer with time stamp: %" PRId64, __FUNCTION__,
+ *it, imgBuffer.timestamp);
+ }
+ mPendingInputFrames[*it].error = true;
+ mInputAppSegmentBuffers.erase(it);
+ continue;
+ }
+
+ if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
+ (mPendingInputFrames[imgBuffer.timestamp].error)) {
+ mAppSegmentConsumer->unlockBuffer(imgBuffer);
+ } else {
+ mPendingInputFrames[imgBuffer.timestamp].appSegmentBuffer = imgBuffer;
+ mAppSegmentBufferAcquired = true;
+ }
+ mInputAppSegmentBuffers.erase(it);
+ }
+
+ while (!mInputYuvBuffers.empty() && !mYuvBufferAcquired) {
+ CpuConsumer::LockedBuffer imgBuffer;
+ auto it = mInputYuvBuffers.begin();
+ auto res = mMainImageConsumer->lockNextBuffer(&imgBuffer);
+ if (res == NOT_ENOUGH_DATA) {
+ // Cannot lock any more buffers.
+ break;
+ } else if (res != OK) {
+ ALOGE("%s: Error locking YUV_888 image buffer: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ mPendingInputFrames[*it].error = true;
+ mInputYuvBuffers.erase(it);
+ continue;
+ } else if (*it != imgBuffer.timestamp) {
+ ALOGW("%s: Expecting YUV_888 buffer with time stamp: %" PRId64 " received buffer with "
+ "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
+ mPendingInputFrames[*it].error = true;
+ mInputYuvBuffers.erase(it);
+ continue;
+ }
+
+ if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
+ (mPendingInputFrames[imgBuffer.timestamp].error)) {
+ mMainImageConsumer->unlockBuffer(imgBuffer);
+ } else {
+ mPendingInputFrames[imgBuffer.timestamp].yuvBuffer = imgBuffer;
+ mYuvBufferAcquired = true;
+ }
+ mInputYuvBuffers.erase(it);
+ }
+
+ while (!mCodecOutputBuffers.empty()) {
+ auto it = mCodecOutputBuffers.begin();
+ // Bitstream buffer timestamp doesn't necessarily directly correlate with input
+ // buffer timestamp. Assume encoder input to output is FIFO, use a queue
+ // to look up timestamp.
+ int64_t bufferTime = -1;
+ if (mCodecOutputBufferTimestamps.empty()) {
+ ALOGE("%s: Failed to find buffer timestamp for codec output buffer!", __FUNCTION__);
+ } else {
+ // Direct mapping between camera timestamp (in ns) and codec timestamp (in us).
+ bufferTime = mCodecOutputBufferTimestamps.front();
+ mOutputBufferCounter++;
+ if (mOutputBufferCounter == mNumOutputTiles) {
+ mCodecOutputBufferTimestamps.pop();
+ mOutputBufferCounter = 0;
+ }
+
+ mPendingInputFrames[bufferTime].codecOutputBuffers.push_back(*it);
+ }
+ mCodecOutputBuffers.erase(it);
+ }
+
+ while (!mFrameNumberMap.empty()) {
+ auto it = mFrameNumberMap.begin();
+ mPendingInputFrames[it->second].frameNumber = it->first;
+ mFrameNumberMap.erase(it);
+ }
+
+ // Heic composition doesn't depend on capture result, so no need to check
+ // mErrorFrameNumbers. Just remove them.
+ mErrorFrameNumbers.clear();
+
+ // Distribute codec input buffers to be filled out from YUV output
+ for (auto it = mPendingInputFrames.begin();
+ it != mPendingInputFrames.end() && mCodecInputBuffers.size() > 0; it++) {
+ InputFrame& inputFrame(it->second);
+ if (inputFrame.codecInputCounter < mGridRows * mGridCols) {
+ // Available input tiles that are required for the current input
+ // image.
+ size_t newInputTiles = std::min(mCodecInputBuffers.size(),
+ mGridRows * mGridCols - inputFrame.codecInputCounter);
+ for (size_t i = 0; i < newInputTiles; i++) {
+ CodecInputBufferInfo inputInfo =
+ { mCodecInputBuffers[0], mGridTimestampUs++, inputFrame.codecInputCounter };
+ inputFrame.codecInputBuffers.push_back(inputInfo);
+
+ mCodecInputBuffers.erase(mCodecInputBuffers.begin());
+ inputFrame.codecInputCounter++;
+ }
+ break;
+ }
+ }
+}
+
+bool HeicCompositeStream::getNextReadyInputLocked(int64_t *currentTs /*out*/) {
+ if (currentTs == nullptr) {
+ return false;
+ }
+
+ bool newInputAvailable = false;
+ for (const auto& it : mPendingInputFrames) {
+ bool appSegmentBufferReady = (it.second.appSegmentBuffer.data != nullptr) &&
+ !it.second.appSegmentWritten;
+ bool codecOutputReady = !it.second.codecOutputBuffers.empty();
+ bool codecInputReady = (it.second.yuvBuffer.data != nullptr) &&
+ (!it.second.codecInputBuffers.empty());
+ if ((!it.second.error) &&
+ (it.first < *currentTs) &&
+ (appSegmentBufferReady || codecOutputReady || codecInputReady)) {
+ *currentTs = it.first;
+ newInputAvailable = true;
+ break;
+ }
+ }
+
+ return newInputAvailable;
+}
+
+int64_t HeicCompositeStream::getNextFailingInputLocked(int64_t *currentTs /*out*/) {
+ int64_t res = -1;
+ if (currentTs == nullptr) {
+ return res;
+ }
+
+ for (const auto& it : mPendingInputFrames) {
+ if (it.second.error && !it.second.errorNotified && (it.first < *currentTs)) {
+ *currentTs = it.first;
+ res = it.second.frameNumber;
+ break;
+ }
+ }
+
+ return res;
+}
+
// Advance one pending input frame through the HEIC pipeline as far as the
// inputs available so far allow: feed YUV tiles to the encoder, lazily start
// the muxer, write the JPEG APP segments, drain encoded tiles to the muxer,
// and finally hand the completed HEIC buffer to the client.
// 'timestamp' is the frame's capture timestamp (map key in
// mPendingInputFrames). Returns OK if there was simply nothing to do yet.
status_t HeicCompositeStream::processInputFrame(nsecs_t timestamp,
        InputFrame &inputFrame) {
    ATRACE_CALL();
    status_t res = OK;

    // Determine which of the three input sources are actionable right now:
    // an unwritten APP segment blob, encoder output buffers, or a YUV buffer
    // with free codec input buffers to fill.
    bool appSegmentBufferReady = inputFrame.appSegmentBuffer.data != nullptr &&
            !inputFrame.appSegmentWritten;
    bool codecOutputReady = inputFrame.codecOutputBuffers.size() > 0;
    bool codecInputReady = inputFrame.yuvBuffer.data != nullptr &&
            !inputFrame.codecInputBuffers.empty();

    if (!appSegmentBufferReady && !codecOutputReady && !codecInputReady) {
        ALOGW("%s: No valid appSegmentBuffer/codec input/outputBuffer available!", __FUNCTION__);
        return OK;
    }

    // Handle inputs for Hevc tiling
    if (codecInputReady) {
        res = processCodecInputFrame(inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to process codec input frame: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // Initialize and start muxer if not yet done so. Note that
    // startMuxerForInputFrame() returns OK without doing anything until the
    // first codec output buffer for this frame has arrived.
    if (inputFrame.muxer == nullptr) {
        res = startMuxerForInputFrame(timestamp, inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to create and start muxer: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // Write JPEG APP segments data to the muxer.
    if (appSegmentBufferReady && inputFrame.muxer != nullptr) {
        res = processAppSegment(timestamp, inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to process JPEG APP segments: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // Write media codec bitstream buffers to muxer.
    while (!inputFrame.codecOutputBuffers.empty()) {
        res = processOneCodecOutputFrame(timestamp, inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to process codec output frame: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    // The frame is complete once the APP segments are written and every
    // expected tile has been muxed.
    if (inputFrame.appSegmentWritten && inputFrame.pendingOutputTiles == 0) {
        res = processCompletedInputFrame(timestamp, inputFrame);
        if (res != OK) {
            ALOGE("%s: Failed to process completed input frame: %s (%d)", __FUNCTION__,
                    strerror(-res), res);
            return res;
        }
    }

    return res;
}
+
// Lazily create and start a MediaMuxer for this frame once the first encoded
// tile is available. The muxer writes into an anonymous in-memory file
// (memfd); processCompletedInputFrame() later copies the finished HEIF
// content into the client's output buffer ('anb', dequeued here).
// Returns OK without side effects while no codec output has arrived yet.
status_t HeicCompositeStream::startMuxerForInputFrame(nsecs_t timestamp, InputFrame &inputFrame) {
    sp<ANativeWindow> outputANW = mOutputSurface;
    if (inputFrame.codecOutputBuffers.size() == 0) {
        // No single codec output buffer has been generated. Continue to
        // wait.
        return OK;
    }

    // Reserve the client-facing output buffer up front; it is filled in
    // processCompletedInputFrame(). On failure after this point, 'anb' and
    // 'fileFd' are reclaimed by releaseInputFrameLocked().
    auto res = outputANW->dequeueBuffer(mOutputSurface.get(), &inputFrame.anb, &inputFrame.fenceFd);
    if (res != OK) {
        ALOGE("%s: Error retrieving output buffer: %s (%d)", __FUNCTION__, strerror(-res),
                res);
        return res;
    }

    // Combine current thread id, stream id and timestamp to uniquely identify image.
    std::ostringstream tempOutputFile;
    tempOutputFile << "HEIF-" << pthread_self() << "-"
            << getStreamId() << "-" << timestamp;
    inputFrame.fileFd = syscall(__NR_memfd_create, tempOutputFile.str().c_str(), MFD_CLOEXEC);
    if (inputFrame.fileFd < 0) {
        ALOGE("%s: Failed to create file %s. Error no is %d", __FUNCTION__,
                tempOutputFile.str().c_str(), errno);
        return NO_INIT;
    }
    inputFrame.muxer = new MediaMuxer(inputFrame.fileFd, MediaMuxer::OUTPUT_FORMAT_HEIF);
    if (inputFrame.muxer == nullptr) {
        ALOGE("%s: Failed to create MediaMuxer for file fd %d",
                __FUNCTION__, inputFrame.fileFd);
        return NO_INIT;
    }

    res = inputFrame.muxer->setOrientationHint(inputFrame.orientation);
    if (res != OK) {
        ALOGE("%s: Failed to setOrientationHint: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }
    // Set encoder quality
    {
        sp<AMessage> qualityParams = new AMessage;
        qualityParams->setInt32(PARAMETER_KEY_VIDEO_BITRATE, inputFrame.quality);
        res = mCodec->setParameters(qualityParams);
        if (res != OK) {
            ALOGE("%s: Failed to set codec quality: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
    }

    // mFormat is presumably the codec output format captured via
    // onHeicFormatChanged() — confirm against the format-changed handler.
    ssize_t trackId = inputFrame.muxer->addTrack(mFormat);
    if (trackId < 0) {
        ALOGE("%s: Failed to addTrack to the muxer: %zd", __FUNCTION__, trackId);
        return NO_INIT;
    }

    inputFrame.trackIndex = trackId;
    inputFrame.pendingOutputTiles = mNumOutputTiles;

    res = inputFrame.muxer->start();
    if (res != OK) {
        ALOGE("%s: Failed to start MediaMuxer: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    return OK;
}
+
// Write the JPEG APP segment data for this frame to the muxer, replacing the
// incoming APP1 (EXIF) segment with a freshly generated one that reflects
// the HEIC output (orientation, main image size). Sets
// 'inputFrame.appSegmentWritten' on success.
status_t HeicCompositeStream::processAppSegment(nsecs_t timestamp, InputFrame &inputFrame) {
    size_t app1Size = 0;
    // The blob buffer dimensions encode its byte capacity (width * height).
    auto appSegmentSize = findAppSegmentsSize(inputFrame.appSegmentBuffer.data,
            inputFrame.appSegmentBuffer.width * inputFrame.appSegmentBuffer.height,
            &app1Size);
    ALOGV("%s: appSegmentSize is %zu, width %d, height %d, app1Size %zu", __FUNCTION__,
            appSegmentSize, inputFrame.appSegmentBuffer.width,
            inputFrame.appSegmentBuffer.height, app1Size);
    if (appSegmentSize == 0) {
        ALOGE("%s: Failed to find JPEG APP segment size", __FUNCTION__);
        return NO_INIT;
    }

    // Rebuild the APP1/EXIF segment from the HAL-provided one.
    std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
    auto exifRes = exifUtils->initialize(inputFrame.appSegmentBuffer.data, app1Size);
    if (!exifRes) {
        ALOGE("%s: Failed to initialize ExifUtils object!", __FUNCTION__);
        return BAD_VALUE;
    }
    //TODO: Use capture result metadata and static metadata to fill out the
    //rest.
    CameraMetadata dummyMeta;
    exifRes = exifUtils->setFromMetadata(dummyMeta, mOutputWidth, mOutputHeight);
    if (!exifRes) {
        ALOGE("%s: Failed to set Exif tags using metadata and main image sizes", __FUNCTION__);
        return BAD_VALUE;
    }
    exifRes = exifUtils->setOrientation(inputFrame.orientation);
    if (!exifRes) {
        ALOGE("%s: ExifUtils failed to set orientation", __FUNCTION__);
        return BAD_VALUE;
    }
    exifRes = exifUtils->generateApp1();
    if (!exifRes) {
        ALOGE("%s: ExifUtils failed to generate APP1 segment", __FUNCTION__);
        return BAD_VALUE;
    }

    unsigned int newApp1Length = exifUtils->getApp1Length();
    const uint8_t *newApp1Segment = exifUtils->getApp1Buffer();

    //Assemble the APP1 marker buffer required by MediaCodec
    // Layout: 'Exif' tag, 0xFFE1 APP1 marker, then the big-endian APP1
    // length patched in below.
    uint8_t kExifApp1Marker[] = {'E', 'x', 'i', 'f', 0xFF, 0xE1, 0x00, 0x00};
    kExifApp1Marker[6] = static_cast<uint8_t>(newApp1Length >> 8);
    kExifApp1Marker[7] = static_cast<uint8_t>(newApp1Length & 0xFF);
    // Total: marker header + regenerated APP1 + the remaining APPn segments
    // copied verbatim from the HAL buffer.
    size_t appSegmentBufferSize = sizeof(kExifApp1Marker) +
            appSegmentSize - app1Size + newApp1Length;
    // NOTE(review): consider std::vector/unique_ptr instead of raw
    // new[]/delete[] to make the cleanup below early-return safe.
    uint8_t* appSegmentBuffer = new uint8_t[appSegmentBufferSize];
    memcpy(appSegmentBuffer, kExifApp1Marker, sizeof(kExifApp1Marker));
    memcpy(appSegmentBuffer + sizeof(kExifApp1Marker), newApp1Segment, newApp1Length);
    if (appSegmentSize - app1Size > 0) {
        memcpy(appSegmentBuffer + sizeof(kExifApp1Marker) + newApp1Length,
                inputFrame.appSegmentBuffer.data + app1Size, appSegmentSize - app1Size);
    }

    sp<ABuffer> aBuffer = new ABuffer(appSegmentBuffer, appSegmentBufferSize);
    auto res = inputFrame.muxer->writeSampleData(aBuffer, inputFrame.trackIndex,
            timestamp, MediaCodec::BUFFER_FLAG_MUXER_DATA);
    delete[] appSegmentBuffer;

    if (res != OK) {
        ALOGE("%s: Failed to write JPEG APP segments to muxer: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }
    inputFrame.appSegmentWritten = true;

    return OK;
}
+
+status_t HeicCompositeStream::processCodecInputFrame(InputFrame &inputFrame) {
+ for (auto& inputBuffer : inputFrame.codecInputBuffers) {
+ sp<MediaCodecBuffer> buffer;
+ auto res = mCodec->getInputBuffer(inputBuffer.index, &buffer);
+ if (res != OK) {
+ ALOGE("%s: Error getting codec input buffer: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
+
+ // Copy one tile from source to destination.
+ size_t tileX = inputBuffer.tileIndex % mGridCols;
+ size_t tileY = inputBuffer.tileIndex / mGridCols;
+ size_t top = mGridHeight * tileY;
+ size_t left = mGridWidth * tileX;
+ size_t width = (tileX == static_cast<size_t>(mGridCols) - 1) ?
+ mOutputWidth - tileX * mGridWidth : mGridWidth;
+ size_t height = (tileY == static_cast<size_t>(mGridRows) - 1) ?
+ mOutputHeight - tileY * mGridHeight : mGridHeight;
+ ALOGV("%s: inputBuffer tileIndex [%zu, %zu], top %zu, left %zu, width %zu, height %zu",
+ __FUNCTION__, tileX, tileY, top, left, width, height);
+
+ res = copyOneYuvTile(buffer, inputFrame.yuvBuffer, top, left, width, height);
+ if (res != OK) {
+ ALOGE("%s: Failed to copy YUV tile %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
+
+ res = mCodec->queueInputBuffer(inputBuffer.index, 0, buffer->capacity(),
+ inputBuffer.timeUs, 0, nullptr /*errorDetailMsg*/);
+ if (res != OK) {
+ ALOGE("%s: Failed to queueInputBuffer to Codec: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+ }
+
+ inputFrame.codecInputBuffers.clear();
+ return OK;
+}
+
// Pop the oldest encoded tile from 'codecOutputBuffers', write it to the
// muxer track, and return the buffer to the codec. Decrements
// 'pendingOutputTiles' so callers can detect when the image is complete.
status_t HeicCompositeStream::processOneCodecOutputFrame(nsecs_t timestamp,
        InputFrame &inputFrame) {
    auto it = inputFrame.codecOutputBuffers.begin();
    sp<MediaCodecBuffer> buffer;
    status_t res = mCodec->getOutputBuffer(it->index, &buffer);
    if (res != OK) {
        ALOGE("%s: Error getting Heic codec output buffer at index %d: %s (%d)",
                __FUNCTION__, it->index, strerror(-res), res);
        return res;
    }
    if (buffer == nullptr) {
        ALOGE("%s: Invalid Heic codec output buffer at index %d",
                __FUNCTION__, it->index);
        return BAD_VALUE;
    }

    // ABuffer wraps the codec buffer without copying; writeSampleData must
    // therefore complete before releaseOutputBuffer() below.
    sp<ABuffer> aBuffer = new ABuffer(buffer->data(), buffer->size());
    res = inputFrame.muxer->writeSampleData(
            aBuffer, inputFrame.trackIndex, timestamp, 0 /*flags*/);
    if (res != OK) {
        ALOGE("%s: Failed to write buffer index %d to muxer: %s (%d)",
                __FUNCTION__, it->index, strerror(-res), res);
        return res;
    }

    mCodec->releaseOutputBuffer(it->index);
    if (inputFrame.pendingOutputTiles == 0) {
        ALOGW("%s: Codec generated more tiles than expected!", __FUNCTION__);
    } else {
        inputFrame.pendingOutputTiles--;
    }

    inputFrame.codecOutputBuffers.erase(inputFrame.codecOutputBuffers.begin());
    return OK;
}
+
// Finalize a fully muxed frame: stop the muxer, copy the HEIF file content
// from the memfd into the client's output buffer, fill in the CameraBlob
// transport footer at the end of the buffer, and queue the buffer to the
// output surface.
status_t HeicCompositeStream::processCompletedInputFrame(nsecs_t timestamp,
        InputFrame &inputFrame) {
    sp<ANativeWindow> outputANW = mOutputSurface;
    inputFrame.muxer->stop();

    // Copy the content of the file to memory.
    sp<GraphicBuffer> gb = GraphicBuffer::from(inputFrame.anb);
    void* dstBuffer;
    auto res = gb->lockAsync(GRALLOC_USAGE_SW_WRITE_OFTEN, &dstBuffer, inputFrame.fenceFd);
    if (res != OK) {
        ALOGE("%s: Error trying to lock output buffer fence: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    // NOTE(review): no matching unlock for lockAsync() is visible in this
    // function (including the success path), and the error returns below
    // rely on releaseInputFrameLocked() to close 'fileFd' and cancel 'anb'
    // — confirm this is intentional.
    off_t fSize = lseek(inputFrame.fileFd, 0, SEEK_END);
    if (static_cast<size_t>(fSize) > mMaxHeicBufferSize - sizeof(CameraBlob)) {
        ALOGE("%s: Error: MediaMuxer output size %ld is larger than buffer sizer %zu",
                __FUNCTION__, fSize, mMaxHeicBufferSize - sizeof(CameraBlob));
        return BAD_VALUE;
    }

    lseek(inputFrame.fileFd, 0, SEEK_SET);
    ssize_t bytesRead = read(inputFrame.fileFd, dstBuffer, fSize);
    if (bytesRead < fSize) {
        ALOGE("%s: Only %zd of %ld bytes read", __FUNCTION__, bytesRead, fSize);
        return BAD_VALUE;
    }

    close(inputFrame.fileFd);
    inputFrame.fileFd = -1;

    // Fill in HEIC header
    // The blob footer lives at the very end of the (mMaxHeicBufferSize-
    // sized) output buffer and records the actual HEIF payload size.
    uint8_t *header = static_cast<uint8_t*>(dstBuffer) + mMaxHeicBufferSize - sizeof(CameraBlob);
    struct CameraBlob *blobHeader = (struct CameraBlob *)header;
    // Must be in sync with CAMERA3_HEIC_BLOB_ID in android_media_Utils.cpp
    blobHeader->blobId = static_cast<CameraBlobId>(0x00FE);
    blobHeader->blobSize = fSize;

    res = native_window_set_buffers_timestamp(mOutputSurface.get(), timestamp);
    if (res != OK) {
        ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
                __FUNCTION__, getStreamId(), strerror(-res), res);
        return res;
    }

    res = outputANW->queueBuffer(mOutputSurface.get(), inputFrame.anb, /*fence*/ -1);
    if (res != OK) {
        ALOGE("%s: Failed to queueBuffer to Heic stream: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }
    // Ownership of the buffer has passed to the surface.
    inputFrame.anb = nullptr;

    return OK;
}
+
+
+void HeicCompositeStream::releaseInputFrameLocked(InputFrame *inputFrame /*out*/) {
+ if (inputFrame == nullptr) {
+ return;
+ }
+
+ if (inputFrame->appSegmentBuffer.data != nullptr) {
+ mAppSegmentConsumer->unlockBuffer(inputFrame->appSegmentBuffer);
+ inputFrame->appSegmentBuffer.data = nullptr;
+ mAppSegmentBufferAcquired = false;
+ }
+
+ while (!inputFrame->codecOutputBuffers.empty()) {
+ auto it = inputFrame->codecOutputBuffers.begin();
+ ALOGV("%s: releaseOutputBuffer index %d", __FUNCTION__, it->index);
+ mCodec->releaseOutputBuffer(it->index);
+ inputFrame->codecOutputBuffers.erase(it);
+ }
+
+ if (inputFrame->yuvBuffer.data != nullptr) {
+ mMainImageConsumer->unlockBuffer(inputFrame->yuvBuffer);
+ inputFrame->yuvBuffer.data = nullptr;
+ mYuvBufferAcquired = false;
+ }
+
+ while (!inputFrame->codecInputBuffers.empty()) {
+ auto it = inputFrame->codecInputBuffers.begin();
+ inputFrame->codecInputBuffers.erase(it);
+ }
+
+ if ((inputFrame->error || mErrorState) && !inputFrame->errorNotified) {
+ notifyError(inputFrame->frameNumber);
+ inputFrame->errorNotified = true;
+ }
+
+ if (inputFrame->fileFd >= 0) {
+ close(inputFrame->fileFd);
+ inputFrame->fileFd = -1;
+ }
+
+ if (inputFrame->anb != nullptr) {
+ sp<ANativeWindow> outputANW = mOutputSurface;
+ outputANW->cancelBuffer(mOutputSurface.get(), inputFrame->anb, /*fence*/ -1);
+ inputFrame->anb = nullptr;
+ }
+}
+
+void HeicCompositeStream::releaseInputFramesLocked(int64_t currentTs) {
+ auto it = mPendingInputFrames.begin();
+ while (it != mPendingInputFrames.end()) {
+ if (it->first <= currentTs) {
+ releaseInputFrameLocked(&it->second);
+ it = mPendingInputFrames.erase(it);
+ } else {
+ it++;
+ }
+ }
+}
+
// One-time MediaCodec setup for this stream: picks a HEIC or HEVC encoder
// based on reported capabilities, starts the codec and callback loopers,
// registers the async callback handler, and configures grid (tiling)
// parameters. 'width'/'height' are the main image dimensions;
// 'cameraDevice' supplies static info used to size the APP segment region.
status_t HeicCompositeStream::initializeCodec(uint32_t width, uint32_t height,
        const sp<CameraDeviceBase>& cameraDevice) {
    ALOGV("%s", __FUNCTION__);

    // Query whether a HEIC encoder handles this size directly (mUseHeic) or
    // whether HEVC tiling is required (useGrid).
    bool useGrid = false;
    bool isSizeSupported = isSizeSupportedByHeifEncoder(width, height,
            &mUseHeic, &useGrid, nullptr);
    if (!isSizeSupported) {
        ALOGE("%s: Encoder doesnt' support size %u x %u!",
                __FUNCTION__, width, height);
        return BAD_VALUE;
    }

    // Create Looper for MediaCodec.
    auto desiredMime = mUseHeic ? MIMETYPE_IMAGE_ANDROID_HEIC : MIMETYPE_VIDEO_HEVC;
    mCodecLooper = new ALooper;
    mCodecLooper->setName("Camera3-HeicComposite-MediaCodecLooper");
    status_t res = mCodecLooper->start(
            false,   // runOnCallingThread
            false,    // canCallJava
            PRIORITY_AUDIO);
    if (res != OK) {
        ALOGE("%s: Failed to start codec looper: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return NO_INIT;
    }

    // Create HEIC/HEVC codec.
    mCodec = MediaCodec::CreateByType(mCodecLooper, desiredMime, true /*encoder*/);
    if (mCodec == nullptr) {
        ALOGE("%s: Failed to create codec for %s", __FUNCTION__, desiredMime);
        return NO_INIT;
    }

    // Create Looper and handler for Codec callback.
    mCodecCallbackHandler = new CodecCallbackHandler(this);
    if (mCodecCallbackHandler == nullptr) {
        ALOGE("%s: Failed to create codec callback handler", __FUNCTION__);
        return NO_MEMORY;
    }
    mCallbackLooper = new ALooper;
    mCallbackLooper->setName("Camera3-HeicComposite-MediaCodecCallbackLooper");
    res = mCallbackLooper->start(
            false,   // runOnCallingThread
            false,    // canCallJava
            PRIORITY_AUDIO);
    if (res != OK) {
        ALOGE("%s: Failed to start media callback looper: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return NO_INIT;
    }
    mCallbackLooper->registerHandler(mCodecCallbackHandler);

    // Route codec notifications to CodecCallbackHandler::onMessageReceived().
    mAsyncNotify = new AMessage(kWhatCallbackNotify, mCodecCallbackHandler);
    res = mCodec->setCallback(mAsyncNotify);
    if (res != OK) {
        ALOGE("%s: Failed to set MediaCodec callback: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    // Create output format and configure the Codec.
    sp<AMessage> outputFormat = new AMessage();
    outputFormat->setString(KEY_MIME, desiredMime);
    outputFormat->setInt32(KEY_BITRATE_MODE, BITRATE_MODE_CQ);
    outputFormat->setInt32(KEY_QUALITY, kDefaultJpegQuality);
    // Ask codec to skip timestamp check and encode all frames.
    outputFormat->setInt64("max-pts-gap-to-encoder", kNoFrameDropMaxPtsGap);

    int32_t gridWidth, gridHeight, gridRows, gridCols;
    if (useGrid || mUseHeic) {
        // Tile on a fixed grid; row/column counts are rounded up so the grid
        // covers the whole image.
        gridWidth = HeicEncoderInfoManager::kGridWidth;
        gridHeight = HeicEncoderInfoManager::kGridHeight;
        gridRows = (height + gridHeight - 1)/gridHeight;
        gridCols = (width + gridWidth - 1)/gridWidth;

        if (mUseHeic) {
            outputFormat->setInt32(KEY_TILE_WIDTH, gridWidth);
            outputFormat->setInt32(KEY_TILE_HEIGHT, gridHeight);
            outputFormat->setInt32(KEY_GRID_COLUMNS, gridCols);
            outputFormat->setInt32(KEY_GRID_ROWS, gridRows);
        }

    } else {
        // Single tile: the "grid" is the whole image.
        gridWidth = width;
        gridHeight = height;
        gridRows = 1;
        gridCols = 1;
    }

    // When tiling via HEVC, the codec consumes one tile-sized buffer per
    // "frame", so the configured width/height are per-tile and the frame
    // rate equals the total tile count.
    outputFormat->setInt32(KEY_WIDTH, !useGrid ? width : gridWidth);
    outputFormat->setInt32(KEY_HEIGHT, !useGrid ? height : gridHeight);
    outputFormat->setInt32(KEY_I_FRAME_INTERVAL, 0);
    outputFormat->setInt32(KEY_COLOR_FORMAT,
            useGrid ? COLOR_FormatYUV420Flexible : COLOR_FormatSurface);
    outputFormat->setInt32(KEY_FRAME_RATE, gridRows * gridCols);
    // This only serves as a hint to encoder when encoding is not real-time.
    outputFormat->setInt32(KEY_OPERATING_RATE, useGrid ? kGridOpRate : kNoGridOpRate);

    res = mCodec->configure(outputFormat, nullptr /*nativeWindow*/,
            nullptr /*crypto*/, CONFIGURE_FLAG_ENCODE);
    if (res != OK) {
        ALOGE("%s: Failed to configure codec: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    mGridWidth = gridWidth;
    mGridHeight = gridHeight;
    mGridRows = gridRows;
    mGridCols = gridCols;
    mUseGrid = useGrid;
    mOutputWidth = width;
    mOutputHeight = height;
    mAppSegmentMaxSize = calcAppSegmentMaxSize(cameraDevice->info());
    // Worst-case output buffer: uncompressed YUV420 size plus the APP
    // segment region (which includes the CameraBlob footer).
    mMaxHeicBufferSize = mOutputWidth * mOutputHeight * 3 / 2 + mAppSegmentMaxSize;

    return OK;
}
+
+void HeicCompositeStream::deinitCodec() {
+ ALOGV("%s", __FUNCTION__);
+ if (mCodec != nullptr) {
+ mCodec->stop();
+ mCodec->release();
+ mCodec.clear();
+ }
+
+ if (mCodecLooper != nullptr) {
+ mCodecLooper->stop();
+ mCodecLooper.clear();
+ }
+
+ if (mCallbackLooper != nullptr) {
+ mCallbackLooper->stop();
+ mCallbackLooper.clear();
+ }
+
+ mAsyncNotify.clear();
+ mFormat.clear();
+}
+
// Return the size of the complete list of app segments, 0 indicates failure.
// Expected buffer layout: [APP1 segment][APP2..APPn segments]... with a
// CameraBlob transport footer at the very end of the 'maxSize'-byte buffer
// recording the total segment size. On success '*app1SegmentSize' receives
// the APP1 segment size including its 2-byte marker.
size_t HeicCompositeStream::findAppSegmentsSize(const uint8_t* appSegmentBuffer,
        size_t maxSize, size_t *app1SegmentSize) {
    if (appSegmentBuffer == nullptr || app1SegmentSize == nullptr) {
        ALOGE("%s: Invalid input appSegmentBuffer %p, app1SegmentSize %p",
                __FUNCTION__, appSegmentBuffer, app1SegmentSize);
        return 0;
    }

    size_t expectedSize = 0;
    // First check for EXIF transport header at the end of the buffer
    const uint8_t *header = appSegmentBuffer + (maxSize - sizeof(struct CameraBlob));
    const struct CameraBlob *blob = (const struct CameraBlob*)(header);
    if (blob->blobId != CameraBlobId::JPEG_APP_SEGMENTS) {
        // NOTE(review): confirm %hu matches the underlying CameraBlobId type.
        ALOGE("%s: Invalid EXIF blobId %hu", __FUNCTION__, blob->blobId);
        return 0;
    }

    expectedSize = blob->blobSize;
    if (expectedSize == 0 || expectedSize > maxSize - sizeof(struct CameraBlob)) {
        ALOGE("%s: Invalid blobSize %zu.", __FUNCTION__, expectedSize);
        return 0;
    }

    uint32_t totalSize = 0;

    // Verify APP1 marker (mandatory)
    uint8_t app1Marker[] = {0xFF, 0xE1};
    if (memcmp(appSegmentBuffer, app1Marker, sizeof(app1Marker))) {
        ALOGE("%s: Invalid APP1 marker: %x, %x", __FUNCTION__,
                appSegmentBuffer[0], appSegmentBuffer[1]);
        return 0;
    }
    totalSize += sizeof(app1Marker);

    // JPEG segment lengths are stored big-endian.
    uint16_t app1Size = (static_cast<uint16_t>(appSegmentBuffer[totalSize]) << 8) +
            appSegmentBuffer[totalSize+1];
    totalSize += app1Size;

    ALOGV("%s: Expected APP segments size %zu, APP1 segment size %u",
            __FUNCTION__, expectedSize, app1Size);
    // Walk any trailing APPn segments; only markers 0xFFE2-0xFFEF are
    // accepted after the initial APP1.
    while (totalSize < expectedSize) {
        if (appSegmentBuffer[totalSize] != 0xFF ||
                appSegmentBuffer[totalSize+1] <= 0xE1 ||
                appSegmentBuffer[totalSize+1] > 0xEF) {
            // Invalid APPn marker
            ALOGE("%s: Invalid APPn marker: %x, %x", __FUNCTION__,
                    appSegmentBuffer[totalSize], appSegmentBuffer[totalSize+1]);
            return 0;
        }
        totalSize += 2;

        uint16_t appnSize = (static_cast<uint16_t>(appSegmentBuffer[totalSize]) << 8) +
                appSegmentBuffer[totalSize+1];
        totalSize += appnSize;
    }

    // The walked segments must land exactly on the size recorded in the
    // blob footer; otherwise the buffer is malformed.
    if (totalSize != expectedSize) {
        ALOGE("%s: Invalid JPEG APP segments: totalSize %u vs expected size %zu",
                __FUNCTION__, totalSize, expectedSize);
        return 0;
    }

    *app1SegmentSize = app1Size + sizeof(app1Marker);
    return expectedSize;
}
+
+int64_t HeicCompositeStream::findTimestampInNsLocked(int64_t timeInUs) {
+ for (const auto& fn : mFrameNumberMap) {
+ if (timeInUs == ns2us(fn.second)) {
+ return fn.second;
+ }
+ }
+ for (const auto& inputFrame : mPendingInputFrames) {
+ if (timeInUs == ns2us(inputFrame.first)) {
+ return inputFrame.first;
+ }
+ }
+ return -1;
+}
+
+status_t HeicCompositeStream::copyOneYuvTile(sp<MediaCodecBuffer>& codecBuffer,
+ const CpuConsumer::LockedBuffer& yuvBuffer,
+ size_t top, size_t left, size_t width, size_t height) {
+ ATRACE_CALL();
+
+ // Get stride information for codecBuffer
+ sp<ABuffer> imageData;
+ if (!codecBuffer->meta()->findBuffer("image-data", &imageData)) {
+ ALOGE("%s: Codec input buffer is not for image data!", __FUNCTION__);
+ return BAD_VALUE;
+ }
+ if (imageData->size() != sizeof(MediaImage2)) {
+ ALOGE("%s: Invalid codec input image size %zu, expected %zu",
+ __FUNCTION__, imageData->size(), sizeof(MediaImage2));
+ return BAD_VALUE;
+ }
+ MediaImage2* imageInfo = reinterpret_cast<MediaImage2*>(imageData->data());
+ if (imageInfo->mType != MediaImage2::MEDIA_IMAGE_TYPE_YUV ||
+ imageInfo->mBitDepth != 8 ||
+ imageInfo->mBitDepthAllocated != 8 ||
+ imageInfo->mNumPlanes != 3) {
+ ALOGE("%s: Invalid codec input image info: mType %d, mBitDepth %d, "
+ "mBitDepthAllocated %d, mNumPlanes %d!", __FUNCTION__,
+ imageInfo->mType, imageInfo->mBitDepth,
+ imageInfo->mBitDepthAllocated, imageInfo->mNumPlanes);
+ return BAD_VALUE;
+ }
+
+ ALOGV("%s: yuvBuffer chromaStep %d, chromaStride %d",
+ __FUNCTION__, yuvBuffer.chromaStep, yuvBuffer.chromaStride);
+ ALOGV("%s: U offset %u, V offset %u, U rowInc %d, V rowInc %d, U colInc %d, V colInc %d",
+ __FUNCTION__, imageInfo->mPlane[MediaImage2::U].mOffset,
+ imageInfo->mPlane[MediaImage2::V].mOffset,
+ imageInfo->mPlane[MediaImage2::U].mRowInc,
+ imageInfo->mPlane[MediaImage2::V].mRowInc,
+ imageInfo->mPlane[MediaImage2::U].mColInc,
+ imageInfo->mPlane[MediaImage2::V].mColInc);
+
+ // Y
+ for (auto row = top; row < top+height; row++) {
+ uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::Y].mOffset +
+ imageInfo->mPlane[MediaImage2::Y].mRowInc * (row - top);
+ memcpy(dst, yuvBuffer.data+row*yuvBuffer.stride+left, width);
+ }
+
+ // U is Cb, V is Cr
+ bool codecUPlaneFirst = imageInfo->mPlane[MediaImage2::V].mOffset >
+ imageInfo->mPlane[MediaImage2::U].mOffset;
+ uint32_t codecUvOffsetDiff = codecUPlaneFirst ?
+ imageInfo->mPlane[MediaImage2::V].mOffset - imageInfo->mPlane[MediaImage2::U].mOffset :
+ imageInfo->mPlane[MediaImage2::U].mOffset - imageInfo->mPlane[MediaImage2::V].mOffset;
+ bool isCodecUvSemiplannar = (codecUvOffsetDiff == 1) &&
+ (imageInfo->mPlane[MediaImage2::U].mRowInc ==
+ imageInfo->mPlane[MediaImage2::V].mRowInc) &&
+ (imageInfo->mPlane[MediaImage2::U].mColInc == 2) &&
+ (imageInfo->mPlane[MediaImage2::V].mColInc == 2);
+ bool isCodecUvPlannar =
+ ((codecUPlaneFirst && codecUvOffsetDiff >=
+ imageInfo->mPlane[MediaImage2::U].mRowInc * imageInfo->mHeight/2) ||
+ ((!codecUPlaneFirst && codecUvOffsetDiff >=
+ imageInfo->mPlane[MediaImage2::V].mRowInc * imageInfo->mHeight/2))) &&
+ imageInfo->mPlane[MediaImage2::U].mColInc == 1 &&
+ imageInfo->mPlane[MediaImage2::V].mColInc == 1;
+ bool cameraUPlaneFirst = yuvBuffer.dataCr > yuvBuffer.dataCb;
+
+ if (isCodecUvSemiplannar && yuvBuffer.chromaStep == 2 &&
+ (codecUPlaneFirst == cameraUPlaneFirst)) {
+ // UV semiplannar
+ // The chrome plane could be either Cb first, or Cr first. Take the
+ // smaller address.
+ uint8_t *src = std::min(yuvBuffer.dataCb, yuvBuffer.dataCr);
+ MediaImage2::PlaneIndex dstPlane = codecUvOffsetDiff > 0 ? MediaImage2::U : MediaImage2::V;
+ for (auto row = top/2; row < (top+height)/2; row++) {
+ uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[dstPlane].mOffset +
+ imageInfo->mPlane[dstPlane].mRowInc * (row - top/2);
+ memcpy(dst, src+row*yuvBuffer.chromaStride+left, width);
+ }
+ } else if (isCodecUvPlannar && yuvBuffer.chromaStep == 1) {
+ // U plane
+ for (auto row = top/2; row < (top+height)/2; row++) {
+ uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::U].mOffset +
+ imageInfo->mPlane[MediaImage2::U].mRowInc * (row - top/2);
+ memcpy(dst, yuvBuffer.dataCb+row*yuvBuffer.chromaStride+left/2, width/2);
+ }
+
+ // V plane
+ for (auto row = top/2; row < (top+height)/2; row++) {
+ uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::V].mOffset +
+ imageInfo->mPlane[MediaImage2::V].mRowInc * (row - top/2);
+ memcpy(dst, yuvBuffer.dataCr+row*yuvBuffer.chromaStride+left/2, width/2);
+ }
+ } else {
+ // Convert between semiplannar and plannar
+ uint8_t *dst = codecBuffer->data();
+ for (auto row = top/2; row < (top+height)/2; row++) {
+ for (auto col = left/2; col < (left+width)/2; col++) {
+ // U/Cb
+ int32_t dstIndex = imageInfo->mPlane[MediaImage2::U].mOffset +
+ imageInfo->mPlane[MediaImage2::U].mRowInc * (row - top/2) +
+ imageInfo->mPlane[MediaImage2::U].mColInc * (col - left/2);
+ int32_t srcIndex = row * yuvBuffer.chromaStride + yuvBuffer.chromaStep * col;
+ dst[dstIndex] = yuvBuffer.dataCb[srcIndex];
+
+ // V/Cr
+ dstIndex = imageInfo->mPlane[MediaImage2::V].mOffset +
+ imageInfo->mPlane[MediaImage2::V].mRowInc * (row - top/2) +
+ imageInfo->mPlane[MediaImage2::V].mColInc * (col - left/2);
+ srcIndex = row * yuvBuffer.chromaStride + yuvBuffer.chromaStep * col;
+ dst[dstIndex] = yuvBuffer.dataCr[srcIndex];
+ }
+ }
+ }
+ return OK;
+}
+
+size_t HeicCompositeStream::calcAppSegmentMaxSize(const CameraMetadata& info) {
+ camera_metadata_ro_entry_t entry = info.find(ANDROID_HEIC_INFO_MAX_JPEG_APP_SEGMENTS_COUNT);
+ size_t maxAppsSegment = 1;
+ if (entry.count > 0) {
+ maxAppsSegment = entry.data.u8[0] < 1 ? 1 :
+ entry.data.u8[0] > 16 ? 16 : entry.data.u8[0];
+ }
+ return maxAppsSegment * (2 + 0xFFFF) + sizeof(struct CameraBlob);
+}
+
+bool HeicCompositeStream::threadLoop() {
+ int64_t currentTs = INT64_MAX;
+ bool newInputAvailable = false;
+
+ {
+ Mutex::Autolock l(mMutex);
+ if (mErrorState) {
+ // In case we landed in error state, return any pending buffers and
+ // halt all further processing.
+ compilePendingInputLocked();
+ releaseInputFramesLocked(currentTs);
+ return false;
+ }
+
+
+ while (!newInputAvailable) {
+ compilePendingInputLocked();
+ newInputAvailable = getNextReadyInputLocked(¤tTs);
+
+ if (!newInputAvailable) {
+ auto failingFrameNumber = getNextFailingInputLocked(¤tTs);
+ if (failingFrameNumber >= 0) {
+ // We cannot erase 'mPendingInputFrames[currentTs]' at this point because it is
+ // possible for two internal stream buffers to fail. In such scenario the
+ // composite stream should notify the client about a stream buffer error only
+ // once and this information is kept within 'errorNotified'.
+ // Any present failed input frames will be removed on a subsequent call to
+ // 'releaseInputFramesLocked()'.
+ releaseInputFrameLocked(&mPendingInputFrames[currentTs]);
+ currentTs = INT64_MAX;
+ }
+
+ auto ret = mInputReadyCondition.waitRelative(mMutex, kWaitDuration);
+ if (ret == TIMED_OUT) {
+ return true;
+ } else if (ret != OK) {
+ ALOGE("%s: Timed wait on condition failed: %s (%d)", __FUNCTION__,
+ strerror(-ret), ret);
+ return false;
+ }
+ }
+ }
+ }
+
+ auto res = processInputFrame(currentTs, mPendingInputFrames[currentTs]);
+ Mutex::Autolock l(mMutex);
+ if (res != OK) {
+ ALOGE("%s: Failed processing frame with timestamp: %" PRIu64 ": %s (%d)",
+ __FUNCTION__, currentTs, strerror(-res), res);
+ mPendingInputFrames[currentTs].error = true;
+ }
+
+ if (mPendingInputFrames[currentTs].error ||
+ (mPendingInputFrames[currentTs].appSegmentWritten &&
+ mPendingInputFrames[currentTs].pendingOutputTiles == 0)) {
+ releaseInputFramesLocked(currentTs);
+ }
+
+ return true;
+}
+
+bool HeicCompositeStream::onStreamBufferError(const CaptureResultExtras& resultExtras) {
+ bool res = false;
+ // Buffer errors concerning internal composite streams should not be directly visible to
+ // camera clients. They must only receive a single buffer error with the public composite
+ // stream id.
+ if ((resultExtras.errorStreamId == mAppSegmentStreamId) ||
+ (resultExtras.errorStreamId == mMainImageStreamId)) {
+ flagAnErrorFrameNumber(resultExtras.frameNumber);
+ res = true;
+ }
+
+ return res;
+}
+
// Dispatch asynchronous MediaCodec notifications (delivered as kWhatCallbackNotify
// messages on the callback looper) to the owning HeicCompositeStream. Holds
// only a weak reference to the parent, so callbacks arriving after the
// stream is destroyed are dropped silently.
void HeicCompositeStream::CodecCallbackHandler::onMessageReceived(const sp<AMessage> &msg) {
    sp<HeicCompositeStream> parent = mParent.promote();
    if (parent == nullptr) return;

    switch (msg->what()) {
        case kWhatCallbackNotify: {
             int32_t cbID;
             if (!msg->findInt32("callbackID", &cbID)) {
                 ALOGE("kWhatCallbackNotify: callbackID is expected.");
                 break;
             }

             ALOGV("kWhatCallbackNotify: cbID = %d", cbID);

             switch (cbID) {
                 // A codec input buffer is free to be filled with a YUV tile.
                 case MediaCodec::CB_INPUT_AVAILABLE: {
                     int32_t index;
                     if (!msg->findInt32("index", &index)) {
                         ALOGE("CB_INPUT_AVAILABLE: index is expected.");
                         break;
                     }
                     parent->onHeicInputFrameAvailable(index);
                     break;
                 }

                 // An encoded buffer is ready; forward its metadata so the
                 // worker thread can mux it.
                 case MediaCodec::CB_OUTPUT_AVAILABLE: {
                     int32_t index;
                     size_t offset;
                     size_t size;
                     int64_t timeUs;
                     int32_t flags;

                     if (!msg->findInt32("index", &index)) {
                         ALOGE("CB_OUTPUT_AVAILABLE: index is expected.");
                         break;
                     }
                     if (!msg->findSize("offset", &offset)) {
                         ALOGE("CB_OUTPUT_AVAILABLE: offset is expected.");
                         break;
                     }
                     if (!msg->findSize("size", &size)) {
                         ALOGE("CB_OUTPUT_AVAILABLE: size is expected.");
                         break;
                     }
                     if (!msg->findInt64("timeUs", &timeUs)) {
                         ALOGE("CB_OUTPUT_AVAILABLE: timeUs is expected.");
                         break;
                     }
                     if (!msg->findInt32("flags", &flags)) {
                         ALOGE("CB_OUTPUT_AVAILABLE: flags is expected.");
                         break;
                     }

                     CodecOutputBufferInfo bufferInfo = {
                         index,
                         (int32_t)offset,
                         (int32_t)size,
                         timeUs,
                         (uint32_t)flags};

                     parent->onHeicOutputFrameAvailable(bufferInfo);
                     break;
                 }

                 // The encoder reported its (final) output format — needed
                 // before a muxer track can be added.
                 case MediaCodec::CB_OUTPUT_FORMAT_CHANGED: {
                     sp<AMessage> format;
                     if (!msg->findMessage("format", &format)) {
                         ALOGE("CB_OUTPUT_FORMAT_CHANGED: format is expected.");
                         break;
                     }

                     parent->onHeicFormatChanged(format);
                     break;
                 }

                 case MediaCodec::CB_ERROR: {
                     // NOTE(review): findInt32 writes an int32_t; this relies
                     // on status_t being 32-bit — confirm on all targets.
                     status_t err;
                     int32_t actionCode;
                     AString detail;
                     if (!msg->findInt32("err", &err)) {
                         ALOGE("CB_ERROR: err is expected.");
                         break;
                     }
                     if (!msg->findInt32("action", &actionCode)) {
                         ALOGE("CB_ERROR: action is expected.");
                         break;
                     }
                     // Detail string is optional.
                     msg->findString("detail", &detail);
                     ALOGE("Codec reported error(0x%x), actionCode(%d), detail(%s)",
                             err, actionCode, detail.c_str());

                     parent->onHeicCodecError();
                     break;
                 }

                 default: {
                     ALOGE("kWhatCallbackNotify: callbackID(%d) is unexpected.", cbID);
                     break;
                 }
             }
             break;
        }

        default:
            ALOGE("shouldn't be here");
            break;
    }
}
+
+}; // namespace camera3
+}; // namespace android
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.h b/services/camera/libcameraservice/api2/HeicCompositeStream.h
new file mode 100644
index 0000000..0a76256
--- /dev/null
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.h
@@ -0,0 +1,250 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_CAMERA3_HEIC_COMPOSITE_STREAM_H
+#define ANDROID_SERVERS_CAMERA_CAMERA3_HEIC_COMPOSITE_STREAM_H
+
+#include <queue>
+
+#include <gui/IProducerListener.h>
+#include <gui/CpuConsumer.h>
+
+#include <media/hardware/VideoAPI.h>
+#include <media/MediaCodecBuffer.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/MediaMuxer.h>
+
+#include "CompositeStream.h"
+
+namespace android {
+namespace camera3 {
+
+/*
+ * Composite stream that produces HEIC output from two internal camera streams:
+ * a JPEG APP-segment blob stream (EXIF data) and a main image stream, encoded
+ * via MediaCodec (HEIC, or HEVC with optional grid tiling) and muxed through
+ * MediaMuxer into the client-provided output surface.
+ */
+class HeicCompositeStream : public CompositeStream, public Thread,
+        public CpuConsumer::FrameAvailableListener {
+public:
+    HeicCompositeStream(wp<CameraDeviceBase> device,
+            wp<hardware::camera2::ICameraDeviceCallbacks> cb);
+    ~HeicCompositeStream() override;
+
+    // True when the given output surface should be handled as a HEIC
+    // composite stream (criteria implemented in the .cpp).
+    static bool isHeicCompositeStream(const sp<Surface> &surface);
+
+    status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
+            bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
+            camera3_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
+            std::vector<int> *surfaceIds, int streamSetId, bool isShared) override;
+
+    status_t deleteInternalStreams() override;
+
+    status_t configureStream() override;
+
+    status_t insertGbp(SurfaceMap* /*out*/outSurfaceMap, Vector<int32_t>* /*out*/outputStreamIds,
+            int32_t* /*out*/currentStreamId) override;
+
+    void onShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp) override;
+
+    int getStreamId() override { return mMainImageStreamId; }
+
+    // Use onShutter to keep track of frame number <-> timestamp mapping.
+    void onBufferReleased(const BufferInfo& bufferInfo) override;
+    void onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId,
+            const CameraMetadata& settings) override;
+
+    // CpuConsumer listener implementation
+    void onFrameAvailable(const BufferItem& item) override;
+
+    // Return stream information about the internal camera streams
+    static status_t getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
+            const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/);
+
+    // Queries the media framework (via HeicEncoderInfoManager) whether the
+    // given size is encodable, and which encoder path/stall to expect.
+    static bool isSizeSupportedByHeifEncoder(int32_t width, int32_t height,
+            bool* useHeic, bool* useGrid, int64_t* stall);
+    static bool isInMemoryTempFileSupported();
+protected:
+
+    bool threadLoop() override;
+    bool onStreamBufferError(const CaptureResultExtras& resultExtras) override;
+    void onResultError(const CaptureResultExtras& /*resultExtras*/) override {}
+
+private:
+    //
+    // HEIC/HEVC Codec related structures, utility functions, and callbacks
+    //
+    struct CodecOutputBufferInfo {
+        int32_t index;
+        int32_t offset;
+        int32_t size;
+        int64_t timeUs;
+        uint32_t flags;
+    };
+
+    struct CodecInputBufferInfo {
+        int32_t index;
+        int64_t timeUs;
+        size_t tileIndex;
+    };
+
+    // Receives MediaCodec async notifications and forwards them to the parent
+    // stream's onHeic* handlers below.
+    class CodecCallbackHandler : public AHandler {
+    public:
+        explicit CodecCallbackHandler(wp<HeicCompositeStream> parent) {
+            mParent = parent;
+        }
+        virtual void onMessageReceived(const sp<AMessage> &msg);
+    private:
+        wp<HeicCompositeStream> mParent;
+    };
+
+    enum {
+        kWhatCallbackNotify,
+    };
+
+    bool mUseHeic;
+    sp<MediaCodec> mCodec;
+    sp<ALooper> mCodecLooper, mCallbackLooper;
+    sp<CodecCallbackHandler> mCodecCallbackHandler;
+    sp<AMessage> mAsyncNotify;
+    sp<AMessage> mFormat;
+    size_t mNumOutputTiles;
+
+    int32_t mOutputWidth, mOutputHeight;
+    size_t mMaxHeicBufferSize;
+    int32_t mGridWidth, mGridHeight;
+    size_t mGridRows, mGridCols;
+    bool mUseGrid; // Whether to use framework YUV frame tiling.
+
+    static const int64_t kNoFrameDropMaxPtsGap = -1000000;
+    static const int32_t kNoGridOpRate = 30;
+    static const int32_t kGridOpRate = 120;
+
+    void onHeicOutputFrameAvailable(const CodecOutputBufferInfo& bufferInfo);
+    void onHeicInputFrameAvailable(int32_t index);  // Only called for YUV input mode.
+    void onHeicFormatChanged(sp<AMessage>& newFormat);
+    void onHeicCodecError();
+
+    status_t initializeCodec(uint32_t width, uint32_t height,
+            const sp<CameraDeviceBase>& cameraDevice);
+    void deinitCodec();
+
+    //
+    // Composite stream related structures, utility functions and callbacks.
+    //
+    struct InputFrame {
+        int32_t orientation;
+        int32_t quality;
+
+        CpuConsumer::LockedBuffer          appSegmentBuffer;
+        std::vector<CodecOutputBufferInfo> codecOutputBuffers;
+
+        // Fields that are only applicable to HEVC tiling.
+        CpuConsumer::LockedBuffer          yuvBuffer;
+        std::vector<CodecInputBufferInfo>  codecInputBuffers;
+
+        bool                      error;
+        bool                      errorNotified;
+        int64_t                   frameNumber;
+
+        sp<MediaMuxer>            muxer;
+        int                       fenceFd;
+        int                       fileFd;
+        ssize_t                   trackIndex;
+        ANativeWindowBuffer       *anb;
+
+        bool                      appSegmentWritten;
+        size_t                    pendingOutputTiles;
+        size_t                    codecInputCounter;
+
+        InputFrame() : orientation(0), quality(kDefaultJpegQuality), error(false),
+                       errorNotified(false), frameNumber(-1), fenceFd(-1), fileFd(-1),
+                       trackIndex(-1), anb(nullptr), appSegmentWritten(false),
+                       pendingOutputTiles(0), codecInputCounter(0) { }
+    };
+
+    void compilePendingInputLocked();
+    // Find first complete and valid frame with smallest timestamp
+    bool getNextReadyInputLocked(int64_t *currentTs /*out*/);
+    // Find next failing frame number with smallest timestamp and return respective frame number
+    int64_t getNextFailingInputLocked(int64_t *currentTs /*out*/);
+
+    status_t processInputFrame(nsecs_t timestamp, InputFrame &inputFrame);
+    status_t processCodecInputFrame(InputFrame &inputFrame);
+    status_t startMuxerForInputFrame(nsecs_t timestamp, InputFrame &inputFrame);
+    status_t processAppSegment(nsecs_t timestamp, InputFrame &inputFrame);
+    status_t processOneCodecOutputFrame(nsecs_t timestamp, InputFrame &inputFrame);
+    status_t processCompletedInputFrame(nsecs_t timestamp, InputFrame &inputFrame);
+
+    void releaseInputFrameLocked(InputFrame *inputFrame /*out*/);
+    void releaseInputFramesLocked(int64_t currentTs);
+
+    size_t findAppSegmentsSize(const uint8_t* appSegmentBuffer, size_t maxSize,
+            size_t* app1SegmentSize);
+    int64_t findTimestampInNsLocked(int64_t timeInUs);
+    status_t copyOneYuvTile(sp<MediaCodecBuffer>& codecBuffer,
+            const CpuConsumer::LockedBuffer& yuvBuffer,
+            size_t top, size_t left, size_t width, size_t height);
+    static size_t calcAppSegmentMaxSize(const CameraMetadata& info);
+
+    static const nsecs_t kWaitDuration = 10000000; // 10 ms
+    static const int32_t kDefaultJpegQuality = 99;
+    static const auto kJpegDataSpace = HAL_DATASPACE_V0_JFIF;
+    static const android_dataspace kAppSegmentDataSpace =
+            static_cast<android_dataspace>(HAL_DATASPACE_JPEG_APP_SEGMENTS);
+    static const android_dataspace kHeifDataSpace =
+            static_cast<android_dataspace>(HAL_DATASPACE_HEIF);
+
+    // Internal stream carrying JPEG APP segments (see kAppSegmentDataSpace).
+    int               mAppSegmentStreamId, mAppSegmentSurfaceId;
+    sp<CpuConsumer>   mAppSegmentConsumer;
+    sp<Surface>       mAppSegmentSurface;
+    bool              mAppSegmentBufferAcquired;
+    size_t            mAppSegmentMaxSize;
+
+    // Internal main image stream; its id is what getStreamId() reports.
+    int               mMainImageStreamId, mMainImageSurfaceId;
+    sp<Surface>       mMainImageSurface;
+    sp<CpuConsumer>   mMainImageConsumer; // Only applicable for HEVC codec.
+    bool              mYuvBufferAcquired; // Only applicable to HEVC codec
+
+    sp<Surface>       mOutputSurface;
+    sp<ProducerListener> mProducerListener;
+
+
+    // Map from frame number to JPEG setting of orientation+quality
+    std::map<int64_t, std::pair<int32_t, int32_t>> mSettingsByFrameNumber;
+    // Map from timestamp to JPEG setting of orientation+quality
+    std::map<int64_t, std::pair<int32_t, int32_t>> mSettingsByTimestamp;
+
+    // Keep all incoming APP segment Blob buffer pending further processing.
+    std::vector<int64_t> mInputAppSegmentBuffers;
+
+    // Keep all incoming HEIC blob buffer pending further processing.
+    std::vector<CodecOutputBufferInfo> mCodecOutputBuffers;
+    std::queue<int64_t> mCodecOutputBufferTimestamps;
+    size_t mOutputBufferCounter;
+
+    // Keep all incoming Yuv buffer pending tiling and encoding (for HEVC YUV tiling only)
+    std::vector<int64_t> mInputYuvBuffers;
+    // Keep all codec input buffers ready to be filled out (for HEVC YUV tiling only)
+    std::vector<int32_t> mCodecInputBuffers;
+
+    // Artificial strictly incremental YUV grid timestamp to make encoder happy.
+    int64_t mGridTimestampUs;
+
+    // In most common use case, entries are accessed in order.
+    std::map<int64_t, InputFrame> mPendingInputFrames;
+};
+
+}; // namespace camera3
+}; // namespace android
+
+#endif //ANDROID_SERVERS_CAMERA_CAMERA3_HEIC_COMPOSITE_STREAM_H
diff --git a/services/camera/libcameraservice/api2/HeicEncoderInfoManager.cpp b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.cpp
new file mode 100644
index 0000000..ed9be6e
--- /dev/null
+++ b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.cpp
@@ -0,0 +1,294 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "HeicEncoderInfoManager"
+//#define LOG_NDEBUG 0
+
+#include <cstdint>
+#include <regex>
+
+#include <cutils/properties.h>
+#include <log/log_main.h>
+#include <system/graphics.h>
+
+#include <media/stagefright/MediaCodecList.h>
+#include <media/stagefright/foundation/MediaDefs.h>
+#include <media/stagefright/foundation/ABuffer.h>
+
+#include "HeicEncoderInfoManager.h"
+
+namespace android {
+namespace camera3 {
+
+// Queries the media framework once at construction. mIsInited stays false when
+// capability discovery fails, which makes isSizeSupported() reject all sizes.
+HeicEncoderInfoManager::HeicEncoderInfoManager() :
+        mIsInited(false),
+        mMinSizeHeic(0, 0),
+        mMaxSizeHeic(INT32_MAX, INT32_MAX),
+        mHasHEVC(false),
+        mHasHEIC(false),
+        mDisableGrid(false) {
+    if (initialize() == OK) {
+        mIsInited = true;
+    }
+}
+
+// All members are value types or RAII-managed containers; nothing to release.
+HeicEncoderInfoManager::~HeicEncoderInfoManager() {
+}
+
+/**
+ * Check whether an image of |width| x |height| can be encoded to HEIF, and if
+ * so which encoder path to use.
+ *
+ * @param useHeic set to true when the dedicated HEIC encoder handles the full
+ *        size; false when the HEVC encoder is used instead. Must be non-null.
+ * @param useGrid set to true when the framework must tile the input into grids
+ *        before HEVC encoding. Must be non-null.
+ * @param stall optional out-parameter receiving the estimated stall duration
+ *        in nanoseconds, derived from the encoder's measured frame rate (or a
+ *        hardcoded per-tile estimate when no measurement is advertised).
+ * @return true when some encoder supports the size, false otherwise.
+ */
+bool HeicEncoderInfoManager::isSizeSupported(int32_t width, int32_t height, bool* useHeic,
+        bool* useGrid, int64_t* stall) const {
+    if (useHeic == nullptr || useGrid == nullptr) {
+        ALOGE("%s: invalid parameters: useHeic %p, useGrid %p",
+                __FUNCTION__, useHeic, useGrid);
+        return false;
+    }
+    if (!mIsInited) return false;
+
+    bool chooseHeic = false, enableGrid = true;
+    if (mHasHEIC && width >= mMinSizeHeic.first &&
+            height >= mMinSizeHeic.second && width <= mMaxSizeHeic.first &&
+            height <= mMaxSizeHeic.second) {
+        chooseHeic = true;
+        enableGrid = false;
+    } else if (mHasHEVC) {
+        bool fullSizeSupportedByHevc = (width >= mMinSizeHevc.first &&
+                height >= mMinSizeHevc.second &&
+                width <= mMaxSizeHevc.first &&
+                height <= mMaxSizeHevc.second);
+        if (fullSizeSupportedByHevc && (mDisableGrid ||
+                (width <= 1920 && height <= 1080))) {
+            enableGrid = false;
+        }
+    } else {
+        // No encoder available for the requested size.
+        return false;
+    }
+
+    // Publish the codec choice before the stall estimation: the "no measured
+    // frame rate" branch below returns early, and previously left
+    // *useHeic/*useGrid uninitialized on that path.
+    *useHeic = chooseHeic;
+    *useGrid = enableGrid;
+
+    if (stall != nullptr) {
+        // Find preferred encoder which advertise
+        // "measured-frame-rate-WIDTHxHEIGHT-range" key.
+        const FrameRateMaps& maps =
+                (chooseHeic && mHeicFrameRateMaps.size() > 0) ?
+                mHeicFrameRateMaps : mHevcFrameRateMaps;
+        const auto& closestSize = findClosestSize(maps, width, height);
+        if (closestSize == maps.end()) {
+            // The "measured-frame-rate-WIDTHxHEIGHT-range" key is optional.
+            // Hardcode to some default value (3.33ms * tile count) based on resolution.
+            *stall = 3333333LL * width * height / (kGridWidth * kGridHeight);
+            return true;
+        }
+
+        // Derive stall durations based on average fps of the closest size.
+        constexpr int64_t NSEC_PER_SEC = 1000000000LL;
+        int32_t avgFps = (closestSize->second.first + closestSize->second.second)/2;
+        float ratio = 1.0f * width * height /
+                (closestSize->first.first * closestSize->first.second);
+        *stall = ratio * NSEC_PER_SEC / avgFps;
+    }
+
+    return true;
+}
+
+// Populates the HEIC/HEVC size ranges and frame-rate maps from the media codec
+// list. Returns OK both on success and when HEIF is simply unsupported (no
+// encoder, or HEVC without CQ mode); returns BAD_VALUE on inconsistent or
+// unparsable codec capabilities.
+status_t HeicEncoderInfoManager::initialize() {
+    mDisableGrid = property_get_bool("camera.heic.disable_grid", false);
+    sp<IMediaCodecList> codecsList = MediaCodecList::getInstance();
+    if (codecsList == nullptr) {
+        // No media codec available.
+        return OK;
+    }
+
+    sp<AMessage> heicDetails = getCodecDetails(codecsList, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC);
+    sp<AMessage> hevcDetails = getCodecDetails(codecsList, MEDIA_MIMETYPE_VIDEO_HEVC);
+
+    if (hevcDetails == nullptr) {
+        if (heicDetails != nullptr) {
+            ALOGE("%s: Device must support HEVC codec if HEIC codec is available!",
+                    __FUNCTION__);
+            return BAD_VALUE;
+        }
+        // Neither encoder exists: not an error, HEIF is just unsupported.
+        return OK;
+    }
+
+    // Check CQ mode for HEVC codec
+    {
+        AString bitrateModes;
+        auto hasItem = hevcDetails->findString("feature-bitrate-modes", &bitrateModes);
+        if (!hasItem) {
+            ALOGE("%s: Failed to query bitrate modes for HEVC codec", __FUNCTION__);
+            return BAD_VALUE;
+        }
+        ALOGV("%s: HEVC codec's feature-bitrate-modes value is %d, %s",
+                __FUNCTION__, hasItem, bitrateModes.c_str());
+        std::regex pattern("(^|,)CQ($|,)", std::regex_constants::icase);
+        if (!std::regex_search(bitrateModes.c_str(), pattern)) {
+            // Without constant-quality mode, bail out with mHasHEVC still
+            // false, so isSizeSupported() will reject all sizes.
+            return OK;
+        }
+    }
+
+    // HEIC size range
+    if (heicDetails != nullptr) {
+        auto res = getCodecSizeRange(MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC,
+                heicDetails, &mMinSizeHeic, &mMaxSizeHeic, &mHeicFrameRateMaps);
+        if (res != OK) {
+            ALOGE("%s: Failed to get HEIC codec size range: %s (%d)", __FUNCTION__,
+                    strerror(-res), res);
+            return BAD_VALUE;
+        }
+        mHasHEIC = true;
+    }
+
+    // HEVC size range
+    {
+        auto res = getCodecSizeRange(MEDIA_MIMETYPE_VIDEO_HEVC,
+                hevcDetails, &mMinSizeHevc, &mMaxSizeHevc, &mHevcFrameRateMaps);
+        if (res != OK) {
+            ALOGE("%s: Failed to get HEVC codec size range: %s (%d)", __FUNCTION__,
+                    strerror(-res), res);
+            return BAD_VALUE;
+        }
+
+        mHasHEVC = true;
+    }
+
+    return OK;
+}
+
+/**
+ * Parse all "measured-frame-rate-WxH-range" entries of a codec's details
+ * message into (width, height) -> (minFps, maxFps) map entries.
+ *
+ * @param details codec capability details message; must be non-null.
+ * @param maps output map; must be non-null. Matching entries are added.
+ * @return OK on success (including when no matching entries exist),
+ *         BAD_VALUE on null arguments or a malformed fps range value.
+ */
+status_t HeicEncoderInfoManager::getFrameRateMaps(sp<AMessage> details, FrameRateMaps* maps) {
+    if (details == nullptr || maps == nullptr) {
+        ALOGE("%s: Invalid input: details: %p, maps: %p", __FUNCTION__, details.get(), maps);
+        return BAD_VALUE;
+    }
+
+    // Compile both patterns once: std::regex construction is expensive and was
+    // previously repeated for every entry in the loop.
+    std::regex frameRateNamePattern("measured-frame-rate-([0-9]+)[*x]([0-9]+)-range",
+            std::regex_constants::icase);
+    std::regex frameRatePattern("([0-9]+)-([0-9]+)");
+
+    for (size_t i = 0; i < details->countEntries(); i++) {
+        AMessage::Type type;
+        const char* entryName = details->getEntryNameAt(i, &type);
+        if (type != AMessage::kTypeString) continue;
+        std::cmatch sizeMatch;
+        if (std::regex_match(entryName, sizeMatch, frameRateNamePattern) &&
+                sizeMatch.size() == 3) {
+            AMessage::ItemData item = details->getEntryAt(i);
+            AString fpsRangeStr;
+            if (item.find(&fpsRangeStr)) {
+                ALOGV("%s: %s", entryName, fpsRangeStr.c_str());
+                std::cmatch fpsMatch;
+                if (std::regex_match(fpsRangeStr.c_str(), fpsMatch, frameRatePattern) &&
+                        fpsMatch.size() == 3) {
+                    maps->emplace(
+                            std::make_pair(stoi(sizeMatch[1]), stoi(sizeMatch[2])),
+                            std::make_pair(stoi(fpsMatch[1]), stoi(fpsMatch[2])));
+                } else {
+                    return BAD_VALUE;
+                }
+            }
+        }
+    }
+    return OK;
+}
+
+/**
+ * Extract the supported size range (and measured frame rate maps) for a codec.
+ *
+ * @param codecName MIME type, used for logging only.
+ * @param details codec capability details message.
+ * @param minSize/maxSize output (width, height) bounds parsed from the
+ *        "size-range" key.
+ * @param frameRateMaps output measured frame-rate entries (may stay empty).
+ * @return OK on success; BAD_VALUE on null arguments or an absent, garbled,
+ *         or inconsistent "size-range" value.
+ */
+status_t HeicEncoderInfoManager::getCodecSizeRange(
+        const char* codecName,
+        sp<AMessage> details,
+        std::pair<int32_t, int32_t>* minSize,
+        std::pair<int32_t, int32_t>* maxSize,
+        FrameRateMaps* frameRateMaps) {
+    if (codecName == nullptr || minSize == nullptr || maxSize == nullptr ||
+            details == nullptr || frameRateMaps == nullptr) {
+        return BAD_VALUE;
+    }
+
+    AString sizeRange;
+    auto hasItem = details->findString("size-range", &sizeRange);
+    if (!hasItem) {
+        ALOGE("%s: Failed to query size range for codec %s", __FUNCTION__, codecName);
+        return BAD_VALUE;
+    }
+    ALOGV("%s: %s codec's size range is %s", __FUNCTION__, codecName, sizeRange.c_str());
+    std::regex pattern("([0-9]+)[*x]([0-9]+)-([0-9]+)[*x]([0-9]+)");
+    std::cmatch match;
+    // A non-matching "size-range" value must be an error: the previous code
+    // fell through and returned OK with *minSize/*maxSize left untouched.
+    if (!std::regex_match(sizeRange.c_str(), match, pattern) || match.size() != 5) {
+        ALOGE("%s: Failed to parse %s codec size range: %s",
+                __FUNCTION__, codecName, sizeRange.c_str());
+        return BAD_VALUE;
+    }
+    minSize->first = stoi(match[1]);
+    minSize->second = stoi(match[2]);
+    maxSize->first = stoi(match[3]);
+    maxSize->second = stoi(match[4]);
+    if (minSize->first > maxSize->first ||
+            minSize->second > maxSize->second) {
+        ALOGE("%s: Invalid %s codec size range: %s",
+                __FUNCTION__, codecName, sizeRange.c_str());
+        return BAD_VALUE;
+    }
+
+    auto res = getFrameRateMaps(details, frameRateMaps);
+    if (res != OK) {
+        return res;
+    }
+
+    return OK;
+}
+
+// Returns the map entry whose advertised size has the smallest area
+// difference from width x height (maps.begin() == maps.end() when empty).
+// The area math is done in 64-bit: a 32-bit signed pixel-count product can
+// overflow (undefined behavior) for very large advertised sizes.
+HeicEncoderInfoManager::FrameRateMaps::const_iterator HeicEncoderInfoManager::findClosestSize(
+        const FrameRateMaps& maps, int32_t width, int32_t height) const {
+    int64_t minDiff = INT64_MAX;
+    const int64_t targetArea = static_cast<int64_t>(width) * height;
+    FrameRateMaps::const_iterator closestIter = maps.begin();
+    for (auto iter = maps.begin(); iter != maps.end(); ++iter) {
+        // Use area difference between the sizes to approximate size
+        // difference.
+        int64_t area = static_cast<int64_t>(iter->first.first) * iter->first.second;
+        int64_t diff = (area > targetArea) ? area - targetArea : targetArea - area;
+        if (diff < minDiff) {
+            closestIter = iter;
+            minDiff = diff;
+        }
+    }
+    return closestIter;
+}
+
+// Look up the capability details message for an encoder of the given MIME
+// type. Returns nullptr when no such encoder exists or any lookup step fails.
+sp<AMessage> HeicEncoderInfoManager::getCodecDetails(
+        sp<IMediaCodecList> codecsList, const char* name) {
+    // An absent encoder is a normal condition, so no error log here.
+    ssize_t codecIdx = codecsList->findCodecByType(name, true /*encoder*/);
+    if (codecIdx < 0) {
+        return nullptr;
+    }
+
+    sp<MediaCodecInfo> codecInfo = codecsList->getCodecInfo(codecIdx);
+    if (codecInfo == nullptr) {
+        ALOGE("%s: Failed to get codec info for %s", __FUNCTION__, name);
+        return nullptr;
+    }
+
+    sp<MediaCodecInfo::Capabilities> codecCaps = codecInfo->getCapabilitiesFor(name);
+    if (codecCaps == nullptr) {
+        ALOGE("%s: Failed to get capabilities for codec %s", __FUNCTION__, name);
+        return nullptr;
+    }
+
+    sp<AMessage> codecDetails = codecCaps->getDetails();
+    if (codecDetails == nullptr) {
+        ALOGE("%s: Failed to get details for codec %s", __FUNCTION__, name);
+        return nullptr;
+    }
+    return codecDetails;
+}
+} //namespace camera3
+} // namespace android
diff --git a/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h
new file mode 100644
index 0000000..fb0b914
--- /dev/null
+++ b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h
@@ -0,0 +1,77 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_HEICENCODER_INFO_MANAGER_H
+#define ANDROID_SERVERS_CAMERA_HEICENCODER_INFO_MANAGER_H
+
+#include <unordered_map>
+#include <utility>
+#include <utils/Errors.h>
+#include <utils/StrongPointer.h>
+
+#include <media/IMediaCodecList.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+namespace android {
+namespace camera3 {
+
+/**
+ * Process-wide singleton cache of HEIC/HEVC encoder capabilities queried from
+ * the media codec list, used to decide whether (and how) a given size can be
+ * encoded to HEIF.
+ */
+class HeicEncoderInfoManager {
+public:
+    static HeicEncoderInfoManager& getInstance() {
+        static HeicEncoderInfoManager instance;
+        return instance;
+    }
+
+    // See HeicEncoderInfoManager.cpp for parameter semantics. Returns false
+    // when initialization failed or no encoder supports the size.
+    bool isSizeSupported(int32_t width, int32_t height,
+            bool* useHeic, bool* useGrid, int64_t* stall);
+
+    // Tile dimensions used when the framework grids YUV input for HEVC.
+    static const auto kGridWidth = 512;
+    static const auto kGridHeight = 512;
+private:
+    struct SizePairHash {
+        std::size_t operator () (const std::pair<int32_t,int32_t> &p) const {
+            // Mix in size_t: "p.first * 31 + p.second" in int arithmetic can
+            // overflow (undefined behavior) for large advertised sizes.
+            return static_cast<std::size_t>(p.first) * 31 +
+                    static_cast<std::size_t>(p.second);
+        }
+    };
+
+    typedef std::unordered_map<std::pair<int32_t, int32_t>,
+            std::pair<int32_t, int32_t>, SizePairHash> FrameRateMaps;
+
+    HeicEncoderInfoManager();
+    virtual ~HeicEncoderInfoManager();
+
+    status_t initialize();
+    status_t getFrameRateMaps(sp<AMessage> details, FrameRateMaps* maps);
+    status_t getCodecSizeRange(const char* codecName, sp<AMessage> details,
+            std::pair<int32_t, int32_t>* minSize, std::pair<int32_t, int32_t>* maxSize,
+            FrameRateMaps* frameRateMaps);
+    FrameRateMaps::const_iterator findClosestSize(const FrameRateMaps& maps,
+            int32_t width, int32_t height) const;
+    sp<AMessage> getCodecDetails(sp<IMediaCodecList> codecsList, const char* name);
+
+    bool mIsInited;
+    std::pair<int32_t, int32_t> mMinSizeHeic, mMaxSizeHeic;
+    std::pair<int32_t, int32_t> mMinSizeHevc, mMaxSizeHevc;
+    bool mHasHEVC, mHasHEIC;
+    FrameRateMaps mHeicFrameRateMaps, mHevcFrameRateMaps;
+    bool mDisableGrid;
+
+};
+
+} // namespace camera3
+} // namespace android
+
+#endif // ANDROID_SERVERS_CAMERA_HEICENCODER_INFO_MANAGER_H