Move Codec2-related code from hardware/google/av to media/codec2
Test: None
Bug: 112362730
Change-Id: Ie2f8ff431d65c40333f267ab9877d47089adeea4
diff --git a/media/codec2/components/avc/Android.bp b/media/codec2/components/avc/Android.bp
new file mode 100644
index 0000000..d883951
--- /dev/null
+++ b/media/codec2/components/avc/Android.bp
@@ -0,0 +1,37 @@
+cc_library_shared {
+ name: "libstagefright_soft_c2avcdec",
+ defaults: [
+ "libstagefright_soft_c2-defaults",
+ "libstagefright_soft_c2_sanitize_signed-defaults",
+ ],
+
+ static_libs: ["libavcdec"],
+
+ srcs: ["C2SoftAvcDec.cpp"],
+
+ include_dirs: [
+ "external/libavc/decoder",
+ "external/libavc/common",
+ ],
+}
+
+cc_library_shared {
+ name: "libstagefright_soft_c2avcenc",
+ defaults: [
+ "libstagefright_soft_c2-defaults",
+ "libstagefright_soft_c2_sanitize_signed-defaults",
+ ],
+
+ static_libs: ["libavcenc"],
+
+ srcs: ["C2SoftAvcEnc.cpp"],
+
+ include_dirs: [
+ "external/libavc/encoder",
+ "external/libavc/common",
+ ],
+
+ cflags: [
+ "-Wno-unused-variable",
+ ],
+}
diff --git a/media/codec2/components/avc/C2SoftAvcDec.cpp b/media/codec2/components/avc/C2SoftAvcDec.cpp
new file mode 100644
index 0000000..3e62744
--- /dev/null
+++ b/media/codec2/components/avc/C2SoftAvcDec.cpp
@@ -0,0 +1,978 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftAvcDec"
+#include <log/log.h>
+
+#include <media/stagefright/foundation/MediaDefs.h>
+
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+#include <Codec2Mapper.h>
+#include <SimpleC2Interface.h>
+
+#include "C2SoftAvcDec.h"
+#include "ih264d.h"
+
+namespace android {
+
+namespace {
+
+constexpr char COMPONENT_NAME[] = "c2.android.avc.decoder";
+
+} // namespace
+
+class C2SoftAvcDec::IntfImpl : public SimpleInterface<void>::BaseParams {
+public:
+ explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
+ : SimpleInterface<void>::BaseParams(
+ helper,
+ COMPONENT_NAME,
+ C2Component::KIND_DECODER,
+ C2Component::DOMAIN_VIDEO,
+ MEDIA_MIMETYPE_VIDEO_AVC) {
+ noPrivateBuffers(); // TODO: account for our buffers here
+ noInputReferences();
+ noOutputReferences();
+ noInputLatency();
+ noTimeStretch();
+
+ // TODO: output latency and reordering
+
+ addParameter(
+ DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
+ .withConstValue(new C2ComponentAttributesSetting(C2Component::ATTRIB_IS_TEMPORAL))
+ .build());
+
+        // coded and output picture sizes are the same for this codec
+ addParameter(
+ DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
+ .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
+ .withFields({
+ C2F(mSize, width).inRange(2, 4080, 2),
+ C2F(mSize, height).inRange(2, 4080, 2),
+ })
+ .withSetter(SizeSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE)
+ .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240))
+ .withFields({
+ C2F(mSize, width).inRange(2, 4080, 2),
+ C2F(mSize, height).inRange(2, 4080, 2),
+ })
+ .withSetter(MaxPictureSizeSetter, mSize)
+ .build());
+
+ addParameter(
+ DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+ .withDefault(new C2StreamProfileLevelInfo::input(0u,
+ C2Config::PROFILE_AVC_CONSTRAINED_BASELINE, C2Config::LEVEL_AVC_5_2))
+ .withFields({
+ C2F(mProfileLevel, profile).oneOf({
+ C2Config::PROFILE_AVC_CONSTRAINED_BASELINE,
+ C2Config::PROFILE_AVC_BASELINE,
+ C2Config::PROFILE_AVC_MAIN,
+ C2Config::PROFILE_AVC_CONSTRAINED_HIGH,
+ C2Config::PROFILE_AVC_PROGRESSIVE_HIGH,
+ C2Config::PROFILE_AVC_HIGH}),
+ C2F(mProfileLevel, level).oneOf({
+ C2Config::LEVEL_AVC_1, C2Config::LEVEL_AVC_1B, C2Config::LEVEL_AVC_1_1,
+ C2Config::LEVEL_AVC_1_2, C2Config::LEVEL_AVC_1_3,
+ C2Config::LEVEL_AVC_2, C2Config::LEVEL_AVC_2_1, C2Config::LEVEL_AVC_2_2,
+ C2Config::LEVEL_AVC_3, C2Config::LEVEL_AVC_3_1, C2Config::LEVEL_AVC_3_2,
+ C2Config::LEVEL_AVC_4, C2Config::LEVEL_AVC_4_1, C2Config::LEVEL_AVC_4_2,
+ C2Config::LEVEL_AVC_5, C2Config::LEVEL_AVC_5_1, C2Config::LEVEL_AVC_5_2
+ })
+ })
+ .withSetter(ProfileLevelSetter, mSize)
+ .build());
+
+ addParameter(
+ DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE)
+ .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, 320 * 240 * 3 / 4))
+ .withFields({
+ C2F(mMaxInputSize, value).any(),
+ })
+ .calculatedAs(MaxInputSizeSetter, mMaxSize)
+ .build());
+
+ C2ChromaOffsetStruct locations[1] = { C2ChromaOffsetStruct::ITU_YUV_420_0() };
+ std::shared_ptr<C2StreamColorInfo::output> defaultColorInfo =
+ C2StreamColorInfo::output::AllocShared(
+ 1u, 0u, 8u /* bitDepth */, C2Color::YUV_420);
+ memcpy(defaultColorInfo->m.locations, locations, sizeof(locations));
+
+ defaultColorInfo =
+ C2StreamColorInfo::output::AllocShared(
+ { C2ChromaOffsetStruct::ITU_YUV_420_0() },
+ 0u, 8u /* bitDepth */, C2Color::YUV_420);
+ helper->addStructDescriptors<C2ChromaOffsetStruct>();
+
+ addParameter(
+ DefineParam(mColorInfo, C2_PARAMKEY_CODED_COLOR_INFO)
+ .withConstValue(defaultColorInfo)
+ .build());
+
+ addParameter(
+ DefineParam(mDefaultColorAspects, C2_PARAMKEY_DEFAULT_COLOR_ASPECTS)
+ .withDefault(new C2StreamColorAspectsTuning::output(
+ 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+ .withFields({
+ C2F(mDefaultColorAspects, range).inRange(
+ C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+ C2F(mDefaultColorAspects, primaries).inRange(
+ C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+ C2F(mDefaultColorAspects, transfer).inRange(
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
+ C2F(mDefaultColorAspects, matrix).inRange(
+ C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
+ })
+ .withSetter(DefaultColorAspectsSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
+ .withDefault(new C2StreamColorAspectsInfo::input(
+ 0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+ .withFields({
+ C2F(mCodedColorAspects, range).inRange(
+ C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+ C2F(mCodedColorAspects, primaries).inRange(
+ C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+ C2F(mCodedColorAspects, transfer).inRange(
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
+ C2F(mCodedColorAspects, matrix).inRange(
+ C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
+ })
+ .withSetter(CodedColorAspectsSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
+ .withDefault(new C2StreamColorAspectsInfo::output(
+ 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
+ .withFields({
+ C2F(mColorAspects, range).inRange(
+ C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
+ C2F(mColorAspects, primaries).inRange(
+ C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
+ C2F(mColorAspects, transfer).inRange(
+ C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
+ C2F(mColorAspects, matrix).inRange(
+ C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
+ })
+ .withSetter(ColorAspectsSetter, mDefaultColorAspects, mCodedColorAspects)
+ .build());
+
+ // TODO: support more formats?
+ addParameter(
+ DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT)
+ .withConstValue(new C2StreamPixelFormatInfo::output(
+ 0u, HAL_PIXEL_FORMAT_YCBCR_420_888))
+ .build());
+ }
+
+ static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::output> &oldMe,
+ C2P<C2VideoSizeStreamInfo::output> &me) {
+ (void)mayBlock;
+ C2R res = C2R::Ok();
+ if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
+ res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
+ me.set().width = oldMe.v.width;
+ }
+ if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
+ res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
+ me.set().height = oldMe.v.height;
+ }
+ return res;
+ }
+
+ static C2R MaxPictureSizeSetter(bool mayBlock, C2P<C2StreamMaxPictureSizeTuning::output> &me,
+ const C2P<C2StreamPictureSizeInfo::output> &size) {
+ (void)mayBlock;
+ // TODO: get max width/height from the size's field helpers vs. hardcoding
+ me.set().width = c2_min(c2_max(me.v.width, size.v.width), 4080u);
+ me.set().height = c2_min(c2_max(me.v.height, size.v.height), 4080u);
+ return C2R::Ok();
+ }
+
+ static C2R MaxInputSizeSetter(bool mayBlock, C2P<C2StreamMaxBufferSizeInfo::input> &me,
+ const C2P<C2StreamMaxPictureSizeTuning::output> &maxSize) {
+ (void)mayBlock;
+        // assume a compression ratio of 2 (a raw 4:2:0 macroblock is 384 bytes, so budget 192 bytes per MB)
+ me.set().value = (((maxSize.v.width + 15) / 16) * ((maxSize.v.height + 15) / 16) * 192);
+ return C2R::Ok();
+ }
+
+ static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::input> &me,
+ const C2P<C2StreamPictureSizeInfo::output> &size) {
+ (void)mayBlock;
+ (void)size;
+ (void)me; // TODO: validate
+ return C2R::Ok();
+ }
+
+ static C2R DefaultColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsTuning::output> &me) {
+ (void)mayBlock;
+ if (me.v.range > C2Color::RANGE_OTHER) {
+ me.set().range = C2Color::RANGE_OTHER;
+ }
+ if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+ me.set().primaries = C2Color::PRIMARIES_OTHER;
+ }
+ if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+ me.set().transfer = C2Color::TRANSFER_OTHER;
+ }
+ if (me.v.matrix > C2Color::MATRIX_OTHER) {
+ me.set().matrix = C2Color::MATRIX_OTHER;
+ }
+ return C2R::Ok();
+ }
+
+ static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input> &me) {
+ (void)mayBlock;
+ if (me.v.range > C2Color::RANGE_OTHER) {
+ me.set().range = C2Color::RANGE_OTHER;
+ }
+ if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
+ me.set().primaries = C2Color::PRIMARIES_OTHER;
+ }
+ if (me.v.transfer > C2Color::TRANSFER_OTHER) {
+ me.set().transfer = C2Color::TRANSFER_OTHER;
+ }
+ if (me.v.matrix > C2Color::MATRIX_OTHER) {
+ me.set().matrix = C2Color::MATRIX_OTHER;
+ }
+ return C2R::Ok();
+ }
+
+ static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output> &me,
+ const C2P<C2StreamColorAspectsTuning::output> &def,
+ const C2P<C2StreamColorAspectsInfo::input> &coded) {
+ (void)mayBlock;
+ // take default values for all unspecified fields, and coded values for specified ones
+ me.set().range = coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range;
+ me.set().primaries = coded.v.primaries == PRIMARIES_UNSPECIFIED
+ ? def.v.primaries : coded.v.primaries;
+ me.set().transfer = coded.v.transfer == TRANSFER_UNSPECIFIED
+ ? def.v.transfer : coded.v.transfer;
+ me.set().matrix = coded.v.matrix == MATRIX_UNSPECIFIED ? def.v.matrix : coded.v.matrix;
+ return C2R::Ok();
+ }
+
+ std::shared_ptr<C2StreamColorAspectsInfo::output> getColorAspects_l() {
+ return mColorAspects;
+ }
+
+private:
+ std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
+ std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
+ std::shared_ptr<C2StreamMaxPictureSizeTuning::output> mMaxSize;
+ std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mMaxInputSize;
+ std::shared_ptr<C2StreamColorInfo::output> mColorInfo;
+ std::shared_ptr<C2StreamColorAspectsInfo::input> mCodedColorAspects;
+ std::shared_ptr<C2StreamColorAspectsTuning::output> mDefaultColorAspects;
+ std::shared_ptr<C2StreamColorAspectsInfo::output> mColorAspects;
+ std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormat;
+};
+
+static size_t getCpuCoreCount() {
+ long cpuCoreCount = 1;
+#if defined(_SC_NPROCESSORS_ONLN)
+ cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
+#else
+ // _SC_NPROC_ONLN must be defined...
+ cpuCoreCount = sysconf(_SC_NPROC_ONLN);
+#endif
+ CHECK(cpuCoreCount >= 1);
+ ALOGV("Number of CPU cores: %ld", cpuCoreCount);
+ return (size_t)cpuCoreCount;
+}
+
+static void *ivd_aligned_malloc(void *ctxt, WORD32 alignment, WORD32 size) {
+ (void) ctxt;
+ return memalign(alignment, size);
+}
+
+static void ivd_aligned_free(void *ctxt, void *mem) {
+ (void) ctxt;
+ free(mem);
+}
+
+C2SoftAvcDec::C2SoftAvcDec(
+ const char *name,
+ c2_node_id_t id,
+ const std::shared_ptr<IntfImpl> &intfImpl)
+ : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+ mIntf(intfImpl),
+ mDecHandle(nullptr),
+ mOutBufferFlush(nullptr),
+ mIvColorFormat(IV_YUV_420P),
+ mWidth(320),
+ mHeight(240),
+ mHeaderDecoded(false) {
+ GENERATE_FILE_NAMES();
+ CREATE_DUMP_FILE(mInFile);
+}
+
+C2SoftAvcDec::~C2SoftAvcDec() {
+ onRelease();
+}
+
+c2_status_t C2SoftAvcDec::onInit() {
+ status_t err = initDecoder();
+ return err == OK ? C2_OK : C2_CORRUPTED;
+}
+
+c2_status_t C2SoftAvcDec::onStop() {
+ if (OK != resetDecoder()) return C2_CORRUPTED;
+ resetPlugin();
+ return C2_OK;
+}
+
+void C2SoftAvcDec::onReset() {
+ (void) onStop();
+}
+
+void C2SoftAvcDec::onRelease() {
+ (void) deleteDecoder();
+ if (mOutBufferFlush) {
+ ivd_aligned_free(nullptr, mOutBufferFlush);
+ mOutBufferFlush = nullptr;
+ }
+ if (mOutBlock) {
+ mOutBlock.reset();
+ }
+}
+
+c2_status_t C2SoftAvcDec::onFlush_sm() {
+ if (OK != setFlushMode()) return C2_CORRUPTED;
+
+ uint32_t bufferSize = mStride * mHeight * 3 / 2;
+ mOutBufferFlush = (uint8_t *)ivd_aligned_malloc(nullptr, 128, bufferSize);
+ if (!mOutBufferFlush) {
+ ALOGE("could not allocate tmp output buffer (for flush) of size %u ", bufferSize);
+ return C2_NO_MEMORY;
+ }
+
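+    // Drain any frames still held by the decoder into the temporary buffer and discard them.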
+ while (true) {
+ ivd_video_decode_ip_t s_decode_ip;
+ ivd_video_decode_op_t s_decode_op;
+
+ setDecodeArgs(&s_decode_ip, &s_decode_op, nullptr, nullptr, 0, 0, 0);
+ (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
+ if (0 == s_decode_op.u4_output_present) {
+ resetPlugin();
+ break;
+ }
+ }
+
+ if (mOutBufferFlush) {
+ ivd_aligned_free(nullptr, mOutBufferFlush);
+ mOutBufferFlush = nullptr;
+ }
+
+ return C2_OK;
+}
+
+status_t C2SoftAvcDec::createDecoder() {
+ ivdext_create_ip_t s_create_ip;
+ ivdext_create_op_t s_create_op;
+
+ s_create_ip.s_ivd_create_ip_t.u4_size = sizeof(ivdext_create_ip_t);
+ s_create_ip.s_ivd_create_ip_t.e_cmd = IVD_CMD_CREATE;
+ s_create_ip.s_ivd_create_ip_t.u4_share_disp_buf = 0;
+ s_create_ip.s_ivd_create_ip_t.e_output_format = mIvColorFormat;
+ s_create_ip.s_ivd_create_ip_t.pf_aligned_alloc = ivd_aligned_malloc;
+ s_create_ip.s_ivd_create_ip_t.pf_aligned_free = ivd_aligned_free;
+ s_create_ip.s_ivd_create_ip_t.pv_mem_ctxt = nullptr;
+ s_create_op.s_ivd_create_op_t.u4_size = sizeof(ivdext_create_op_t);
+ IV_API_CALL_STATUS_T status = ivdec_api_function(nullptr,
+ &s_create_ip,
+ &s_create_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("error in %s: 0x%x", __func__,
+ s_create_op.s_ivd_create_op_t.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+ mDecHandle = (iv_obj_t*)s_create_op.s_ivd_create_op_t.pv_handle;
+ mDecHandle->pv_fxns = (void *)ivdec_api_function;
+ mDecHandle->u4_size = sizeof(iv_obj_t);
+
+ return OK;
+}
+
+status_t C2SoftAvcDec::setNumCores() {
+ ivdext_ctl_set_num_cores_ip_t s_set_num_cores_ip;
+ ivdext_ctl_set_num_cores_op_t s_set_num_cores_op;
+
+ s_set_num_cores_ip.u4_size = sizeof(ivdext_ctl_set_num_cores_ip_t);
+ s_set_num_cores_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_set_num_cores_ip.e_sub_cmd = IVDEXT_CMD_CTL_SET_NUM_CORES;
+ s_set_num_cores_ip.u4_num_cores = mNumCores;
+ s_set_num_cores_op.u4_size = sizeof(ivdext_ctl_set_num_cores_op_t);
+ IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
+ &s_set_num_cores_ip,
+ &s_set_num_cores_op);
+ if (IV_SUCCESS != status) {
+ ALOGD("error in %s: 0x%x", __func__, s_set_num_cores_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+
+ return OK;
+}
+
+status_t C2SoftAvcDec::setParams(size_t stride, IVD_VIDEO_DECODE_MODE_T dec_mode) {
+ ivd_ctl_set_config_ip_t s_set_dyn_params_ip;
+ ivd_ctl_set_config_op_t s_set_dyn_params_op;
+
+ s_set_dyn_params_ip.u4_size = sizeof(ivd_ctl_set_config_ip_t);
+ s_set_dyn_params_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_set_dyn_params_ip.e_sub_cmd = IVD_CMD_CTL_SETPARAMS;
+ s_set_dyn_params_ip.u4_disp_wd = (UWORD32) stride;
+ s_set_dyn_params_ip.e_frm_skip_mode = IVD_SKIP_NONE;
+ s_set_dyn_params_ip.e_frm_out_mode = IVD_DISPLAY_FRAME_OUT;
+ s_set_dyn_params_ip.e_vid_dec_mode = dec_mode;
+ s_set_dyn_params_op.u4_size = sizeof(ivd_ctl_set_config_op_t);
+ IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
+ &s_set_dyn_params_ip,
+ &s_set_dyn_params_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("error in %s: 0x%x", __func__, s_set_dyn_params_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+
+ return OK;
+}
+
+void C2SoftAvcDec::getVersion() {
+ ivd_ctl_getversioninfo_ip_t s_get_versioninfo_ip;
+ ivd_ctl_getversioninfo_op_t s_get_versioninfo_op;
+ UWORD8 au1_buf[512];
+
+ s_get_versioninfo_ip.u4_size = sizeof(ivd_ctl_getversioninfo_ip_t);
+ s_get_versioninfo_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_get_versioninfo_ip.e_sub_cmd = IVD_CMD_CTL_GETVERSION;
+ s_get_versioninfo_ip.pv_version_buffer = au1_buf;
+ s_get_versioninfo_ip.u4_version_buffer_size = sizeof(au1_buf);
+ s_get_versioninfo_op.u4_size = sizeof(ivd_ctl_getversioninfo_op_t);
+ IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
+ &s_get_versioninfo_ip,
+ &s_get_versioninfo_op);
+ if (status != IV_SUCCESS) {
+ ALOGD("error in %s: 0x%x", __func__,
+ s_get_versioninfo_op.u4_error_code);
+ } else {
+ ALOGV("ittiam decoder version number: %s",
+ (char *) s_get_versioninfo_ip.pv_version_buffer);
+ }
+}
+
+status_t C2SoftAvcDec::initDecoder() {
+ if (OK != createDecoder()) return UNKNOWN_ERROR;
+ mNumCores = MIN(getCpuCoreCount(), MAX_NUM_CORES);
+ mStride = ALIGN64(mWidth);
+ mSignalledError = false;
+ resetPlugin();
+ (void) setNumCores();
+ if (OK != setParams(mStride, IVD_DECODE_FRAME)) return UNKNOWN_ERROR;
+ (void) getVersion();
+
+ return OK;
+}
+
+bool C2SoftAvcDec::setDecodeArgs(ivd_video_decode_ip_t *ps_decode_ip,
+ ivd_video_decode_op_t *ps_decode_op,
+ C2ReadView *inBuffer,
+ C2GraphicView *outBuffer,
+ size_t inOffset,
+ size_t inSize,
+ uint32_t tsMarker) {
+ uint32_t displayStride = mStride;
+ uint32_t displayHeight = mHeight;
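+    // Planar YUV 4:2:0: the luma plane covers the full picture; each chroma plane is a quarter of it.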
+ size_t lumaSize = displayStride * displayHeight;
+ size_t chromaSize = lumaSize >> 2;
+
+ ps_decode_ip->u4_size = sizeof(ivd_video_decode_ip_t);
+ ps_decode_ip->e_cmd = IVD_CMD_VIDEO_DECODE;
+ if (inBuffer) {
+ ps_decode_ip->u4_ts = tsMarker;
+ ps_decode_ip->pv_stream_buffer = const_cast<uint8_t *>(inBuffer->data() + inOffset);
+ ps_decode_ip->u4_num_Bytes = inSize;
+ } else {
+ ps_decode_ip->u4_ts = 0;
+ ps_decode_ip->pv_stream_buffer = nullptr;
+ ps_decode_ip->u4_num_Bytes = 0;
+ }
+ ps_decode_ip->s_out_buffer.u4_min_out_buf_size[0] = lumaSize;
+ ps_decode_ip->s_out_buffer.u4_min_out_buf_size[1] = chromaSize;
+ ps_decode_ip->s_out_buffer.u4_min_out_buf_size[2] = chromaSize;
+ if (outBuffer) {
+ if (outBuffer->width() < displayStride || outBuffer->height() < displayHeight) {
+ ALOGE("Output buffer too small: provided (%dx%d) required (%ux%u)",
+ outBuffer->width(), outBuffer->height(), displayStride, displayHeight);
+ return false;
+ }
+ ps_decode_ip->s_out_buffer.pu1_bufs[0] = outBuffer->data()[C2PlanarLayout::PLANE_Y];
+ ps_decode_ip->s_out_buffer.pu1_bufs[1] = outBuffer->data()[C2PlanarLayout::PLANE_U];
+ ps_decode_ip->s_out_buffer.pu1_bufs[2] = outBuffer->data()[C2PlanarLayout::PLANE_V];
+ } else {
+ ps_decode_ip->s_out_buffer.pu1_bufs[0] = mOutBufferFlush;
+ ps_decode_ip->s_out_buffer.pu1_bufs[1] = mOutBufferFlush + lumaSize;
+ ps_decode_ip->s_out_buffer.pu1_bufs[2] = mOutBufferFlush + lumaSize + chromaSize;
+ }
+ ps_decode_ip->s_out_buffer.u4_num_bufs = 3;
+ ps_decode_op->u4_size = sizeof(ivd_video_decode_op_t);
+
+ return true;
+}
+
+bool C2SoftAvcDec::getVuiParams() {
+ ivdext_ctl_get_vui_params_ip_t s_get_vui_params_ip;
+ ivdext_ctl_get_vui_params_op_t s_get_vui_params_op;
+
+ s_get_vui_params_ip.u4_size = sizeof(ivdext_ctl_get_vui_params_ip_t);
+ s_get_vui_params_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_get_vui_params_ip.e_sub_cmd =
+ (IVD_CONTROL_API_COMMAND_TYPE_T) IH264D_CMD_CTL_GET_VUI_PARAMS;
+ s_get_vui_params_op.u4_size = sizeof(ivdext_ctl_get_vui_params_op_t);
+ IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
+ &s_get_vui_params_ip,
+ &s_get_vui_params_op);
+ if (status != IV_SUCCESS) {
+ ALOGD("error in %s: 0x%x", __func__, s_get_vui_params_op.u4_error_code);
+ return false;
+ }
+
+ VuiColorAspects vuiColorAspects;
+ vuiColorAspects.primaries = s_get_vui_params_op.u1_colour_primaries;
+ vuiColorAspects.transfer = s_get_vui_params_op.u1_tfr_chars;
+ vuiColorAspects.coeffs = s_get_vui_params_op.u1_matrix_coeffs;
+ vuiColorAspects.fullRange = s_get_vui_params_op.u1_video_full_range_flag;
+
+ // convert vui aspects to C2 values if changed
+ if (!(vuiColorAspects == mBitstreamColorAspects)) {
+ mBitstreamColorAspects = vuiColorAspects;
+ ColorAspects sfAspects;
+ C2StreamColorAspectsInfo::input codedAspects = { 0u };
+ ColorUtils::convertIsoColorAspectsToCodecAspects(
+ vuiColorAspects.primaries, vuiColorAspects.transfer, vuiColorAspects.coeffs,
+ vuiColorAspects.fullRange, sfAspects);
+ if (!C2Mapper::map(sfAspects.mPrimaries, &codedAspects.primaries)) {
+ codedAspects.primaries = C2Color::PRIMARIES_UNSPECIFIED;
+ }
+ if (!C2Mapper::map(sfAspects.mRange, &codedAspects.range)) {
+ codedAspects.range = C2Color::RANGE_UNSPECIFIED;
+ }
+ if (!C2Mapper::map(sfAspects.mMatrixCoeffs, &codedAspects.matrix)) {
+ codedAspects.matrix = C2Color::MATRIX_UNSPECIFIED;
+ }
+ if (!C2Mapper::map(sfAspects.mTransfer, &codedAspects.transfer)) {
+ codedAspects.transfer = C2Color::TRANSFER_UNSPECIFIED;
+ }
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ (void)mIntf->config({&codedAspects}, C2_MAY_BLOCK, &failures);
+ }
+ return true;
+}
+
+status_t C2SoftAvcDec::setFlushMode() {
+ ivd_ctl_flush_ip_t s_set_flush_ip;
+ ivd_ctl_flush_op_t s_set_flush_op;
+
+ s_set_flush_ip.u4_size = sizeof(ivd_ctl_flush_ip_t);
+ s_set_flush_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_set_flush_ip.e_sub_cmd = IVD_CMD_CTL_FLUSH;
+ s_set_flush_op.u4_size = sizeof(ivd_ctl_flush_op_t);
+ IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
+ &s_set_flush_ip,
+ &s_set_flush_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("error in %s: 0x%x", __func__, s_set_flush_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+
+ return OK;
+}
+
+status_t C2SoftAvcDec::resetDecoder() {
+ ivd_ctl_reset_ip_t s_reset_ip;
+ ivd_ctl_reset_op_t s_reset_op;
+
+ s_reset_ip.u4_size = sizeof(ivd_ctl_reset_ip_t);
+ s_reset_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_reset_ip.e_sub_cmd = IVD_CMD_CTL_RESET;
+ s_reset_op.u4_size = sizeof(ivd_ctl_reset_op_t);
+ IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
+ &s_reset_ip,
+ &s_reset_op);
+ if (IV_SUCCESS != status) {
+ ALOGE("error in %s: 0x%x", __func__, s_reset_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+ mStride = 0;
+ (void) setNumCores();
+ mSignalledError = false;
+ mHeaderDecoded = false;
+
+ return OK;
+}
+
+void C2SoftAvcDec::resetPlugin() {
+ mSignalledOutputEos = false;
+ gettimeofday(&mTimeStart, nullptr);
+ gettimeofday(&mTimeEnd, nullptr);
+}
+
+status_t C2SoftAvcDec::deleteDecoder() {
+ if (mDecHandle) {
+ ivdext_delete_ip_t s_delete_ip;
+ ivdext_delete_op_t s_delete_op;
+
+ s_delete_ip.s_ivd_delete_ip_t.u4_size = sizeof(ivdext_delete_ip_t);
+ s_delete_ip.s_ivd_delete_ip_t.e_cmd = IVD_CMD_DELETE;
+ s_delete_op.s_ivd_delete_op_t.u4_size = sizeof(ivdext_delete_op_t);
+ IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle,
+ &s_delete_ip,
+ &s_delete_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("error in %s: 0x%x", __func__,
+ s_delete_op.s_ivd_delete_op_t.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+ mDecHandle = nullptr;
+ }
+
+ return OK;
+}
+
+static void fillEmptyWork(const std::unique_ptr<C2Work> &work) {
+ uint32_t flags = 0;
+ if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+ flags |= C2FrameData::FLAG_END_OF_STREAM;
+ ALOGV("signalling eos");
+ }
+ work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+}
+
+void C2SoftAvcDec::finishWork(uint64_t index, const std::unique_ptr<C2Work> &work) {
+ std::shared_ptr<C2Buffer> buffer = createGraphicBuffer(std::move(mOutBlock),
+ C2Rect(mWidth, mHeight));
+ mOutBlock = nullptr;
+ {
+ IntfImpl::Lock lock = mIntf->lock();
+ buffer->setInfo(mIntf->getColorAspects_l());
+ }
+
+ auto fillWork = [buffer](const std::unique_ptr<C2Work> &work) {
+ work->worklets.front()->output.flags = (C2FrameData::flags_t)0;
+ work->worklets.front()->output.buffers.clear();
+ work->worklets.front()->output.buffers.push_back(buffer);
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->workletsProcessed = 1u;
+ };
+ if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) {
+ fillWork(work);
+ } else {
+ finish(index, fillWork);
+ }
+}
+
+c2_status_t C2SoftAvcDec::ensureDecoderState(const std::shared_ptr<C2BlockPool> &pool) {
+ if (!mDecHandle) {
+ ALOGE("not supposed to be here, invalid decoder context");
+ return C2_CORRUPTED;
+ }
+ if (mStride != ALIGN64(mWidth)) {
+ mStride = ALIGN64(mWidth);
+ if (OK != setParams(mStride, IVD_DECODE_FRAME)) return C2_CORRUPTED;
+ }
+ if (mOutBlock &&
+ (mOutBlock->width() != mStride || mOutBlock->height() != mHeight)) {
+ mOutBlock.reset();
+ }
+ if (!mOutBlock) {
+ uint32_t format = HAL_PIXEL_FORMAT_YV12;
+ C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+ c2_status_t err = pool->fetchGraphicBlock(mStride, mHeight, format, usage, &mOutBlock);
+ if (err != C2_OK) {
+ ALOGE("fetchGraphicBlock for Output failed with status %d", err);
+ return err;
+ }
+ ALOGV("provided (%dx%d) required (%dx%d)",
+ mOutBlock->width(), mOutBlock->height(), mStride, mHeight);
+ }
+
+ return C2_OK;
+}
+
+// TODO: can overall error checking be improved?
+// TODO: allow configuration of color format and usage for graphic buffers instead
+// of hard coding them to HAL_PIXEL_FORMAT_YV12
+// TODO: pass coloraspects information to surface
+// TODO: test support for dynamic change in resolution
+// TODO: verify if the decoder sent back all frames
+void C2SoftAvcDec::process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ // Initialize output work
+ work->result = C2_OK;
+ work->workletsProcessed = 0u;
+ work->worklets.front()->output.flags = work->input.flags;
+ if (mSignalledError || mSignalledOutputEos) {
+ work->result = C2_BAD_VALUE;
+ return;
+ }
+
+ size_t inOffset = 0u;
+ size_t inSize = 0u;
+ uint32_t workIndex = work->input.ordinal.frameIndex.peeku() & 0xFFFFFFFF;
+ C2ReadView rView = mDummyReadView;
+ if (!work->input.buffers.empty()) {
+ rView = work->input.buffers[0]->data().linearBlocks().front().map().get();
+ inSize = rView.capacity();
+ if (inSize && rView.error()) {
+ ALOGE("read view map failed %d", rView.error());
+ work->result = rView.error();
+ return;
+ }
+ }
+ bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0);
+ bool hasPicture = false;
+
+ ALOGV("in buffer attr. size %zu timestamp %d frameindex %d, flags %x",
+ inSize, (int)work->input.ordinal.timestamp.peeku(),
+ (int)work->input.ordinal.frameIndex.peeku(), work->input.flags);
+ size_t inPos = 0;
+ while (inPos < inSize) {
+ if (C2_OK != ensureDecoderState(pool)) {
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+
+ ivd_video_decode_ip_t s_decode_ip;
+ ivd_video_decode_op_t s_decode_op;
+ {
+ C2GraphicView wView = mOutBlock->map().get();
+ if (wView.error()) {
+ ALOGE("graphic view map failed %d", wView.error());
+ work->result = wView.error();
+ return;
+ }
+ if (!setDecodeArgs(&s_decode_ip, &s_decode_op, &rView, &wView,
+ inOffset + inPos, inSize - inPos, workIndex)) {
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+
+ if (false == mHeaderDecoded) {
+ /* Decode header and get dimensions */
+ setParams(mStride, IVD_DECODE_HEADER);
+ }
+
+ WORD32 delay;
+ GETTIME(&mTimeStart, nullptr);
+ TIME_DIFF(mTimeEnd, mTimeStart, delay);
+ (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
+ WORD32 decodeTime;
+ GETTIME(&mTimeEnd, nullptr);
+ TIME_DIFF(mTimeStart, mTimeEnd, decodeTime);
+ ALOGV("decodeTime=%6d delay=%6d numBytes=%6d", decodeTime, delay,
+ s_decode_op.u4_num_bytes_consumed);
+ }
+ if (IVD_MEM_ALLOC_FAILED == (s_decode_op.u4_error_code & 0xFF)) {
+ ALOGE("allocation failure in decoder");
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return;
+ } else if (IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED == (s_decode_op.u4_error_code & 0xFF)) {
+ ALOGE("unsupported resolution : %dx%d", mWidth, mHeight);
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return;
+ } else if (IVD_RES_CHANGED == (s_decode_op.u4_error_code & 0xFF)) {
+ ALOGV("resolution changed");
+ drainInternal(DRAIN_COMPONENT_NO_EOS, pool, work);
+ resetDecoder();
+ resetPlugin();
+ work->workletsProcessed = 0u;
+
+ /* Decode header and get new dimensions */
+ setParams(mStride, IVD_DECODE_HEADER);
+ (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
+ }
+ if (0 < s_decode_op.u4_pic_wd && 0 < s_decode_op.u4_pic_ht) {
+ if (mHeaderDecoded == false) {
+ mHeaderDecoded = true;
+ setParams(ALIGN64(s_decode_op.u4_pic_wd), IVD_DECODE_FRAME);
+ }
+ if (s_decode_op.u4_pic_wd != mWidth || s_decode_op.u4_pic_ht != mHeight) {
+ mWidth = s_decode_op.u4_pic_wd;
+ mHeight = s_decode_op.u4_pic_ht;
+ CHECK_EQ(0u, s_decode_op.u4_output_present);
+
+ C2VideoSizeStreamInfo::output size(0u, mWidth, mHeight);
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures);
+                if (err == C2_OK) {
+ work->worklets.front()->output.configUpdate.push_back(
+ C2Param::Copy(size));
+ } else {
+ ALOGE("Cannot set width and height");
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ continue;
+ }
+ }
+ (void)getVuiParams();
+ hasPicture |= (1 == s_decode_op.u4_frame_decoded_flag);
+ if (s_decode_op.u4_output_present) {
+ finishWork(s_decode_op.u4_ts, work);
+ }
+ if (0 == s_decode_op.u4_num_bytes_consumed) {
+ ALOGD("Bytes consumed is zero. Ignoring remaining bytes");
+ break;
+ }
+ inPos += s_decode_op.u4_num_bytes_consumed;
+ if (hasPicture && (inSize - inPos)) {
+ ALOGD("decoded frame in current access nal, ignoring further trailing bytes %d",
+ (int)inSize - (int)inPos);
+ break;
+ }
+ }
+ if (eos) {
+ drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
+ mSignalledOutputEos = true;
+ } else if (!hasPicture) {
+ fillEmptyWork(work);
+ }
+}
+
+c2_status_t C2SoftAvcDec::drainInternal(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool,
+ const std::unique_ptr<C2Work> &work) {
+ if (drainMode == NO_DRAIN) {
+ ALOGW("drain with NO_DRAIN: no-op");
+ return C2_OK;
+ }
+ if (drainMode == DRAIN_CHAIN) {
+ ALOGW("DRAIN_CHAIN not supported");
+ return C2_OMITTED;
+ }
+
+ if (OK != setFlushMode()) return C2_CORRUPTED;
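+    // Pull the remaining decoded frames out of the decoder and finish their work items.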
+ while (true) {
+ if (C2_OK != ensureDecoderState(pool)) {
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return C2_CORRUPTED;
+ }
+ C2GraphicView wView = mOutBlock->map().get();
+ if (wView.error()) {
+ ALOGE("graphic view map failed %d", wView.error());
+ return C2_CORRUPTED;
+ }
+ ivd_video_decode_ip_t s_decode_ip;
+ ivd_video_decode_op_t s_decode_op;
+ if (!setDecodeArgs(&s_decode_ip, &s_decode_op, nullptr, &wView, 0, 0, 0)) {
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ return C2_CORRUPTED;
+ }
+ (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op);
+ if (s_decode_op.u4_output_present) {
+ finishWork(s_decode_op.u4_ts, work);
+ } else {
+ fillEmptyWork(work);
+ break;
+ }
+ }
+
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcDec::drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ return drainInternal(drainMode, pool, nullptr);
+}
+
+class C2SoftAvcDecFactory : public C2ComponentFactory {
+public:
+ C2SoftAvcDecFactory() : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+ GetCodec2PlatformComponentStore()->getParamReflector())) {
+ }
+
+ virtual c2_status_t createComponent(
+ c2_node_id_t id,
+ std::shared_ptr<C2Component>* const component,
+ std::function<void(C2Component*)> deleter) override {
+ *component = std::shared_ptr<C2Component>(
+ new C2SoftAvcDec(COMPONENT_NAME,
+ id,
+ std::make_shared<C2SoftAvcDec::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual c2_status_t createInterface(
+ c2_node_id_t id,
+ std::shared_ptr<C2ComponentInterface>* const interface,
+ std::function<void(C2ComponentInterface*)> deleter) override {
+ *interface = std::shared_ptr<C2ComponentInterface>(
+ new SimpleInterface<C2SoftAvcDec::IntfImpl>(
+ COMPONENT_NAME, id, std::make_shared<C2SoftAvcDec::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual ~C2SoftAvcDecFactory() override = default;
+
+private:
+ std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+} // namespace android
+
+extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+ ALOGV("in %s", __func__);
+ return new ::android::C2SoftAvcDecFactory();
+}
+
+extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
+ ALOGV("in %s", __func__);
+ delete factory;
+}
diff --git a/media/codec2/components/avc/C2SoftAvcDec.h b/media/codec2/components/avc/C2SoftAvcDec.h
new file mode 100644
index 0000000..2127a93
--- /dev/null
+++ b/media/codec2/components/avc/C2SoftAvcDec.h
@@ -0,0 +1,196 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_AVC_DEC_H_
+#define ANDROID_C2_SOFT_AVC_DEC_H_
+
+#include <sys/time.h>
+
+#include <media/stagefright/foundation/ColorUtils.h>
+
+#include <SimpleC2Component.h>
+
+#include "ih264_typedefs.h"
+#include "iv.h"
+#include "ivd.h"
+
+namespace android {
+
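+// Alias the generic ivd decoder API names to the libavc (ih264d) implementations.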
+#define ivdec_api_function ih264d_api_function
+#define ivdext_create_ip_t ih264d_create_ip_t
+#define ivdext_create_op_t ih264d_create_op_t
+#define ivdext_delete_ip_t ih264d_delete_ip_t
+#define ivdext_delete_op_t ih264d_delete_op_t
+#define ivdext_ctl_set_num_cores_ip_t ih264d_ctl_set_num_cores_ip_t
+#define ivdext_ctl_set_num_cores_op_t ih264d_ctl_set_num_cores_op_t
+#define ivdext_ctl_get_vui_params_ip_t ih264d_ctl_get_vui_params_ip_t
+#define ivdext_ctl_get_vui_params_op_t ih264d_ctl_get_vui_params_op_t
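+// ALIGN64 rounds x up to the next multiple of 64 (used for the decoder output stride).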
+#define ALIGN64(x) ((((x) + 63) >> 6) << 6)
+#define MAX_NUM_CORES 4
+#define IVDEXT_CMD_CTL_SET_NUM_CORES \
+ (IVD_CONTROL_API_COMMAND_TYPE_T)IH264D_CMD_CTL_SET_NUM_CORES
+#define MIN(a, b) (((a) < (b)) ? (a) : (b))
+#define GETTIME(a, b) gettimeofday(a, b);
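+// TIME_DIFF computes (end - start) in microseconds.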
+#define TIME_DIFF(start, end, diff) \
+ diff = (((end).tv_sec - (start).tv_sec) * 1000000) + \
+ ((end).tv_usec - (start).tv_usec);
+
+#ifdef FILE_DUMP_ENABLE
+ #define INPUT_DUMP_PATH "/sdcard/clips/avcd_input"
+ #define INPUT_DUMP_EXT "h264"
+ #define GENERATE_FILE_NAMES() { \
+ GETTIME(&mTimeStart, NULL); \
+ strcpy(mInFile, ""); \
+ sprintf(mInFile, "%s_%ld.%ld.%s", INPUT_DUMP_PATH, \
+ mTimeStart.tv_sec, mTimeStart.tv_usec, \
+ INPUT_DUMP_EXT); \
+ }
+ #define CREATE_DUMP_FILE(m_filename) { \
+ FILE *fp = fopen(m_filename, "wb"); \
+ if (fp != NULL) { \
+ fclose(fp); \
+ } else { \
+ ALOGD("Could not open file %s", m_filename); \
+ } \
+ }
+ #define DUMP_TO_FILE(m_filename, m_buf, m_size, m_offset)\
+ { \
+ FILE *fp = fopen(m_filename, "ab"); \
+ if (fp != NULL && m_buf != NULL && m_offset == 0) { \
+ int i; \
+ i = fwrite(m_buf, 1, m_size, fp); \
+ ALOGD("fwrite ret %d to write %d", i, m_size); \
+ if (i != (int) m_size) { \
+ ALOGD("Error in fwrite, returned %d", i); \
+ perror("Error in write to file"); \
+ } \
+ } else if (fp == NULL) { \
+ ALOGD("Could not write to file %s", m_filename);\
+ } \
+ if (fp) { \
+ fclose(fp); \
+ } \
+ }
+#else /* FILE_DUMP_ENABLE */
+ #define INPUT_DUMP_PATH
+ #define INPUT_DUMP_EXT
+ #define OUTPUT_DUMP_PATH
+ #define OUTPUT_DUMP_EXT
+ #define GENERATE_FILE_NAMES()
+ #define CREATE_DUMP_FILE(m_filename)
+ #define DUMP_TO_FILE(m_filename, m_buf, m_size, m_offset)
+#endif /* FILE_DUMP_ENABLE */
+
+
+class C2SoftAvcDec : public SimpleC2Component {
+public:
+ class IntfImpl;
+ C2SoftAvcDec(const char *name, c2_node_id_t id, const std::shared_ptr<IntfImpl> &intfImpl);
+ virtual ~C2SoftAvcDec();
+
+ // From SimpleC2Component
+ c2_status_t onInit() override;
+ c2_status_t onStop() override;
+ void onReset() override;
+ void onRelease() override;
+ c2_status_t onFlush_sm() override;
+ void process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+ c2_status_t drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+
+private:
+ status_t createDecoder();
+ status_t setNumCores();
+ status_t setParams(size_t stride, IVD_VIDEO_DECODE_MODE_T dec_mode);
+ void getVersion();
+ status_t initDecoder();
+ bool setDecodeArgs(ivd_video_decode_ip_t *ps_decode_ip,
+ ivd_video_decode_op_t *ps_decode_op,
+ C2ReadView *inBuffer,
+ C2GraphicView *outBuffer,
+ size_t inOffset,
+ size_t inSize,
+ uint32_t tsMarker);
+ bool getVuiParams();
+ c2_status_t ensureDecoderState(const std::shared_ptr<C2BlockPool> &pool);
+ void finishWork(uint64_t index, const std::unique_ptr<C2Work> &work);
+ status_t setFlushMode();
+ c2_status_t drainInternal(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool,
+ const std::unique_ptr<C2Work> &work);
+ status_t resetDecoder();
+ void resetPlugin();
+ status_t deleteDecoder();
+
+ std::shared_ptr<IntfImpl> mIntf;
+
+    // TODO: This is not the right place for this enum. These should
+    // be part of c2-vndk so that they can be accessed by all video plugins;
+    // until then, make them feel at home.
+ enum {
+ kNotSupported,
+ kPreferBitstream,
+ kPreferContainer,
+ };
+
+ iv_obj_t *mDecHandle;
+ std::shared_ptr<C2GraphicBlock> mOutBlock;
+ uint8_t *mOutBufferFlush;
+
+ size_t mNumCores;
+ IV_COLOR_FORMAT_T mIvColorFormat;
+
+ uint32_t mWidth;
+ uint32_t mHeight;
+ uint32_t mStride;
+ bool mSignalledOutputEos;
+ bool mSignalledError;
+ bool mHeaderDecoded;
+ // Color aspects. These are ISO values and are meant to detect changes in aspects to avoid
+ // converting them to C2 values for each frame
+ struct VuiColorAspects {
+ uint8_t primaries;
+ uint8_t transfer;
+ uint8_t coeffs;
+ uint8_t fullRange;
+
+ // default color aspects
+ VuiColorAspects()
+ : primaries(2), transfer(2), coeffs(2), fullRange(0) { }
+
+        bool operator==(const VuiColorAspects &o) const {
+ return primaries == o.primaries && transfer == o.transfer && coeffs == o.coeffs
+ && fullRange == o.fullRange;
+ }
+ } mBitstreamColorAspects;
+
+ // profile
+ struct timeval mTimeStart;
+ struct timeval mTimeEnd;
+#ifdef FILE_DUMP_ENABLE
+ char mInFile[200];
+#endif /* FILE_DUMP_ENABLE */
+
+ C2_DO_NOT_COPY(C2SoftAvcDec);
+};
+
+} // namespace android
+
+#endif // ANDROID_C2_SOFT_AVC_DEC_H_
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.cpp b/media/codec2/components/avc/C2SoftAvcEnc.cpp
new file mode 100644
index 0000000..ee5cf27
--- /dev/null
+++ b/media/codec2/components/avc/C2SoftAvcEnc.cpp
@@ -0,0 +1,1559 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2SoftAvcEnc"
+#include <log/log.h>
+#include <utils/misc.h>
+
+#include <media/hardware/VideoAPI.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/foundation/AUtils.h>
+
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+#include <Codec2BufferUtils.h>
+#include <SimpleC2Interface.h>
+#include <util/C2InterfaceHelper.h>
+
+#include "C2SoftAvcEnc.h"
+#include "ih264e.h"
+#include "ih264e_error.h"
+
+namespace android {
+
+class C2SoftAvcEnc::IntfImpl : public C2InterfaceHelper {
+public:
+ explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
+ : C2InterfaceHelper(helper) {
+
+ setDerivedInstance(this);
+
+ addParameter(
+ DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatVideo))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
+ .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatCompressed))
+ .build());
+
+ addParameter(
+ DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
+ MEDIA_MIMETYPE_VIDEO_RAW))
+ .build());
+
+ addParameter(
+ DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
+ .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
+ MEDIA_MIMETYPE_VIDEO_AVC))
+ .build());
+
+ addParameter(
+ DefineParam(mUsage, C2_NAME_INPUT_STREAM_USAGE_SETTING)
+ .withConstValue(new C2StreamUsageTuning::input(
+ 0u, (uint64_t)C2MemoryUsage::CPU_READ))
+ .build());
+
+ addParameter(
+ DefineParam(mSize, C2_NAME_STREAM_VIDEO_SIZE_SETTING)
+ .withDefault(new C2VideoSizeStreamTuning::input(0u, 320, 240))
+ .withFields({
+ C2F(mSize, width).inRange(2, 2560, 2),
+ C2F(mSize, height).inRange(2, 2560, 2),
+ })
+ .withSetter(SizeSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mFrameRate, C2_NAME_STREAM_FRAME_RATE_SETTING)
+ .withDefault(new C2StreamFrameRateInfo::output(0u, 30.))
+ // TODO: More restriction?
+ .withFields({C2F(mFrameRate, value).greaterThan(0.)})
+ .withSetter(Setter<decltype(*mFrameRate)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING)
+ .withDefault(new C2BitrateTuning::output(0u, 64000))
+ .withFields({C2F(mBitrate, value).inRange(4096, 12000000)})
+ .withSetter(BitrateSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mIntraRefresh, C2_PARAMKEY_INTRA_REFRESH)
+ .withDefault(new C2StreamIntraRefreshTuning::output(
+ 0u, C2Config::INTRA_REFRESH_DISABLED, 0.))
+ .withFields({
+ C2F(mIntraRefresh, mode).oneOf({
+ C2Config::INTRA_REFRESH_DISABLED, C2Config::INTRA_REFRESH_ARBITRARY }),
+ C2F(mIntraRefresh, period).any()
+ })
+ .withSetter(IntraRefreshSetter)
+ .build());
+
+ addParameter(
+ DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
+ .withDefault(new C2StreamProfileLevelInfo::output(
+ 0u, PROFILE_AVC_CONSTRAINED_BASELINE, LEVEL_AVC_4_1))
+ .withFields({
+ C2F(mProfileLevel, profile).oneOf({
+ PROFILE_AVC_BASELINE,
+ PROFILE_AVC_CONSTRAINED_BASELINE,
+ PROFILE_AVC_MAIN,
+ }),
+ C2F(mProfileLevel, level).oneOf({
+ LEVEL_AVC_1,
+ LEVEL_AVC_1B,
+ LEVEL_AVC_1_1,
+ LEVEL_AVC_1_2,
+ LEVEL_AVC_1_3,
+ LEVEL_AVC_2,
+ LEVEL_AVC_2_1,
+ LEVEL_AVC_2_2,
+ LEVEL_AVC_3,
+ LEVEL_AVC_3_1,
+ LEVEL_AVC_3_2,
+ LEVEL_AVC_4,
+ LEVEL_AVC_4_1,
+ LEVEL_AVC_4_2,
+ LEVEL_AVC_5,
+ }),
+ })
+ .withSetter(ProfileLevelSetter, mSize, mFrameRate, mBitrate)
+ .build());
+
+ addParameter(
+ DefineParam(mRequestSync, C2_PARAMKEY_REQUEST_SYNC_FRAME)
+ .withDefault(new C2StreamRequestSyncFrameTuning::output(0u, C2_FALSE))
+ .withFields({C2F(mRequestSync, value).oneOf({ C2_FALSE, C2_TRUE }) })
+ .withSetter(Setter<decltype(*mRequestSync)>::NonStrictValueWithNoDeps)
+ .build());
+
+ addParameter(
+ DefineParam(mSyncFramePeriod, C2_PARAMKEY_SYNC_FRAME_INTERVAL)
+ .withDefault(new C2StreamSyncFrameIntervalTuning::output(0u, 1000000))
+ .withFields({C2F(mSyncFramePeriod, value).any()})
+ .withSetter(Setter<decltype(*mSyncFramePeriod)>::StrictValueWithNoDeps)
+ .build());
+ }
+
+ static C2R BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output> &me) {
+ (void)mayBlock;
+ C2R res = C2R::Ok();
+ if (me.v.value <= 4096) {
+ me.set().value = 4096;
+ }
+ return res;
+ }
+
+ static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::input> &oldMe,
+ C2P<C2StreamPictureSizeInfo::input> &me) {
+ (void)mayBlock;
+ C2R res = C2R::Ok();
+ if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
+ res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
+ me.set().width = oldMe.v.width;
+ }
+ if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
+ res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
+ me.set().height = oldMe.v.height;
+ }
+ return res;
+ }
+
+ static C2R ProfileLevelSetter(
+ bool mayBlock,
+ C2P<C2StreamProfileLevelInfo::output> &me,
+ const C2P<C2VideoSizeStreamTuning::input> &size,
+ const C2P<C2StreamFrameRateInfo::output> &frameRate,
+ const C2P<C2BitrateTuning::output> &bitrate) {
+ (void)mayBlock;
+ if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
+ me.set().profile = PROFILE_AVC_CONSTRAINED_BASELINE;
+ }
+
+ struct LevelLimits {
+ C2Config::level_t level;
+ float mbsPerSec;
+ uint64_t mbs;
+ uint32_t bitrate;
+ };
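+        // Per-level limits on macroblock rate, frame size (in MBs) and bitrate, per H.264 Annex A.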
+ constexpr LevelLimits kLimits[] = {
+ { LEVEL_AVC_1, 1485, 99, 64000 },
+ // Decoder does not properly handle level 1b.
+ // { LEVEL_AVC_1B, 1485, 99, 128000 },
+ { LEVEL_AVC_1_1, 3000, 396, 192000 },
+ { LEVEL_AVC_1_2, 6000, 396, 384000 },
+ { LEVEL_AVC_1_3, 11880, 396, 768000 },
+ { LEVEL_AVC_2, 11880, 396, 2000000 },
+ { LEVEL_AVC_2_1, 19800, 792, 4000000 },
+ { LEVEL_AVC_2_2, 20250, 1620, 4000000 },
+ { LEVEL_AVC_3, 40500, 1620, 10000000 },
+ { LEVEL_AVC_3_1, 108000, 3600, 14000000 },
+ { LEVEL_AVC_3_2, 216000, 5120, 20000000 },
+ { LEVEL_AVC_4, 245760, 8192, 20000000 },
+ { LEVEL_AVC_4_1, 245760, 8192, 50000000 },
+ { LEVEL_AVC_4_2, 522240, 8704, 50000000 },
+ { LEVEL_AVC_5, 589824, 22080, 135000000 },
+ };
+
+ uint64_t mbs = uint64_t((size.v.width + 15) / 16) * ((size.v.height + 15) / 16);
+ float mbsPerSec = float(mbs) / frameRate.v.value;
+
+ // Check if the supplied level meets the MB / bitrate requirements. If
+ // not, update the level with the lowest level meeting the requirements.
+
+ bool found = false;
+ // By default needsUpdate = false in case the supplied level does meet
+ // the requirements. For Level 1b, we want to update the level anyway,
+ // so we set it to true in that case.
+ bool needsUpdate = (me.v.level == LEVEL_AVC_1B);
+ for (const LevelLimits &limit : kLimits) {
+ if (mbs <= limit.mbs && mbsPerSec <= limit.mbsPerSec &&
+ bitrate.v.value <= limit.bitrate) {
+ // This is the lowest level that meets the requirements, and if
+ // we haven't seen the supplied level yet, that means we don't
+ // need the update.
+ if (needsUpdate) {
+ ALOGD("Given level %x does not cover current configuration: "
+ "adjusting to %x", me.v.level, limit.level);
+ me.set().level = limit.level;
+ }
+ found = true;
+ break;
+ }
+ if (me.v.level == limit.level) {
+ // We break out of the loop when the lowest feasible level is
+ // found. The fact that we're here means that our level doesn't
+ // meet the requirement and needs to be updated.
+ needsUpdate = true;
+ }
+ }
+ if (!found) {
+ // We set to the highest supported level.
+ me.set().level = LEVEL_AVC_5;
+ }
+
+ return C2R::Ok();
+ }
+
+ static C2R IntraRefreshSetter(bool mayBlock, C2P<C2StreamIntraRefreshTuning::output> &me) {
+ (void)mayBlock;
+ C2R res = C2R::Ok();
+ if (me.v.period < 1) {
+ me.set().mode = C2Config::INTRA_REFRESH_DISABLED;
+ me.set().period = 0;
+ } else {
+ // only support arbitrary mode (cyclic in our case)
+ me.set().mode = C2Config::INTRA_REFRESH_ARBITRARY;
+ }
+ return res;
+ }
+
+ IV_PROFILE_T getProfile_l() const {
+ switch (mProfileLevel->profile) {
+ case PROFILE_AVC_CONSTRAINED_BASELINE: [[fallthrough]];
+ case PROFILE_AVC_BASELINE: return IV_PROFILE_BASE;
+ case PROFILE_AVC_MAIN: return IV_PROFILE_MAIN;
+ default:
+ ALOGD("Unrecognized profile: %x", mProfileLevel->profile);
+ return IV_PROFILE_DEFAULT;
+ }
+ }
+
+ UWORD32 getLevel_l() const {
+ struct Level {
+ C2Config::level_t c2Level;
+ UWORD32 avcLevel;
+ };
+ constexpr Level levels[] = {
+ { LEVEL_AVC_1, 10 },
+ { LEVEL_AVC_1B, 9 },
+ { LEVEL_AVC_1_1, 11 },
+ { LEVEL_AVC_1_2, 12 },
+ { LEVEL_AVC_1_3, 13 },
+ { LEVEL_AVC_2, 20 },
+ { LEVEL_AVC_2_1, 21 },
+ { LEVEL_AVC_2_2, 22 },
+ { LEVEL_AVC_3, 30 },
+ { LEVEL_AVC_3_1, 31 },
+ { LEVEL_AVC_3_2, 32 },
+ { LEVEL_AVC_4, 40 },
+ { LEVEL_AVC_4_1, 41 },
+ { LEVEL_AVC_4_2, 42 },
+ { LEVEL_AVC_5, 50 },
+ };
+ for (const Level &level : levels) {
+ if (mProfileLevel->level == level.c2Level) {
+ return level.avcLevel;
+ }
+ }
+ ALOGD("Unrecognized level: %x", mProfileLevel->level);
+ return 41;
+ }
+ uint32_t getSyncFramePeriod_l() const {
+ if (mSyncFramePeriod->value < 0 || mSyncFramePeriod->value == INT64_MAX) {
+ return 0;
+ }
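+        // Convert the sync frame interval (us) and the frame rate into a period measured in frames, at least 1.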
+ double period = mSyncFramePeriod->value / 1e6 * mFrameRate->value;
+ return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.);
+ }
+
+ // unsafe getters
+ std::shared_ptr<C2StreamPictureSizeInfo::input> getSize_l() const { return mSize; }
+ std::shared_ptr<C2StreamIntraRefreshTuning::output> getIntraRefresh_l() const { return mIntraRefresh; }
+ std::shared_ptr<C2StreamFrameRateInfo::output> getFrameRate_l() const { return mFrameRate; }
+ std::shared_ptr<C2StreamBitrateInfo::output> getBitrate_l() const { return mBitrate; }
+ std::shared_ptr<C2StreamRequestSyncFrameTuning::output> getRequestSync_l() const { return mRequestSync; }
+
+private:
+ std::shared_ptr<C2StreamFormatConfig::input> mInputFormat;
+ std::shared_ptr<C2StreamFormatConfig::output> mOutputFormat;
+ std::shared_ptr<C2PortMimeConfig::input> mInputMediaType;
+ std::shared_ptr<C2PortMimeConfig::output> mOutputMediaType;
+ std::shared_ptr<C2StreamUsageTuning::input> mUsage;
+ std::shared_ptr<C2VideoSizeStreamTuning::input> mSize;
+ std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
+ std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
+ std::shared_ptr<C2StreamIntraRefreshTuning::output> mIntraRefresh;
+ std::shared_ptr<C2BitrateTuning::output> mBitrate;
+ std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
+ std::shared_ptr<C2StreamSyncFrameIntervalTuning::output> mSyncFramePeriod;
+};
+
+#define ive_api_function ih264e_api_function
+
+constexpr char COMPONENT_NAME[] = "c2.android.avc.encoder";
+
+namespace {
+
+// From external/libavc/encoder/ih264e_bitstream.h
+constexpr uint32_t MIN_STREAM_SIZE = 0x800;
+
+static size_t GetCPUCoreCount() {
+ long cpuCoreCount = 1;
+#if defined(_SC_NPROCESSORS_ONLN)
+ cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
+#else
+ // _SC_NPROC_ONLN must be defined...
+ cpuCoreCount = sysconf(_SC_NPROC_ONLN);
+#endif
+ CHECK(cpuCoreCount >= 1);
+ ALOGV("Number of CPU cores: %ld", cpuCoreCount);
+ return (size_t)cpuCoreCount;
+}
+
+} // namespace
+
+C2SoftAvcEnc::C2SoftAvcEnc(
+ const char *name, c2_node_id_t id, const std::shared_ptr<IntfImpl> &intfImpl)
+ : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
+ mIntf(intfImpl),
+ mIvVideoColorFormat(IV_YUV_420P),
+ mAVCEncProfile(IV_PROFILE_BASE),
+ mAVCEncLevel(41),
+ mStarted(false),
+ mSawInputEOS(false),
+ mSawOutputEOS(false),
+ mSignalledError(false),
+ mCodecCtx(nullptr),
+ // TODO: output buffer size
+ mOutBufferSize(524288) {
+
+    // If dumping is enabled, create the (empty) dump files
+ GENERATE_FILE_NAMES();
+ CREATE_DUMP_FILE(mInFile);
+ CREATE_DUMP_FILE(mOutFile);
+
+ initEncParams();
+}
+
+C2SoftAvcEnc::~C2SoftAvcEnc() {
+ releaseEncoder();
+}
+
+c2_status_t C2SoftAvcEnc::onInit() {
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::onStop() {
+ return C2_OK;
+}
+
+void C2SoftAvcEnc::onReset() {
+ // TODO: use IVE_CMD_CTL_RESET?
+ releaseEncoder();
+ initEncParams();
+}
+
+void C2SoftAvcEnc::onRelease() {
+ releaseEncoder();
+}
+
+c2_status_t C2SoftAvcEnc::onFlush_sm() {
+ // TODO: use IVE_CMD_CTL_FLUSH?
+ return C2_OK;
+}
+
+void C2SoftAvcEnc::initEncParams() {
+ mCodecCtx = nullptr;
+ mMemRecords = nullptr;
+ mNumMemRecords = DEFAULT_MEM_REC_CNT;
+ mHeaderGenerated = 0;
+ mNumCores = GetCPUCoreCount();
+ mArch = DEFAULT_ARCH;
+ mSliceMode = DEFAULT_SLICE_MODE;
+ mSliceParam = DEFAULT_SLICE_PARAM;
+ mHalfPelEnable = DEFAULT_HPEL;
+ mIInterval = DEFAULT_I_INTERVAL;
+ mIDRInterval = DEFAULT_IDR_INTERVAL;
+ mDisableDeblkLevel = DEFAULT_DISABLE_DEBLK_LEVEL;
+ mEnableFastSad = DEFAULT_ENABLE_FAST_SAD;
+ mEnableAltRef = DEFAULT_ENABLE_ALT_REF;
+ mEncSpeed = DEFAULT_ENC_SPEED;
+ mIntra4x4 = DEFAULT_INTRA4x4;
+ mConstrainedIntraFlag = DEFAULT_CONSTRAINED_INTRA;
+ mPSNREnable = DEFAULT_PSNR_ENABLE;
+ mReconEnable = DEFAULT_RECON_ENABLE;
+ mEntropyMode = DEFAULT_ENTROPY_MODE;
+ mBframes = DEFAULT_B_FRAMES;
+
+ gettimeofday(&mTimeStart, nullptr);
+ gettimeofday(&mTimeEnd, nullptr);
+}
+
+c2_status_t C2SoftAvcEnc::setDimensions() {
+ ive_ctl_set_dimensions_ip_t s_dimensions_ip;
+ ive_ctl_set_dimensions_op_t s_dimensions_op;
+ IV_STATUS_T status;
+
+ s_dimensions_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_dimensions_ip.e_sub_cmd = IVE_CMD_CTL_SET_DIMENSIONS;
+ s_dimensions_ip.u4_ht = mSize->height;
+ s_dimensions_ip.u4_wd = mSize->width;
+
+ s_dimensions_ip.u4_timestamp_high = -1;
+ s_dimensions_ip.u4_timestamp_low = -1;
+
+ s_dimensions_ip.u4_size = sizeof(ive_ctl_set_dimensions_ip_t);
+ s_dimensions_op.u4_size = sizeof(ive_ctl_set_dimensions_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_dimensions_ip, &s_dimensions_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set frame dimensions = 0x%x\n",
+ s_dimensions_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setNumCores() {
+ IV_STATUS_T status;
+ ive_ctl_set_num_cores_ip_t s_num_cores_ip;
+ ive_ctl_set_num_cores_op_t s_num_cores_op;
+ s_num_cores_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_num_cores_ip.e_sub_cmd = IVE_CMD_CTL_SET_NUM_CORES;
+ s_num_cores_ip.u4_num_cores = MIN(mNumCores, CODEC_MAX_CORES);
+ s_num_cores_ip.u4_timestamp_high = -1;
+ s_num_cores_ip.u4_timestamp_low = -1;
+ s_num_cores_ip.u4_size = sizeof(ive_ctl_set_num_cores_ip_t);
+
+ s_num_cores_op.u4_size = sizeof(ive_ctl_set_num_cores_op_t);
+
+ status = ive_api_function(
+ mCodecCtx, (void *) &s_num_cores_ip, (void *) &s_num_cores_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set processor params = 0x%x\n",
+ s_num_cores_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setFrameRate() {
+ ive_ctl_set_frame_rate_ip_t s_frame_rate_ip;
+ ive_ctl_set_frame_rate_op_t s_frame_rate_op;
+ IV_STATUS_T status;
+
+ s_frame_rate_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_frame_rate_ip.e_sub_cmd = IVE_CMD_CTL_SET_FRAMERATE;
+
+ s_frame_rate_ip.u4_src_frame_rate = mFrameRate->value + 0.5;
+ s_frame_rate_ip.u4_tgt_frame_rate = mFrameRate->value + 0.5;
+
+ s_frame_rate_ip.u4_timestamp_high = -1;
+ s_frame_rate_ip.u4_timestamp_low = -1;
+
+ s_frame_rate_ip.u4_size = sizeof(ive_ctl_set_frame_rate_ip_t);
+ s_frame_rate_op.u4_size = sizeof(ive_ctl_set_frame_rate_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_frame_rate_ip, &s_frame_rate_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set frame rate = 0x%x\n",
+ s_frame_rate_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setIpeParams() {
+ ive_ctl_set_ipe_params_ip_t s_ipe_params_ip;
+ ive_ctl_set_ipe_params_op_t s_ipe_params_op;
+ IV_STATUS_T status;
+
+ s_ipe_params_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_ipe_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_IPE_PARAMS;
+
+ s_ipe_params_ip.u4_enable_intra_4x4 = mIntra4x4;
+ s_ipe_params_ip.u4_enc_speed_preset = mEncSpeed;
+ s_ipe_params_ip.u4_constrained_intra_pred = mConstrainedIntraFlag;
+
+ s_ipe_params_ip.u4_timestamp_high = -1;
+ s_ipe_params_ip.u4_timestamp_low = -1;
+
+ s_ipe_params_ip.u4_size = sizeof(ive_ctl_set_ipe_params_ip_t);
+ s_ipe_params_op.u4_size = sizeof(ive_ctl_set_ipe_params_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_ipe_params_ip, &s_ipe_params_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set ipe params = 0x%x\n",
+ s_ipe_params_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setBitRate() {
+ ive_ctl_set_bitrate_ip_t s_bitrate_ip;
+ ive_ctl_set_bitrate_op_t s_bitrate_op;
+ IV_STATUS_T status;
+
+ s_bitrate_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_bitrate_ip.e_sub_cmd = IVE_CMD_CTL_SET_BITRATE;
+
+ s_bitrate_ip.u4_target_bitrate = mBitrate->value;
+
+ s_bitrate_ip.u4_timestamp_high = -1;
+ s_bitrate_ip.u4_timestamp_low = -1;
+
+ s_bitrate_ip.u4_size = sizeof(ive_ctl_set_bitrate_ip_t);
+ s_bitrate_op.u4_size = sizeof(ive_ctl_set_bitrate_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_bitrate_ip, &s_bitrate_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set bit rate = 0x%x\n", s_bitrate_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setFrameType(IV_PICTURE_CODING_TYPE_T e_frame_type) {
+ ive_ctl_set_frame_type_ip_t s_frame_type_ip;
+ ive_ctl_set_frame_type_op_t s_frame_type_op;
+ IV_STATUS_T status;
+ s_frame_type_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_frame_type_ip.e_sub_cmd = IVE_CMD_CTL_SET_FRAMETYPE;
+
+ s_frame_type_ip.e_frame_type = e_frame_type;
+
+ s_frame_type_ip.u4_timestamp_high = -1;
+ s_frame_type_ip.u4_timestamp_low = -1;
+
+ s_frame_type_ip.u4_size = sizeof(ive_ctl_set_frame_type_ip_t);
+ s_frame_type_op.u4_size = sizeof(ive_ctl_set_frame_type_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_frame_type_ip, &s_frame_type_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set frame type = 0x%x\n",
+ s_frame_type_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setQp() {
+ ive_ctl_set_qp_ip_t s_qp_ip;
+ ive_ctl_set_qp_op_t s_qp_op;
+ IV_STATUS_T status;
+
+ s_qp_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_qp_ip.e_sub_cmd = IVE_CMD_CTL_SET_QP;
+
+ s_qp_ip.u4_i_qp = DEFAULT_I_QP;
+ s_qp_ip.u4_i_qp_max = DEFAULT_QP_MAX;
+ s_qp_ip.u4_i_qp_min = DEFAULT_QP_MIN;
+
+ s_qp_ip.u4_p_qp = DEFAULT_P_QP;
+ s_qp_ip.u4_p_qp_max = DEFAULT_QP_MAX;
+ s_qp_ip.u4_p_qp_min = DEFAULT_QP_MIN;
+
+ s_qp_ip.u4_b_qp = DEFAULT_P_QP;
+ s_qp_ip.u4_b_qp_max = DEFAULT_QP_MAX;
+ s_qp_ip.u4_b_qp_min = DEFAULT_QP_MIN;
+
+ s_qp_ip.u4_timestamp_high = -1;
+ s_qp_ip.u4_timestamp_low = -1;
+
+ s_qp_ip.u4_size = sizeof(ive_ctl_set_qp_ip_t);
+ s_qp_op.u4_size = sizeof(ive_ctl_set_qp_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_qp_ip, &s_qp_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set qp 0x%x\n", s_qp_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setEncMode(IVE_ENC_MODE_T e_enc_mode) {
+ IV_STATUS_T status;
+ ive_ctl_set_enc_mode_ip_t s_enc_mode_ip;
+ ive_ctl_set_enc_mode_op_t s_enc_mode_op;
+
+ s_enc_mode_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_enc_mode_ip.e_sub_cmd = IVE_CMD_CTL_SET_ENC_MODE;
+
+ s_enc_mode_ip.e_enc_mode = e_enc_mode;
+
+ s_enc_mode_ip.u4_timestamp_high = -1;
+ s_enc_mode_ip.u4_timestamp_low = -1;
+
+ s_enc_mode_ip.u4_size = sizeof(ive_ctl_set_enc_mode_ip_t);
+ s_enc_mode_op.u4_size = sizeof(ive_ctl_set_enc_mode_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_enc_mode_ip, &s_enc_mode_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set in header encode mode = 0x%x\n",
+ s_enc_mode_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setVbvParams() {
+ ive_ctl_set_vbv_params_ip_t s_vbv_ip;
+ ive_ctl_set_vbv_params_op_t s_vbv_op;
+ IV_STATUS_T status;
+
+ s_vbv_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_vbv_ip.e_sub_cmd = IVE_CMD_CTL_SET_VBV_PARAMS;
+
+ s_vbv_ip.u4_vbv_buf_size = 0;
+ s_vbv_ip.u4_vbv_buffer_delay = 1000;
+
+ s_vbv_ip.u4_timestamp_high = -1;
+ s_vbv_ip.u4_timestamp_low = -1;
+
+ s_vbv_ip.u4_size = sizeof(ive_ctl_set_vbv_params_ip_t);
+ s_vbv_op.u4_size = sizeof(ive_ctl_set_vbv_params_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_vbv_ip, &s_vbv_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set VBV params = 0x%x\n", s_vbv_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setAirParams() {
+ ive_ctl_set_air_params_ip_t s_air_ip;
+ ive_ctl_set_air_params_op_t s_air_op;
+ IV_STATUS_T status;
+
+ s_air_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_air_ip.e_sub_cmd = IVE_CMD_CTL_SET_AIR_PARAMS;
+
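+ // Map the codec2 intra-refresh setting onto the encoder's adaptive intra refresh (AIR)
+ // modes: a disabled mode or a period < 1 turns AIR off, otherwise cyclic refresh is used.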
+ s_air_ip.e_air_mode =
+ (mIntraRefresh->mode == C2Config::INTRA_REFRESH_DISABLED || mIntraRefresh->period < 1)
+ ? IVE_AIR_MODE_NONE : IVE_AIR_MODE_CYCLIC;
+ s_air_ip.u4_air_refresh_period = mIntraRefresh->period;
+
+ s_air_ip.u4_timestamp_high = -1;
+ s_air_ip.u4_timestamp_low = -1;
+
+ s_air_ip.u4_size = sizeof(ive_ctl_set_air_params_ip_t);
+ s_air_op.u4_size = sizeof(ive_ctl_set_air_params_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_air_ip, &s_air_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set air params = 0x%x\n", s_air_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setMeParams() {
+ IV_STATUS_T status;
+ ive_ctl_set_me_params_ip_t s_me_params_ip;
+ ive_ctl_set_me_params_op_t s_me_params_op;
+
+ s_me_params_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_me_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_ME_PARAMS;
+
+ s_me_params_ip.u4_enable_fast_sad = mEnableFastSad;
+ s_me_params_ip.u4_enable_alt_ref = mEnableAltRef;
+
+ s_me_params_ip.u4_enable_hpel = mHalfPelEnable;
+ s_me_params_ip.u4_enable_qpel = DEFAULT_QPEL;
+ s_me_params_ip.u4_me_speed_preset = DEFAULT_ME_SPEED;
+ s_me_params_ip.u4_srch_rng_x = DEFAULT_SRCH_RNG_X;
+ s_me_params_ip.u4_srch_rng_y = DEFAULT_SRCH_RNG_Y;
+
+ s_me_params_ip.u4_timestamp_high = -1;
+ s_me_params_ip.u4_timestamp_low = -1;
+
+ s_me_params_ip.u4_size = sizeof(ive_ctl_set_me_params_ip_t);
+ s_me_params_op.u4_size = sizeof(ive_ctl_set_me_params_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_me_params_ip, &s_me_params_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set me params = 0x%x\n", s_me_params_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setGopParams() {
+ IV_STATUS_T status;
+ ive_ctl_set_gop_params_ip_t s_gop_params_ip;
+ ive_ctl_set_gop_params_op_t s_gop_params_op;
+
+ s_gop_params_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_gop_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_GOP_PARAMS;
+
+ s_gop_params_ip.u4_i_frm_interval = mIInterval;
+ s_gop_params_ip.u4_idr_frm_interval = mIDRInterval;
+
+ s_gop_params_ip.u4_timestamp_high = -1;
+ s_gop_params_ip.u4_timestamp_low = -1;
+
+ s_gop_params_ip.u4_size = sizeof(ive_ctl_set_gop_params_ip_t);
+ s_gop_params_op.u4_size = sizeof(ive_ctl_set_gop_params_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_gop_params_ip, &s_gop_params_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set GOP params = 0x%x\n",
+ s_gop_params_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setProfileParams() {
+ IntfImpl::Lock lock = mIntf->lock();
+
+ IV_STATUS_T status;
+ ive_ctl_set_profile_params_ip_t s_profile_params_ip;
+ ive_ctl_set_profile_params_op_t s_profile_params_op;
+
+ s_profile_params_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_profile_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_PROFILE_PARAMS;
+
+ s_profile_params_ip.e_profile = mIntf->getProfile_l();
+ s_profile_params_ip.u4_entropy_coding_mode = mEntropyMode;
+ s_profile_params_ip.u4_timestamp_high = -1;
+ s_profile_params_ip.u4_timestamp_low = -1;
+
+ s_profile_params_ip.u4_size = sizeof(ive_ctl_set_profile_params_ip_t);
+ s_profile_params_op.u4_size = sizeof(ive_ctl_set_profile_params_op_t);
+ lock.unlock();
+
+ status = ive_api_function(mCodecCtx, &s_profile_params_ip, &s_profile_params_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set profile params = 0x%x\n",
+ s_profile_params_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setDeblockParams() {
+ IV_STATUS_T status;
+ ive_ctl_set_deblock_params_ip_t s_deblock_params_ip;
+ ive_ctl_set_deblock_params_op_t s_deblock_params_op;
+
+ s_deblock_params_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_deblock_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_DEBLOCK_PARAMS;
+
+ s_deblock_params_ip.u4_disable_deblock_level = mDisableDeblkLevel;
+
+ s_deblock_params_ip.u4_timestamp_high = -1;
+ s_deblock_params_ip.u4_timestamp_low = -1;
+
+ s_deblock_params_ip.u4_size = sizeof(ive_ctl_set_deblock_params_ip_t);
+ s_deblock_params_op.u4_size = sizeof(ive_ctl_set_deblock_params_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_deblock_params_ip, &s_deblock_params_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to enable/disable deblock params = 0x%x\n",
+ s_deblock_params_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ return C2_OK;
+}
+
+void C2SoftAvcEnc::logVersion() {
+ ive_ctl_getversioninfo_ip_t s_ctl_ip;
+ ive_ctl_getversioninfo_op_t s_ctl_op;
+ UWORD8 au1_buf[512];
+ IV_STATUS_T status;
+
+ s_ctl_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_ctl_ip.e_sub_cmd = IVE_CMD_CTL_GETVERSION;
+ s_ctl_ip.u4_size = sizeof(ive_ctl_getversioninfo_ip_t);
+ s_ctl_op.u4_size = sizeof(ive_ctl_getversioninfo_op_t);
+ s_ctl_ip.pu1_version = au1_buf;
+ s_ctl_ip.u4_version_bufsize = sizeof(au1_buf);
+
+ status = ive_api_function(mCodecCtx, (void *) &s_ctl_ip, (void *) &s_ctl_op);
+
+ if (status != IV_SUCCESS) {
+ ALOGE("Error in getting version: 0x%x", s_ctl_op.u4_error_code);
+ } else {
+ ALOGV("Ittiam encoder version: %s", (char *)s_ctl_ip.pu1_version);
+ }
+ return;
+}
+
+c2_status_t C2SoftAvcEnc::initEncoder() {
+ IV_STATUS_T status;
+ WORD32 level;
+
+ CHECK(!mStarted);
+
+ c2_status_t errType = C2_OK;
+
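+ // Snapshot the configured parameters from the interface under its lock before using them below.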
+ {
+ IntfImpl::Lock lock = mIntf->lock();
+ mSize = mIntf->getSize_l();
+ mBitrate = mIntf->getBitrate_l();
+ mFrameRate = mIntf->getFrameRate_l();
+ mIntraRefresh = mIntf->getIntraRefresh_l();
+ mAVCEncLevel = mIntf->getLevel_l();
+ mIInterval = mIntf->getSyncFramePeriod_l();
+ mIDRInterval = mIntf->getSyncFramePeriod_l();
+ }
+ uint32_t width = mSize->width;
+ uint32_t height = mSize->height;
+
+ mStride = width;
+
+ // TODO
+ mIvVideoColorFormat = IV_YUV_420P;
+
+ ALOGD("Params width %d height %d level %d colorFormat %d", width,
+ height, mAVCEncLevel, mIvVideoColorFormat);
+
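+ // The Ittiam encoder is brought up in stages: query and fill its memory records,
+ // allocate them, issue IV_CMD_INIT, then apply the individual control settings.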
+ /* Getting Number of MemRecords */
+ {
+ iv_num_mem_rec_ip_t s_num_mem_rec_ip;
+ iv_num_mem_rec_op_t s_num_mem_rec_op;
+
+ s_num_mem_rec_ip.u4_size = sizeof(iv_num_mem_rec_ip_t);
+ s_num_mem_rec_op.u4_size = sizeof(iv_num_mem_rec_op_t);
+
+ s_num_mem_rec_ip.e_cmd = IV_CMD_GET_NUM_MEM_REC;
+
+ status = ive_api_function(nullptr, &s_num_mem_rec_ip, &s_num_mem_rec_op);
+
+ if (status != IV_SUCCESS) {
+ ALOGE("Get number of memory records failed = 0x%x\n",
+ s_num_mem_rec_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+
+ mNumMemRecords = s_num_mem_rec_op.u4_num_mem_rec;
+ }
+
+ /* Allocate array to hold memory records */
+ if (mNumMemRecords > SIZE_MAX / sizeof(iv_mem_rec_t)) {
+ ALOGE("requested memory size is too big.");
+ return C2_CORRUPTED;
+ }
+ mMemRecords = (iv_mem_rec_t *)malloc(mNumMemRecords * sizeof(iv_mem_rec_t));
+ if (nullptr == mMemRecords) {
+ ALOGE("Unable to allocate memory for hold memory records: Size %zu",
+ mNumMemRecords * sizeof(iv_mem_rec_t));
+ mSignalledError = true;
+ return C2_CORRUPTED;
+ }
+
+ {
+ iv_mem_rec_t *ps_mem_rec;
+ ps_mem_rec = mMemRecords;
+ for (size_t i = 0; i < mNumMemRecords; i++) {
+ ps_mem_rec->u4_size = sizeof(iv_mem_rec_t);
+ ps_mem_rec->pv_base = nullptr;
+ ps_mem_rec->u4_mem_size = 0;
+ ps_mem_rec->u4_mem_alignment = 0;
+ ps_mem_rec->e_mem_type = IV_NA_MEM_TYPE;
+
+ ps_mem_rec++;
+ }
+ }
+
+ /* Getting MemRecords Attributes */
+ {
+ iv_fill_mem_rec_ip_t s_fill_mem_rec_ip;
+ iv_fill_mem_rec_op_t s_fill_mem_rec_op;
+
+ s_fill_mem_rec_ip.u4_size = sizeof(iv_fill_mem_rec_ip_t);
+ s_fill_mem_rec_op.u4_size = sizeof(iv_fill_mem_rec_op_t);
+
+ s_fill_mem_rec_ip.e_cmd = IV_CMD_FILL_NUM_MEM_REC;
+ s_fill_mem_rec_ip.ps_mem_rec = mMemRecords;
+ s_fill_mem_rec_ip.u4_num_mem_rec = mNumMemRecords;
+ s_fill_mem_rec_ip.u4_max_wd = width;
+ s_fill_mem_rec_ip.u4_max_ht = height;
+ s_fill_mem_rec_ip.u4_max_level = mAVCEncLevel;
+ s_fill_mem_rec_ip.e_color_format = DEFAULT_INP_COLOR_FORMAT;
+ s_fill_mem_rec_ip.u4_max_ref_cnt = DEFAULT_MAX_REF_FRM;
+ s_fill_mem_rec_ip.u4_max_reorder_cnt = DEFAULT_MAX_REORDER_FRM;
+ s_fill_mem_rec_ip.u4_max_srch_rng_x = DEFAULT_MAX_SRCH_RANGE_X;
+ s_fill_mem_rec_ip.u4_max_srch_rng_y = DEFAULT_MAX_SRCH_RANGE_Y;
+
+ status = ive_api_function(nullptr, &s_fill_mem_rec_ip, &s_fill_mem_rec_op);
+
+ if (status != IV_SUCCESS) {
+ ALOGE("Fill memory records failed = 0x%x\n",
+ s_fill_mem_rec_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ }
+
+ /* Allocating Memory for Mem Records */
+ {
+ WORD32 total_size;
+ iv_mem_rec_t *ps_mem_rec;
+ total_size = 0;
+ ps_mem_rec = mMemRecords;
+
+ for (size_t i = 0; i < mNumMemRecords; i++) {
+ ps_mem_rec->pv_base = ive_aligned_malloc(
+ ps_mem_rec->u4_mem_alignment, ps_mem_rec->u4_mem_size);
+ if (ps_mem_rec->pv_base == nullptr) {
+ ALOGE("Allocation failure for mem record id %zu size %u\n", i,
+ ps_mem_rec->u4_mem_size);
+ return C2_CORRUPTED;
+
+ }
+ total_size += ps_mem_rec->u4_mem_size;
+
+ ps_mem_rec++;
+ }
+ }
+
+ /* Codec Instance Creation */
+ {
+ ive_init_ip_t s_init_ip;
+ ive_init_op_t s_init_op;
+
+ mCodecCtx = (iv_obj_t *)mMemRecords[0].pv_base;
+ mCodecCtx->u4_size = sizeof(iv_obj_t);
+ mCodecCtx->pv_fxns = (void *)ive_api_function;
+
+ s_init_ip.u4_size = sizeof(ive_init_ip_t);
+ s_init_op.u4_size = sizeof(ive_init_op_t);
+
+ s_init_ip.e_cmd = IV_CMD_INIT;
+ s_init_ip.u4_num_mem_rec = mNumMemRecords;
+ s_init_ip.ps_mem_rec = mMemRecords;
+ s_init_ip.u4_max_wd = width;
+ s_init_ip.u4_max_ht = height;
+ s_init_ip.u4_max_ref_cnt = DEFAULT_MAX_REF_FRM;
+ s_init_ip.u4_max_reorder_cnt = DEFAULT_MAX_REORDER_FRM;
+ s_init_ip.u4_max_level = mAVCEncLevel;
+ s_init_ip.e_inp_color_fmt = mIvVideoColorFormat;
+
+ if (mReconEnable || mPSNREnable) {
+ s_init_ip.u4_enable_recon = 1;
+ } else {
+ s_init_ip.u4_enable_recon = 0;
+ }
+ s_init_ip.e_recon_color_fmt = DEFAULT_RECON_COLOR_FORMAT;
+ s_init_ip.e_rc_mode = DEFAULT_RC_MODE;
+ s_init_ip.u4_max_framerate = DEFAULT_MAX_FRAMERATE;
+ s_init_ip.u4_max_bitrate = DEFAULT_MAX_BITRATE;
+ s_init_ip.u4_num_bframes = mBframes;
+ s_init_ip.e_content_type = IV_PROGRESSIVE;
+ s_init_ip.u4_max_srch_rng_x = DEFAULT_MAX_SRCH_RANGE_X;
+ s_init_ip.u4_max_srch_rng_y = DEFAULT_MAX_SRCH_RANGE_Y;
+ s_init_ip.e_slice_mode = mSliceMode;
+ s_init_ip.u4_slice_param = mSliceParam;
+ s_init_ip.e_arch = mArch;
+ s_init_ip.e_soc = DEFAULT_SOC;
+
+ status = ive_api_function(mCodecCtx, &s_init_ip, &s_init_op);
+
+ if (status != IV_SUCCESS) {
+ ALOGE("Init encoder failed = 0x%x\n", s_init_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+ }
+
+ /* Get Codec Version */
+ logVersion();
+
+ /* set processor details */
+ setNumCores();
+
+ /* Video control Set Frame dimensions */
+ setDimensions();
+
+ /* Video control Set Frame rates */
+ setFrameRate();
+
+ /* Video control Set IPE Params */
+ setIpeParams();
+
+ /* Video control Set Bitrate */
+ setBitRate();
+
+ /* Video control Set QP */
+ setQp();
+
+ /* Video control Set AIR params */
+ setAirParams();
+
+ /* Video control Set VBV params */
+ setVbvParams();
+
+ /* Video control Set Motion estimation params */
+ setMeParams();
+
+ /* Video control Set GOP params */
+ setGopParams();
+
+ /* Video control Set Deblock params */
+ setDeblockParams();
+
+ /* Video control Set Profile params */
+ setProfileParams();
+
+ /* Video control Set in Encode header mode */
+ setEncMode(IVE_ENC_MODE_HEADER);
+
+ ALOGV("init_codec successfull");
+
+ mSpsPpsHeaderReceived = false;
+ mStarted = true;
+
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::releaseEncoder() {
+ IV_STATUS_T status = IV_SUCCESS;
+ iv_retrieve_mem_rec_ip_t s_retrieve_mem_ip;
+ iv_retrieve_mem_rec_op_t s_retrieve_mem_op;
+ iv_mem_rec_t *ps_mem_rec;
+
+ if (!mStarted) {
+ return C2_OK;
+ }
+
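+ // Ask the codec to hand back its memory records so each aligned allocation can be freed.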
+ s_retrieve_mem_ip.u4_size = sizeof(iv_retrieve_mem_rec_ip_t);
+ s_retrieve_mem_op.u4_size = sizeof(iv_retrieve_mem_rec_op_t);
+ s_retrieve_mem_ip.e_cmd = IV_CMD_RETRIEVE_MEMREC;
+ s_retrieve_mem_ip.ps_mem_rec = mMemRecords;
+
+ status = ive_api_function(mCodecCtx, &s_retrieve_mem_ip, &s_retrieve_mem_op);
+
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to retrieve memory records = 0x%x\n",
+ s_retrieve_mem_op.u4_error_code);
+ return C2_CORRUPTED;
+ }
+
+ /* Free memory records */
+ ps_mem_rec = mMemRecords;
+ for (size_t i = 0; i < s_retrieve_mem_op.u4_num_mem_rec_filled; i++) {
+ if (ps_mem_rec) ive_aligned_free(ps_mem_rec->pv_base);
+ else {
+ ALOGE("memory record is null.");
+ return C2_CORRUPTED;
+ }
+ ps_mem_rec++;
+ }
+
+ if (mMemRecords) free(mMemRecords);
+
+ // clear other pointers into the space being free()d
+ mCodecCtx = nullptr;
+
+ mStarted = false;
+
+ return C2_OK;
+}
+
+c2_status_t C2SoftAvcEnc::setEncodeArgs(
+ ive_video_encode_ip_t *ps_encode_ip,
+ ive_video_encode_op_t *ps_encode_op,
+ const C2GraphicView *const input,
+ uint8_t *base,
+ uint32_t capacity,
+ uint64_t timestamp) {
+ iv_raw_buf_t *ps_inp_raw_buf;
+
+ ps_inp_raw_buf = &ps_encode_ip->s_inp_buf;
+ ps_encode_ip->s_out_buf.pv_buf = base;
+ ps_encode_ip->s_out_buf.u4_bytes = 0;
+ ps_encode_ip->s_out_buf.u4_bufsize = capacity;
+ ps_encode_ip->u4_size = sizeof(ive_video_encode_ip_t);
+ ps_encode_op->u4_size = sizeof(ive_video_encode_op_t);
+
+ ps_encode_ip->e_cmd = IVE_CMD_VIDEO_ENCODE;
+ ps_encode_ip->pv_bufs = nullptr;
+ ps_encode_ip->pv_mb_info = nullptr;
+ ps_encode_ip->pv_pic_info = nullptr;
+ ps_encode_ip->u4_mb_info_type = 0;
+ ps_encode_ip->u4_pic_info_type = 0;
+ ps_encode_ip->u4_is_last = 0;
+ ps_encode_ip->u4_timestamp_high = timestamp >> 32;
+ ps_encode_ip->u4_timestamp_low = timestamp & 0xFFFFFFFF;
+ ps_encode_op->s_out_buf.pv_buf = nullptr;
+
+ /* Initialize color formats */
+ memset(ps_inp_raw_buf, 0, sizeof(iv_raw_buf_t));
+ ps_inp_raw_buf->u4_size = sizeof(iv_raw_buf_t);
+ ps_inp_raw_buf->e_color_fmt = mIvVideoColorFormat;
+ if (input == nullptr) {
+ if (mSawInputEOS){
+ ps_encode_ip->u4_is_last = 1;
+ }
+ return C2_OK;
+ }
+
+ if (input->width() < mSize->width ||
+ input->height() < mSize->height) {
+ /* The input is expected to be at least the configured width and height */
+ ALOGW("unexpected input size %d(%d) x %d(%d)", input->width(),
+ mSize->width, input->height(), mSize->height);
+ return C2_BAD_VALUE;
+ }
+ ALOGV("width = %d, height = %d", input->width(), input->height());
+ const C2PlanarLayout &layout = input->layout();
+ uint8_t *yPlane = const_cast<uint8_t *>(input->data()[C2PlanarLayout::PLANE_Y]);
+ uint8_t *uPlane = const_cast<uint8_t *>(input->data()[C2PlanarLayout::PLANE_U]);
+ uint8_t *vPlane = const_cast<uint8_t *>(input->data()[C2PlanarLayout::PLANE_V]);
+ int32_t yStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
+ int32_t uStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+ int32_t vStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
+
+ uint32_t width = mSize->width;
+ uint32_t height = mSize->height;
+ // width and height must be even (YUV420 chroma planes are subsampled by 2)
+ CHECK_EQ((width & 1u), 0u);
+ CHECK_EQ((height & 1u), 0u);
+ size_t yPlaneSize = width * height;
+
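+ // Normalize the input to the contiguous I420 layout the encoder expects: RGB(A) input is
+ // color-converted, and YUV layouts that are not already I420-compatible are copied.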
+ switch (layout.type) {
+ case C2PlanarLayout::TYPE_RGB:
+ [[fallthrough]];
+ case C2PlanarLayout::TYPE_RGBA: {
+ ALOGV("yPlaneSize = %zu", yPlaneSize);
+ MemoryBlock conversionBuffer = mConversionBuffers.fetch(yPlaneSize * 3 / 2);
+ mConversionBuffersInUse.emplace(conversionBuffer.data(), conversionBuffer);
+ yPlane = conversionBuffer.data();
+ uPlane = yPlane + yPlaneSize;
+ vPlane = uPlane + yPlaneSize / 4;
+ yStride = width;
+ uStride = vStride = yStride / 2;
+ ConvertRGBToPlanarYUV(yPlane, yStride, height, conversionBuffer.size(), *input);
+ break;
+ }
+ case C2PlanarLayout::TYPE_YUV: {
+ if (!IsYUV420(*input)) {
+ ALOGE("input is not YUV420");
+ return C2_BAD_VALUE;
+ }
+
+ if (layout.planes[layout.PLANE_Y].colInc == 1
+ && layout.planes[layout.PLANE_U].colInc == 1
+ && layout.planes[layout.PLANE_V].colInc == 1
+ && uStride == vStride
+ && yStride == 2 * vStride) {
+ // I420 compatible - already set up above
+ break;
+ }
+
+ // copy to I420
+ yStride = width;
+ uStride = vStride = yStride / 2;
+ MemoryBlock conversionBuffer = mConversionBuffers.fetch(yPlaneSize * 3 / 2);
+ mConversionBuffersInUse.emplace(conversionBuffer.data(), conversionBuffer);
+ MediaImage2 img = CreateYUV420PlanarMediaImage2(width, height, yStride, height);
+ status_t err = ImageCopy(conversionBuffer.data(), &img, *input);
+ if (err != OK) {
+ ALOGE("Buffer conversion failed: %d", err);
+ return C2_BAD_VALUE;
+ }
+ yPlane = conversionBuffer.data();
+ uPlane = yPlane + yPlaneSize;
+ vPlane = uPlane + yPlaneSize / 4;
+ break;
+
+ }
+
+ case C2PlanarLayout::TYPE_YUVA:
+ ALOGE("YUVA plane type is not supported");
+ return C2_BAD_VALUE;
+
+ default:
+ ALOGE("Unrecognized plane type: %d", layout.type);
+ return C2_BAD_VALUE;
+ }
+
+ switch (mIvVideoColorFormat) {
+ case IV_YUV_420P:
+ {
+ // input buffer is supposed to be const but Ittiam API wants bare pointer.
+ ps_inp_raw_buf->apv_bufs[0] = yPlane;
+ ps_inp_raw_buf->apv_bufs[1] = uPlane;
+ ps_inp_raw_buf->apv_bufs[2] = vPlane;
+
+ ps_inp_raw_buf->au4_wd[0] = input->width();
+ ps_inp_raw_buf->au4_wd[1] = input->width() / 2;
+ ps_inp_raw_buf->au4_wd[2] = input->width() / 2;
+
+ ps_inp_raw_buf->au4_ht[0] = input->height();
+ ps_inp_raw_buf->au4_ht[1] = input->height() / 2;
+ ps_inp_raw_buf->au4_ht[2] = input->height() / 2;
+
+ ps_inp_raw_buf->au4_strd[0] = yStride;
+ ps_inp_raw_buf->au4_strd[1] = uStride;
+ ps_inp_raw_buf->au4_strd[2] = vStride;
+ break;
+ }
+
+ case IV_YUV_422ILE:
+ {
+ // TODO
+ // ps_inp_raw_buf->apv_bufs[0] = pu1_buf;
+ // ps_inp_raw_buf->au4_wd[0] = mWidth * 2;
+ // ps_inp_raw_buf->au4_ht[0] = mHeight;
+ // ps_inp_raw_buf->au4_strd[0] = mStride * 2;
+ break;
+ }
+
+ case IV_YUV_420SP_UV:
+ case IV_YUV_420SP_VU:
+ default:
+ {
+ ps_inp_raw_buf->apv_bufs[0] = yPlane;
+ ps_inp_raw_buf->apv_bufs[1] = uPlane;
+
+ ps_inp_raw_buf->au4_wd[0] = input->width();
+ ps_inp_raw_buf->au4_wd[1] = input->width();
+
+ ps_inp_raw_buf->au4_ht[0] = input->height();
+ ps_inp_raw_buf->au4_ht[1] = input->height() / 2;
+
+ ps_inp_raw_buf->au4_strd[0] = yStride;
+ ps_inp_raw_buf->au4_strd[1] = uStride;
+ break;
+ }
+ }
+ return C2_OK;
+}
+
+void C2SoftAvcEnc::process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ // Initialize output work
+ work->result = C2_OK;
+ work->workletsProcessed = 1u;
+ work->worklets.front()->output.flags = work->input.flags;
+
+ IV_STATUS_T status;
+ WORD32 timeDelay, timeTaken;
+ uint64_t timestamp = work->input.ordinal.timestamp.peekull();
+
+ // Initialize encoder if not already initialized
+ if (mCodecCtx == nullptr) {
+ if (C2_OK != initEncoder()) {
+ ALOGE("Failed to initialize encoder");
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ }
+ if (mSignalledError) {
+ return;
+ }
+
+ // while (!mSawOutputEOS && !outQueue.empty()) {
+ c2_status_t error;
+ ive_video_encode_ip_t s_encode_ip;
+ ive_video_encode_op_t s_encode_op;
+
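+ // On the first frame, run the encoder in header mode to produce SPS/PPS and report the
+ // header bytes to the client as codec-specific config.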
+ if (!mSpsPpsHeaderReceived) {
+ constexpr uint32_t kHeaderLength = MIN_STREAM_SIZE;
+ uint8_t header[kHeaderLength];
+ error = setEncodeArgs(
+ &s_encode_ip, &s_encode_op, nullptr, header, kHeaderLength, timestamp);
+ if (error != C2_OK) {
+ ALOGE("setEncodeArgs failed: %d", error);
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ status = ive_api_function(mCodecCtx, &s_encode_ip, &s_encode_op);
+
+ if (IV_SUCCESS != status) {
+ ALOGE("Encode header failed = 0x%x\n",
+ s_encode_op.u4_error_code);
+ return;
+ } else {
+ ALOGV("Bytes Generated in header %d\n",
+ s_encode_op.s_out_buf.u4_bytes);
+ }
+
+ mSpsPpsHeaderReceived = true;
+
+ std::unique_ptr<C2StreamCsdInfo::output> csd =
+ C2StreamCsdInfo::output::AllocUnique(s_encode_op.s_out_buf.u4_bytes, 0u);
+ if (!csd) {
+ ALOGE("CSD allocation failed");
+ mSignalledError = true;
+ work->result = C2_NO_MEMORY;
+ return;
+ }
+ memcpy(csd->m.value, header, s_encode_op.s_out_buf.u4_bytes);
+ work->worklets.front()->output.configUpdate.push_back(std::move(csd));
+
+ DUMP_TO_FILE(
+ mOutFile, csd->m.value, csd->flexCount());
+ }
+
+ // handle dynamic config parameters
+ {
+ IntfImpl::Lock lock = mIntf->lock();
+ std::shared_ptr<C2StreamIntraRefreshTuning::output> intraRefresh = mIntf->getIntraRefresh_l();
+ std::shared_ptr<C2StreamBitrateInfo::output> bitrate = mIntf->getBitrate_l();
+ std::shared_ptr<C2StreamRequestSyncFrameTuning::output> requestSync = mIntf->getRequestSync_l();
+ lock.unlock();
+
+ if (bitrate != mBitrate) {
+ mBitrate = bitrate;
+ setBitRate();
+ }
+
+ if (intraRefresh != mIntraRefresh) {
+ mIntraRefresh = intraRefresh;
+ setAirParams();
+ }
+
+ if (requestSync != mRequestSync) {
+ // we can handle IDR immediately
+ if (requestSync->value) {
+ // unset request
+ C2StreamRequestSyncFrameTuning::output clearSync(0u, C2_FALSE);
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ mIntf->config({ &clearSync }, C2_MAY_BLOCK, &failures);
+ ALOGV("Got sync request");
+ setFrameType(IV_IDR_FRAME);
+ }
+ mRequestSync = requestSync;
+ }
+ }
+
+ if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
+ mSawInputEOS = true;
+ }
+
+ /* In normal mode, inputBufferInfo would be stored and returned
+ when the encoder consumes this input */
+ // if (!mInputDataIsMeta && (inputBufferInfo != NULL)) {
+ // for (size_t i = 0; i < MAX_INPUT_BUFFER_HEADERS; i++) {
+ // if (NULL == mInputBufferInfo[i]) {
+ // mInputBufferInfo[i] = inputBufferInfo;
+ // break;
+ // }
+ // }
+ // }
+ std::shared_ptr<const C2GraphicView> view;
+ std::shared_ptr<C2Buffer> inputBuffer;
+ if (!work->input.buffers.empty()) {
+ inputBuffer = work->input.buffers[0];
+ view = std::make_shared<const C2GraphicView>(
+ inputBuffer->data().graphicBlocks().front().map().get());
+ if (view->error() != C2_OK) {
+ ALOGE("graphic view map err = %d", view->error());
+ return;
+ }
+ }
+
+ std::shared_ptr<C2LinearBlock> block;
+
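+ // Encode into the fetched linear block; on IH264E_BITSTREAM_BUFFER_OVERFLOW the output
+ // buffer size is doubled and the frame is retried.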
+ do {
+ C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
+ // TODO: error handling, proper usage, etc.
+ c2_status_t err = pool->fetchLinearBlock(mOutBufferSize, usage, &block);
+ if (err != C2_OK) {
+ ALOGE("fetch linear block err = %d", err);
+ work->result = err;
+ return;
+ }
+ C2WriteView wView = block->map().get();
+ if (wView.error() != C2_OK) {
+ ALOGE("write view map err = %d", wView.error());
+ work->result = wView.error();
+ return;
+ }
+
+ error = setEncodeArgs(
+ &s_encode_ip, &s_encode_op, view.get(), wView.base(), wView.capacity(), timestamp);
+ if (error != C2_OK) {
+ ALOGE("setEncodeArgs failed : %d", error);
+ mSignalledError = true;
+ work->result = error;
+ return;
+ }
+
+ // DUMP_TO_FILE(
+ // mInFile, s_encode_ip.s_inp_buf.apv_bufs[0],
+ // (mHeight * mStride * 3 / 2));
+
+ GETTIME(&mTimeStart, nullptr);
+ /* Compute time elapsed between the end of the previous encode()
+ * call and the start of the current one */
+ TIME_DIFF(mTimeEnd, mTimeStart, timeDelay);
+ status = ive_api_function(mCodecCtx, &s_encode_ip, &s_encode_op);
+
+ if (IV_SUCCESS != status) {
+ if ((s_encode_op.u4_error_code & 0xFF) == IH264E_BITSTREAM_BUFFER_OVERFLOW) {
+ // TODO: use IVE_CMD_CTL_GETBUFINFO for proper max input size?
+ mOutBufferSize *= 2;
+ continue;
+ }
+ ALOGE("Encode Frame failed = 0x%x\n",
+ s_encode_op.u4_error_code);
+ mSignalledError = true;
+ work->result = C2_CORRUPTED;
+ return;
+ }
+ } while (IV_SUCCESS != status);
+
+ // Hold input buffer reference
+ if (inputBuffer) {
+ mBuffers[s_encode_ip.s_inp_buf.apv_bufs[0]] = inputBuffer;
+ }
+
+ GETTIME(&mTimeEnd, nullptr);
+ /* Compute time taken for encode() */
+ TIME_DIFF(mTimeStart, mTimeEnd, timeTaken);
+
+ ALOGV("timeTaken=%6d delay=%6d numBytes=%6d", timeTaken, timeDelay,
+ s_encode_op.s_out_buf.u4_bytes);
+
+ void *freed = s_encode_op.s_inp_buf.apv_bufs[0];
+ /* If encoder frees up an input buffer, mark it as free */
+ if (freed != nullptr) {
+ if (mBuffers.count(freed) == 0u) {
+ ALOGD("buffer not tracked");
+ } else {
+ // Release input buffer reference
+ mBuffers.erase(freed);
+ mConversionBuffersInUse.erase(freed);
+ }
+ }
+
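+ // Fill in the output worklet: propagate the input flags and ordinal, take the timestamp
+ // reported by the encoder, and attach the encoded bytes (marked as a key frame for IDR output).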
+ work->worklets.front()->output.flags = work->input.flags;
+ work->worklets.front()->output.ordinal = work->input.ordinal;
+ work->worklets.front()->output.ordinal.timestamp =
+ ((uint64_t)s_encode_op.u4_timestamp_high << 32) | s_encode_op.u4_timestamp_low;
+ work->worklets.front()->output.buffers.clear();
+
+ if (s_encode_op.s_out_buf.u4_bytes) {
+ std::shared_ptr<C2Buffer> buffer =
+ createLinearBuffer(block, 0, s_encode_op.s_out_buf.u4_bytes);
+ if (IV_IDR_FRAME == s_encode_op.u4_encoded_frame_type) {
+ ALOGV("IDR frame produced");
+ buffer->setInfo(std::make_shared<C2StreamPictureTypeMaskInfo::output>(
+ 0u /* stream id */, C2PictureTypeKeyFrame));
+ }
+ work->worklets.front()->output.buffers.push_back(buffer);
+ }
+
+ if (s_encode_op.u4_is_last) {
+ // outputBufferHeader->nFlags |= OMX_BUFFERFLAG_EOS;
+ mSawOutputEOS = true;
+ } else {
+ // outputBufferHeader->nFlags &= ~OMX_BUFFERFLAG_EOS;
+ }
+}
+
+c2_status_t C2SoftAvcEnc::drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) {
+ // TODO: use IVE_CMD_CTL_FLUSH?
+ (void)drainMode;
+ (void)pool;
+ return C2_OK;
+}
+
+
+class C2SoftAvcEncFactory : public C2ComponentFactory {
+public:
+ C2SoftAvcEncFactory() : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
+ GetCodec2PlatformComponentStore()->getParamReflector())) {
+ }
+
+ virtual c2_status_t createComponent(
+ c2_node_id_t id,
+ std::shared_ptr<C2Component>* const component,
+ std::function<void(C2Component*)> deleter) override {
+ *component = std::shared_ptr<C2Component>(
+ new C2SoftAvcEnc(COMPONENT_NAME,
+ id,
+ std::make_shared<C2SoftAvcEnc::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual c2_status_t createInterface(
+ c2_node_id_t id,
+ std::shared_ptr<C2ComponentInterface>* const interface,
+ std::function<void(C2ComponentInterface*)> deleter) override {
+ *interface = std::shared_ptr<C2ComponentInterface>(
+ new SimpleInterface<C2SoftAvcEnc::IntfImpl>(
+ COMPONENT_NAME, id, std::make_shared<C2SoftAvcEnc::IntfImpl>(mHelper)),
+ deleter);
+ return C2_OK;
+ }
+
+ virtual ~C2SoftAvcEncFactory() override = default;
+
+private:
+ std::shared_ptr<C2ReflectorHelper> mHelper;
+};
+
+} // namespace android
+
+extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
+ ALOGV("in %s", __func__);
+ return new ::android::C2SoftAvcEncFactory();
+}
+
+extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
+ ALOGV("in %s", __func__);
+ delete factory;
+}
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.h b/media/codec2/components/avc/C2SoftAvcEnc.h
new file mode 100644
index 0000000..aa3ca61
--- /dev/null
+++ b/media/codec2/components/avc/C2SoftAvcEnc.h
@@ -0,0 +1,296 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_C2_SOFT_AVC_ENC_H__
+#define ANDROID_C2_SOFT_AVC_ENC_H__
+
+#include <map>
+
+#include <utils/Vector.h>
+
+#include <SimpleC2Component.h>
+
+#include "ih264_typedefs.h"
+#include "iv2.h"
+#include "ive2.h"
+
+namespace android {
+
+#define CODEC_MAX_CORES 4
+#define LEN_STATUS_BUFFER (10 * 1024)
+#define MAX_VBV_BUFF_SIZE (120 * 16384)
+#define MAX_NUM_IO_BUFS 3
+
+#define DEFAULT_MAX_REF_FRM 2
+#define DEFAULT_MAX_REORDER_FRM 0
+#define DEFAULT_QP_MIN 10
+#define DEFAULT_QP_MAX 40
+#define DEFAULT_MAX_BITRATE 20000000
+#define DEFAULT_MAX_SRCH_RANGE_X 256
+#define DEFAULT_MAX_SRCH_RANGE_Y 256
+#define DEFAULT_MAX_FRAMERATE 120000
+#define DEFAULT_NUM_CORES 1
+#define DEFAULT_NUM_CORES_PRE_ENC 0
+#define DEFAULT_FPS 30
+#define DEFAULT_ENC_SPEED IVE_NORMAL
+
+#define DEFAULT_MEM_REC_CNT 0
+#define DEFAULT_RECON_ENABLE 0
+#define DEFAULT_CHKSUM_ENABLE 0
+#define DEFAULT_START_FRM 0
+#define DEFAULT_NUM_FRMS 0xFFFFFFFF
+#define DEFAULT_INP_COLOR_FORMAT IV_YUV_420SP_VU
+#define DEFAULT_RECON_COLOR_FORMAT IV_YUV_420P
+#define DEFAULT_LOOPBACK 0
+#define DEFAULT_SRC_FRAME_RATE 30
+#define DEFAULT_TGT_FRAME_RATE 30
+#define DEFAULT_MAX_WD 1920
+#define DEFAULT_MAX_HT 1920
+#define DEFAULT_MAX_LEVEL 41
+#define DEFAULT_STRIDE 0
+#define DEFAULT_WD 1280
+#define DEFAULT_HT 720
+#define DEFAULT_PSNR_ENABLE 0
+#define DEFAULT_ME_SPEED 100
+#define DEFAULT_ENABLE_FAST_SAD 0
+#define DEFAULT_ENABLE_ALT_REF 0
+#define DEFAULT_RC_MODE IVE_RC_STORAGE
+#define DEFAULT_BITRATE 6000000
+#define DEFAULT_I_QP 22
+#define DEFAULT_I_QP_MAX DEFAULT_QP_MAX
+#define DEFAULT_I_QP_MIN DEFAULT_QP_MIN
+#define DEFAULT_P_QP 28
+#define DEFAULT_P_QP_MAX DEFAULT_QP_MAX
+#define DEFAULT_P_QP_MIN DEFAULT_QP_MIN
+#define DEFAULT_B_QP 22
+#define DEFAULT_B_QP_MAX DEFAULT_QP_MAX
+#define DEFAULT_B_QP_MIN DEFAULT_QP_MIN
+#define DEFAULT_AIR IVE_AIR_MODE_NONE
+#define DEFAULT_AIR_REFRESH_PERIOD 30
+#define DEFAULT_SRCH_RNG_X 64
+#define DEFAULT_SRCH_RNG_Y 48
+#define DEFAULT_I_INTERVAL 30
+#define DEFAULT_IDR_INTERVAL 1000
+#define DEFAULT_B_FRAMES 0
+#define DEFAULT_DISABLE_DEBLK_LEVEL 0
+#define DEFAULT_HPEL 1
+#define DEFAULT_QPEL 1
+#define DEFAULT_I4 1
+#define DEFAULT_EPROFILE IV_PROFILE_BASE
+#define DEFAULT_ENTROPY_MODE 0
+#define DEFAULT_SLICE_MODE IVE_SLICE_MODE_NONE
+#define DEFAULT_SLICE_PARAM 256
+#define DEFAULT_ARCH ARCH_ARM_A9Q
+#define DEFAULT_SOC SOC_GENERIC
+#define DEFAULT_INTRA4x4 0
+#define STRLENGTH 500
+#define DEFAULT_CONSTRAINED_INTRA 0
+
+#define MIN(a, b) ((a) < (b))? (a) : (b)
+#define MAX(a, b) ((a) > (b))? (a) : (b)
+#define ALIGN16(x) ((((x) + 15) >> 4) << 4)
+#define ALIGN128(x) ((((x) + 127) >> 7) << 7)
+#define ALIGN4096(x) ((((x) + 4095) >> 12) << 12)
+
+/** Used to remove warnings about unused parameters */
+#define UNUSED(x) ((void)(x))
+
+/** Get time */
+#define GETTIME(a, b) gettimeofday(a, b);
+
+/** Compute difference between start and end */
+#define TIME_DIFF(start, end, diff) \
+ diff = (((end).tv_sec - (start).tv_sec) * 1000000) + \
+ ((end).tv_usec - (start).tv_usec);
+
+#define ive_aligned_malloc(alignment, size) memalign(alignment, size)
+#define ive_aligned_free(buf) free(buf)
+
+struct C2SoftAvcEnc : public SimpleC2Component {
+ class IntfImpl;
+
+ C2SoftAvcEnc(const char *name, c2_node_id_t id, const std::shared_ptr<IntfImpl> &intfImpl);
+
+ // From SimpleC2Component
+ c2_status_t onInit() override;
+ c2_status_t onStop() override;
+ void onReset() override;
+ void onRelease() override;
+ c2_status_t onFlush_sm() override;
+ void process(
+ const std::unique_ptr<C2Work> &work,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+ c2_status_t drain(
+ uint32_t drainMode,
+ const std::shared_ptr<C2BlockPool> &pool) override;
+
+protected:
+ virtual ~C2SoftAvcEnc();
+
+private:
+ // OMX input buffer's timestamp and flags
+ typedef struct {
+ int64_t mTimeUs;
+ int32_t mFlags;
+ } InputBufferInfo;
+
+ std::shared_ptr<IntfImpl> mIntf;
+
+ int32_t mStride;
+
+ struct timeval mTimeStart; // Time at the start of encode()
+ struct timeval mTimeEnd; // Time at the end of encode()
+
+#ifdef FILE_DUMP_ENABLE
+ char mInFile[200];
+ char mOutFile[200];
+#endif /* FILE_DUMP_ENABLE */
+
+ IV_COLOR_FORMAT_T mIvVideoColorFormat;
+
+ IV_PROFILE_T mAVCEncProfile __unused;
+ WORD32 mAVCEncLevel;
+ bool mStarted;
+ bool mSpsPpsHeaderReceived;
+
+ bool mSawInputEOS;
+ bool mSawOutputEOS;
+ bool mSignalledError;
+ bool mIntra4x4;
+ bool mEnableFastSad;
+ bool mEnableAltRef;
+ bool mReconEnable;
+ bool mPSNREnable;
+ bool mEntropyMode;
+ bool mConstrainedIntraFlag;
+ IVE_SPEED_CONFIG mEncSpeed;
+
+ iv_obj_t *mCodecCtx; // Codec context
+ iv_mem_rec_t *mMemRecords; // Memory records requested by the codec
+ size_t mNumMemRecords; // Number of memory records requested by codec
+ size_t mNumCores; // Number of cores used by the codec
+
+ // configurations used by component in process
+ // (TODO: keep this in intf but make them internal only)
+ std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
+ std::shared_ptr<C2StreamIntraRefreshTuning::output> mIntraRefresh;
+ std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
+ std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
+ std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
+
+ uint32_t mOutBufferSize;
+ UWORD32 mHeaderGenerated;
+ UWORD32 mBframes;
+ IV_ARCH_T mArch;
+ IVE_SLICE_MODE_T mSliceMode;
+ UWORD32 mSliceParam;
+ bool mHalfPelEnable;
+ UWORD32 mIInterval;
+ UWORD32 mIDRInterval;
+ UWORD32 mDisableDeblkLevel;
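+ // Input and conversion buffers handed to the codec, keyed by plane pointer so they can be
+ // released once the codec reports them as consumed.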
+ std::map<const void *, std::shared_ptr<C2Buffer>> mBuffers;
+ MemoryBlockPool mConversionBuffers;
+ std::map<const void *, MemoryBlock> mConversionBuffersInUse;
+
+ void initEncParams();
+ c2_status_t initEncoder();
+ c2_status_t releaseEncoder();
+
+ c2_status_t setFrameType(IV_PICTURE_CODING_TYPE_T e_frame_type);
+ c2_status_t setQp();
+ c2_status_t setEncMode(IVE_ENC_MODE_T e_enc_mode);
+ c2_status_t setDimensions();
+ c2_status_t setNumCores();
+ c2_status_t setFrameRate();
+ c2_status_t setIpeParams();
+ c2_status_t setBitRate();
+ c2_status_t setAirParams();
+ c2_status_t setMeParams();
+ c2_status_t setGopParams();
+ c2_status_t setProfileParams();
+ c2_status_t setDeblockParams();
+ c2_status_t setVbvParams();
+ void logVersion();
+ c2_status_t setEncodeArgs(
+ ive_video_encode_ip_t *ps_encode_ip,
+ ive_video_encode_op_t *ps_encode_op,
+ const C2GraphicView *const input,
+ uint8_t *base,
+ uint32_t capacity,
+ uint64_t timestamp);
+
+ C2_DO_NOT_COPY(C2SoftAvcEnc);
+};
+
+#ifdef FILE_DUMP_ENABLE
+
+#define INPUT_DUMP_PATH "/sdcard/media/avce_input"
+#define INPUT_DUMP_EXT "yuv"
+#define OUTPUT_DUMP_PATH "/sdcard/media/avce_output"
+#define OUTPUT_DUMP_EXT "h264"
+
+#define GENERATE_FILE_NAMES() { \
+ GETTIME(&mTimeStart, NULL); \
+ strcpy(mInFile, ""); \
+ sprintf(mInFile, "%s_%ld.%ld.%s", INPUT_DUMP_PATH, \
+ mTimeStart.tv_sec, mTimeStart.tv_usec, \
+ INPUT_DUMP_EXT); \
+ strcpy(mOutFile, ""); \
+ sprintf(mOutFile, "%s_%ld.%ld.%s", OUTPUT_DUMP_PATH,\
+ mTimeStart.tv_sec, mTimeStart.tv_usec, \
+ OUTPUT_DUMP_EXT); \
+}
+
+#define CREATE_DUMP_FILE(m_filename) { \
+ FILE *fp = fopen(m_filename, "wb"); \
+ if (fp != NULL) { \
+ ALOGD("Opened file %s", m_filename); \
+ fclose(fp); \
+ } else { \
+ ALOGD("Could not open file %s", m_filename); \
+ } \
+}
+#define DUMP_TO_FILE(m_filename, m_buf, m_size) \
+{ \
+ FILE *fp = fopen(m_filename, "ab"); \
+ if (fp != NULL && m_buf != NULL) { \
+ int i; \
+ i = fwrite(m_buf, 1, m_size, fp); \
+ ALOGD("fwrite ret %d to write %d", i, m_size); \
+ if (i != (int)m_size) { \
+ ALOGD("Error in fwrite, returned %d", i); \
+ perror("Error in write to file"); \
+ } \
+ fclose(fp); \
+ } else { \
+ ALOGD("Could not write to file %s", m_filename);\
+ if (fp != NULL) \
+ fclose(fp); \
+ } \
+}
+#else /* FILE_DUMP_ENABLE */
+#define INPUT_DUMP_PATH
+#define INPUT_DUMP_EXT
+#define OUTPUT_DUMP_PATH
+#define OUTPUT_DUMP_EXT
+#define GENERATE_FILE_NAMES()
+#define CREATE_DUMP_FILE(m_filename)
+#define DUMP_TO_FILE(m_filename, m_buf, m_size)
+#endif /* FILE_DUMP_ENABLE */
+
+} // namespace android
+
+#endif // ANDROID_C2_SOFT_AVC_ENC_H__