Merge "C2 mpeg4enc: Support dynamic bitrate change"
diff --git a/media/codec2/sfplugin/C2OMXNode.cpp b/media/codec2/sfplugin/C2OMXNode.cpp
index ab73245..1a92c08 100644
--- a/media/codec2/sfplugin/C2OMXNode.cpp
+++ b/media/codec2/sfplugin/C2OMXNode.cpp
@@ -33,12 +33,14 @@
 #include <OMX_IndexExt.h>
 
 #include <android/fdsan.h>
+#include <media/stagefright/foundation/ColorUtils.h>
 #include <media/stagefright/omx/OMXUtils.h>
 #include <media/stagefright/MediaErrors.h>
 #include <ui/Fence.h>
 #include <ui/GraphicBuffer.h>
 #include <utils/Thread.h>
 
+#include "utils/Codec2Mapper.h"
 #include "C2OMXNode.h"
 
 namespace android {
@@ -71,6 +73,25 @@
         jobs->cond.broadcast();
     }
 
+    void setDataspace(android_dataspace dataspace) {
+        Mutexed<Jobs>::Locked jobs(mJobs);
+        ColorUtils::convertDataSpaceToV0(dataspace);
+        jobs->configUpdate.emplace_back(new C2StreamDataSpaceInfo::input(0u, dataspace));
+        int32_t standard = (int32_t(dataspace) & HAL_DATASPACE_STANDARD_MASK)
+            >> HAL_DATASPACE_STANDARD_SHIFT;
+        int32_t transfer = (int32_t(dataspace) & HAL_DATASPACE_TRANSFER_MASK)
+            >> HAL_DATASPACE_TRANSFER_SHIFT;
+        int32_t range = (int32_t(dataspace) & HAL_DATASPACE_RANGE_MASK)
+            >> HAL_DATASPACE_RANGE_SHIFT;
+        std::unique_ptr<C2StreamColorAspectsInfo::input> colorAspects =
+            std::make_unique<C2StreamColorAspectsInfo::input>(0u);
+        if (C2Mapper::map(standard, &colorAspects->primaries, &colorAspects->matrix)
+                && C2Mapper::map(transfer, &colorAspects->transfer)
+                && C2Mapper::map(range, &colorAspects->range)) {
+            jobs->configUpdate.push_back(std::move(colorAspects));
+        }
+    }
+
 protected:
     bool threadLoop() override {
         constexpr nsecs_t kIntervalNs = nsecs_t(10) * 1000 * 1000;  // 10ms
@@ -102,6 +123,9 @@
                     uniqueFds.push_back(std::move(queue.workList.front().fd1));
                     queue.workList.pop_front();
                 }
+                for (const std::unique_ptr<C2Param> &param : jobs->configUpdate) {
+                    items.front()->input.configUpdate.emplace_back(C2Param::Copy(*param));
+                }
 
                 jobs.unlock();
                 for (int fenceFd : fenceFds) {
@@ -119,6 +143,7 @@
                 queued = true;
             }
             if (queued) {
+                jobs->configUpdate.clear();
                 return true;
             }
             if (i == 0) {
@@ -161,6 +186,7 @@
         std::map<std::weak_ptr<Codec2Client::Component>,
                  Queue,
                  std::owner_less<std::weak_ptr<Codec2Client::Component>>> queues;
+        std::vector<std::unique_ptr<C2Param>> configUpdate;
         Condition cond;
     };
     Mutexed<Jobs> mJobs;
@@ -172,6 +198,9 @@
       mQueueThread(new QueueThread) {
     android_fdsan_set_error_level(ANDROID_FDSAN_ERROR_LEVEL_WARN_ALWAYS);
     mQueueThread->run("C2OMXNode", PRIORITY_AUDIO);
+
+    Mutexed<android_dataspace>::Locked ds(mDataspace);
+    *ds = HAL_DATASPACE_UNKNOWN;
 }
 
 status_t C2OMXNode::freeNode() {
@@ -459,8 +488,11 @@
     android_dataspace dataSpace = (android_dataspace)msg.u.event_data.data1;
     uint32_t pixelFormat = msg.u.event_data.data3;
 
-    // TODO: set dataspace on component to see if it impacts color aspects
     ALOGD("dataspace changed to %#x pixel format: %#x", dataSpace, pixelFormat);
+    mQueueThread->setDataspace(dataSpace);
+
+    Mutexed<android_dataspace>::Locked ds(mDataspace);
+    *ds = dataSpace;
     return OK;
 }
 
@@ -493,4 +525,8 @@
     (void)mBufferSource->onInputBufferEmptied(bufferId, -1);
 }
 
+android_dataspace C2OMXNode::getDataspace() {
+    return *mDataspace.lock();
+}
+
 }  // namespace android
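
For reference, a minimal standalone sketch of the standard/transfer/range split
that setDataspace() performs above. The shift and mask values are mirrored from
the HAL_DATASPACE_* definitions in system/graphics.h for illustration only; the
patch itself uses those constants directly.

    #include <cstdint>
    #include <cstdio>

    // Assumed to match HAL_DATASPACE_STANDARD/TRANSFER/RANGE _MASK and _SHIFT.
    constexpr uint32_t kStandardShift = 16, kStandardMask = 0x3fu << 16;
    constexpr uint32_t kTransferShift = 22, kTransferMask = 0x1fu << 22;
    constexpr uint32_t kRangeShift    = 27, kRangeMask    = 0x7u  << 27;

    int main() {
        // HAL_DATASPACE_V0_BT709 = STANDARD_BT709 | TRANSFER_SMPTE_170M | RANGE_LIMITED
        uint32_t dataspace = (1u << kStandardShift) | (3u << kTransferShift) | (2u << kRangeShift);
        uint32_t standard = (dataspace & kStandardMask) >> kStandardShift;
        uint32_t transfer = (dataspace & kTransferMask) >> kTransferShift;
        uint32_t range    = (dataspace & kRangeMask)    >> kRangeShift;
        printf("dataspace %#x -> standard %u, transfer %u, range %u\n",
               dataspace, standard, transfer, range);
        return 0;
    }

These decomposed values are what C2Mapper::map() then translates into the
C2Color primaries/matrix/transfer/range fields before they are queued as a
config update on the next input buffer.
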
diff --git a/media/codec2/sfplugin/C2OMXNode.h b/media/codec2/sfplugin/C2OMXNode.h
index 1717c96..5d587bc 100644
--- a/media/codec2/sfplugin/C2OMXNode.h
+++ b/media/codec2/sfplugin/C2OMXNode.h
@@ -93,6 +93,8 @@
      */
     void onInputBufferDone(c2_cntr64_t index);
 
+    android_dataspace getDataspace();
+
 private:
     std::weak_ptr<Codec2Client::Component> mComp;
     sp<IOMXBufferSource> mBufferSource;
@@ -101,6 +103,7 @@
     uint32_t mWidth;
     uint32_t mHeight;
     uint64_t mUsage;
+    Mutexed<android_dataspace> mDataspace;
 
     // WORKAROUND: timestamp adjustment
 
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 80ef0c1..7b914e4 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -44,6 +44,7 @@
 #include <media/stagefright/BufferProducerWrapper.h>
 #include <media/stagefright/MediaCodecConstants.h>
 #include <media/stagefright/PersistentSurface.h>
+#include <utils/NativeHandle.h>
 
 #include "C2OMXNode.h"
 #include "CCodecBufferChannel.h"
@@ -210,8 +211,6 @@
                 (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
                 &usage, sizeof(usage));
 
-        // NOTE: we do not use/pass through color aspects from GraphicBufferSource as we
-        // communicate that directly to the component.
         mSource->configure(
                 mOmxNode, static_cast<hardware::graphics::common::V1_0::Dataspace>(mDataSpace));
         return OK;
@@ -410,6 +409,10 @@
         mNode->onInputBufferDone(index);
     }
 
+    android_dataspace getDataspace() override {
+        return mNode->getDataspace();
+    }
+
 private:
     sp<HGraphicBufferSource> mSource;
     sp<C2OMXNode> mNode;
@@ -795,10 +798,30 @@
             mChannel->setMetaMode(CCodecBufferChannel::MODE_ANW);
         }
 
+        status_t err = OK;
         sp<RefBase> obj;
         sp<Surface> surface;
         if (msg->findObject("native-window", &obj)) {
             surface = static_cast<Surface *>(obj.get());
+            // setup tunneled playback
+            if (surface != nullptr) {
+                Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+                const std::unique_ptr<Config> &config = *configLocked;
+                if ((config->mDomain & Config::IS_DECODER)
+                        && (config->mDomain & Config::IS_VIDEO)) {
+                    int32_t tunneled;
+                    if (msg->findInt32("feature-tunneled-playback", &tunneled) && tunneled != 0) {
+                        ALOGI("Configuring TUNNELED video playback.");
+
+                        err = configureTunneledVideoPlayback(comp, &config->mSidebandHandle, msg);
+                        if (err != OK) {
+                            ALOGE("configureTunneledVideoPlayback failed!");
+                            return err;
+                        }
+                        config->mTunneled = true;
+                    }
+                }
+            }
             setSurface(surface);
         }
 
@@ -1007,6 +1030,29 @@
             }
         }
 
+        // get color aspects
+        getColorAspectsFromFormat(msg, config->mClientColorAspects);
+
+        /*
+         * Handle dataspace
+         */
+        int32_t usingRecorder;
+        if (msg->findInt32("android._using-recorder", &usingRecorder) && usingRecorder) {
+            android_dataspace dataSpace = HAL_DATASPACE_BT709;
+            int32_t width, height;
+            if (msg->findInt32("width", &width)
+                    && msg->findInt32("height", &height)) {
+                setDefaultCodecColorAspectsIfNeeded(config->mClientColorAspects, width, height);
+                // TODO: read dataspace / color aspect from the component
+                setColorAspectsIntoFormat(
+                        config->mClientColorAspects, const_cast<sp<AMessage> &>(msg));
+                dataSpace = getDataSpaceForColorAspects(
+                        config->mClientColorAspects, true /* mayexpand */);
+            }
+            msg->setInt32("android._dataspace", (int32_t)dataSpace);
+            ALOGD("setting dataspace to %x", dataSpace);
+        }
+
         int32_t subscribeToAllVendorParams;
         if (msg->findInt32("x-*", &subscribeToAllVendorParams) && subscribeToAllVendorParams) {
             if (config->subscribeToAllVendorParams(comp, C2_MAY_BLOCK) != OK) {
@@ -1023,7 +1069,7 @@
             sdkParams = msg->dup();
             sdkParams->removeEntryAt(sdkParams->findEntryByName(PARAMETER_KEY_VIDEO_BITRATE));
         }
-        status_t err = config->getConfigUpdateFromSdkParams(
+        err = config->getConfigUpdateFromSdkParams(
                 comp, sdkParams, Config::IS_CONFIG, C2_DONT_BLOCK, &configUpdate);
         if (err != OK) {
             ALOGW("failed to convert configuration to c2 params");
@@ -1703,6 +1749,19 @@
 }
 
 status_t CCodec::setSurface(const sp<Surface> &surface) {
+    Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+    const std::unique_ptr<Config> &config = *configLocked;
+    if (config->mTunneled && config->mSidebandHandle != nullptr) {
+        sp<ANativeWindow> nativeWindow = static_cast<ANativeWindow *>(surface.get());
+        status_t err = native_window_set_sideband_stream(
+                nativeWindow.get(),
+                const_cast<native_handle_t *>(config->mSidebandHandle->handle()));
+        if (err != OK) {
+            ALOGE("NativeWindow(%p) native_window_set_sideband_stream(%p) failed! (err %d).",
+                    nativeWindow.get(), config->mSidebandHandle->handle(), err);
+            return err;
+        }
+    }
     return mChannel->setSurface(surface);
 }
 
@@ -1926,6 +1985,44 @@
     }
 }
 
+static void HandleDataspace(
+        android_dataspace dataspace, ColorAspects *colorAspects, sp<AMessage> *format) {
+    ColorUtils::convertDataSpaceToV0(dataspace);
+    int32_t range, standard, transfer;
+    range = (dataspace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT;
+    if (range == 0) {
+        range = ColorUtils::wrapColorAspectsIntoColorRange(
+                colorAspects->mRange);
+    }
+    standard = (dataspace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT;
+    if (standard == 0) {
+        standard = ColorUtils::wrapColorAspectsIntoColorStandard(
+                colorAspects->mPrimaries,
+                colorAspects->mMatrixCoeffs);
+    }
+    transfer = (dataspace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT;
+    if (transfer == 0) {
+        transfer = ColorUtils::wrapColorAspectsIntoColorTransfer(
+                colorAspects->mTransfer);
+    }
+    ColorAspects newColorAspects;
+    ColorUtils::convertPlatformColorAspectsToCodecAspects(
+            range, standard, transfer, newColorAspects);
+    if (ColorUtils::checkIfAspectsChangedAndUnspecifyThem(
+            newColorAspects, *colorAspects)) {
+        *format = (*format)->dup();
+        (*format)->setInt32(KEY_COLOR_RANGE, range);
+        (*format)->setInt32(KEY_COLOR_STANDARD, standard);
+        (*format)->setInt32(KEY_COLOR_TRANSFER, transfer);
+        // Record current color aspects into |colorAspects|.
+        // NOTE: newColorAspects could have been modified by
+        //       checkIfAspectsChangedAndUnspecifyThem() above,
+        //       so *colorAspects = newColorAspects does not work as intended.
+        ColorUtils::convertPlatformColorAspectsToCodecAspects(
+                range, standard, transfer, *colorAspects);
+    }
+}
+
 void CCodec::onMessageReceived(const sp<AMessage> &msg) {
     TimePoint now = std::chrono::steady_clock::now();
     CCodecWatchdog::getInstance()->watch(this);
@@ -2040,6 +2137,10 @@
 
                 sp<AMessage> outputFormat = config->mOutputFormat;
                 config->updateConfiguration(updates, config->mOutputDomain);
+                if (config->mInputSurface) {
+                    android_dataspace ds = config->mInputSurface->getDataspace();
+                    HandleDataspace(ds, &config->mClientColorAspects, &config->mOutputFormat);
+                }
                 RevertOutputFormatIfNeeded(outputFormat, config->mOutputFormat);
 
                 // copy standard infos to graphic buffers if not already present (otherwise, we
@@ -2099,6 +2200,51 @@
     deadline->set(now + (timeout * mult), name);
 }
 
+status_t CCodec::configureTunneledVideoPlayback(
+        std::shared_ptr<Codec2Client::Component> comp,
+        sp<NativeHandle> *sidebandHandle,
+        const sp<AMessage> &msg) {
+    std::vector<std::unique_ptr<C2SettingResult>> failures;
+
+    std::unique_ptr<C2PortTunneledModeTuning::output> tunneledPlayback =
+        C2PortTunneledModeTuning::output::AllocUnique(
+            1,
+            C2PortTunneledModeTuning::Struct::SIDEBAND,
+            C2PortTunneledModeTuning::Struct::REALTIME,
+            0);
+    // TODO: use KEY_AUDIO_HW_SYNC, KEY_HARDWARE_AV_SYNC_ID when they are in MediaCodecConstants.h
+    if (msg->findInt32("audio-hw-sync", &tunneledPlayback->m.syncId[0])) {
+        tunneledPlayback->m.syncType = C2PortTunneledModeTuning::Struct::sync_type_t::AUDIO_HW_SYNC;
+    } else if (msg->findInt32("hw-av-sync-id", &tunneledPlayback->m.syncId[0])) {
+        tunneledPlayback->m.syncType = C2PortTunneledModeTuning::Struct::sync_type_t::HW_AV_SYNC;
+    } else {
+        tunneledPlayback->m.syncType = C2PortTunneledModeTuning::Struct::sync_type_t::REALTIME;
+        tunneledPlayback->setFlexCount(0);
+    }
+    c2_status_t c2err = comp->config({ tunneledPlayback.get() }, C2_MAY_BLOCK, &failures);
+    if (c2err != C2_OK) {
+        return UNKNOWN_ERROR;
+    }
+
+    std::vector<std::unique_ptr<C2Param>> params;
+    c2err = comp->query({}, {C2PortTunnelHandleTuning::output::PARAM_TYPE}, C2_DONT_BLOCK, &params);
+    if (c2err == C2_OK && params.size() == 1u) {
+        C2PortTunnelHandleTuning::output *videoTunnelSideband =
+            C2PortTunnelHandleTuning::output::From(params[0].get());
+        // Currently, Codec2 only supports non-fd case for sideband native_handle.
+        native_handle_t *handle = native_handle_create(0, videoTunnelSideband->flexCount());
+        *sidebandHandle = NativeHandle::create(handle, true /* ownsHandle */);
+        if (handle != nullptr && videoTunnelSideband->flexCount()) {
+            memcpy(handle->data, videoTunnelSideband->m.values,
+                    sizeof(int32_t) * videoTunnelSideband->flexCount());
+            return OK;
+        } else {
+            return NO_MEMORY;
+        }
+    }
+    return UNKNOWN_ERROR;
+}
+
 void CCodec::initiateReleaseIfStuck() {
     std::string name;
     bool pendingDeadline = false;
@@ -2111,7 +2257,9 @@
             pendingDeadline = true;
         }
     }
-    if (name.empty()) {
+    Mutexed<std::unique_ptr<Config>>::Locked configLocked(mConfig);
+    const std::unique_ptr<Config> &config = *configLocked;
+    if (config->mTunneled == false && name.empty()) {
         constexpr std::chrono::steady_clock::duration kWorkDurationThreshold = 3s;
         std::chrono::steady_clock::duration elapsed = mChannel->elapsed();
         if (elapsed >= kWorkDurationThreshold) {
@@ -2500,4 +2648,3 @@
 }
 
 }  // namespace android
-
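
The checks against 0 in HandleDataspace() treat a zeroed field as unspecified:
rather than clobbering the aspects the client last configured, the previously
tracked value is kept. A minimal sketch of that merge rule, using plain ints in
place of the ColorUtils::wrapColorAspectsInto*() helpers the patch calls:

    #include <cstdio>

    struct PlatformColor { int range; int standard; int transfer; };

    // Fields decoded as 0 (unspecified) fall back to the previous values.
    PlatformColor mergeDataspace(PlatformColor decoded, PlatformColor previous) {
        PlatformColor out = decoded;
        if (out.range == 0)    out.range    = previous.range;
        if (out.standard == 0) out.standard = previous.standard;
        if (out.transfer == 0) out.transfer = previous.transfer;
        return out;
    }

    int main() {
        PlatformColor previous = {2, 1, 3};   // last aspects seen from the client
        PlatformColor decoded  = {0, 1, 0};   // dataspace only carried a standard
        PlatformColor merged = mergeDataspace(decoded, previous);
        printf("range=%d standard=%d transfer=%d\n",
               merged.range, merged.standard, merged.transfer);  // 2 1 3
        return 0;
    }
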
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 4d2700a..ad02edb 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -1712,6 +1712,17 @@
                 }
                 break;
             }
+            case C2PortTunnelSystemTime::CORE_INDEX: {
+                C2PortTunnelSystemTime::output frameRenderTime;
+                if (frameRenderTime.updateFrom(*param)) {
+                    ALOGV("[%s] onWorkDone: frame rendered (sys:%lld ns, media:%lld us)",
+                          mName, (long long)frameRenderTime.value,
+                          (long long)worklet->output.ordinal.timestamp.peekll());
+                    mCCodecCallback->onOutputFramesRendered(
+                            worklet->output.ordinal.timestamp.peek(), frameRenderTime.value);
+                }
+                break;
+            }
             default:
                 ALOGV("[%s] onWorkDone: unrecognized config update (%08X)",
                       mName, param->index());
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index 8a2a8cf..f5cc98e 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -18,6 +18,7 @@
 #define LOG_TAG "CCodecConfig"
 #include <cutils/properties.h>
 #include <log/log.h>
+#include <utils/NativeHandle.h>
 
 #include <C2Component.h>
 #include <C2Param.h>
@@ -321,7 +322,8 @@
 CCodecConfig::CCodecConfig()
     : mInputFormat(new AMessage),
       mOutputFormat(new AMessage),
-      mUsingSurface(false) { }
+      mUsingSurface(false),
+      mTunneled(false) { }
 
 void CCodecConfig::initializeStandardParams() {
     typedef Domain D;
diff --git a/media/codec2/sfplugin/CCodecConfig.h b/media/codec2/sfplugin/CCodecConfig.h
index 2895746..d9116f7 100644
--- a/media/codec2/sfplugin/CCodecConfig.h
+++ b/media/codec2/sfplugin/CCodecConfig.h
@@ -27,6 +27,7 @@
 #include <C2Debug.h>
 
 #include <codec2/hidl/client.h>
+#include <media/stagefright/foundation/ColorUtils.h>
 #include <utils/RefBase.h>
 
 #include "InputSurfaceWrapper.h"
@@ -35,6 +36,7 @@
 namespace android {
 
 struct AMessage;
+class NativeHandle;
 struct StandardParams;
 
 /**
@@ -123,6 +125,7 @@
 
     std::shared_ptr<InputSurfaceWrapper> mInputSurface;
     std::unique_ptr<InputSurfaceWrapper::Config> mISConfig;
+    ColorAspects mClientColorAspects;
 
     /// the current configuration. Updated after configure() and based on configUpdate in
     /// onWorkDone
@@ -141,6 +144,10 @@
 
     std::set<std::string> mLastConfig;
 
+    /// Tunneled codecs
+    bool mTunneled;
+    sp<NativeHandle> mSidebandHandle;
+
     CCodecConfig();
 
     /// initializes the members required to manage the format: descriptors, reflector,
diff --git a/media/codec2/sfplugin/InputSurfaceWrapper.h b/media/codec2/sfplugin/InputSurfaceWrapper.h
index bb35763..bb7ca02 100644
--- a/media/codec2/sfplugin/InputSurfaceWrapper.h
+++ b/media/codec2/sfplugin/InputSurfaceWrapper.h
@@ -106,6 +106,8 @@
      */
     virtual void onInputBufferDone(c2_cntr64_t /* index */) {}
 
+    virtual android_dataspace getDataspace() { return mDataSpace; }
+
 protected:
     android_dataspace mDataSpace;
 };
diff --git a/media/codec2/sfplugin/include/media/stagefright/CCodec.h b/media/codec2/sfplugin/include/media/stagefright/CCodec.h
index dbbb5d5..ba69d7e 100644
--- a/media/codec2/sfplugin/include/media/stagefright/CCodec.h
+++ b/media/codec2/sfplugin/include/media/stagefright/CCodec.h
@@ -126,6 +126,11 @@
             const std::chrono::milliseconds &timeout,
             const char *name);
 
+    status_t configureTunneledVideoPlayback(
+            const std::shared_ptr<Codec2Client::Component> comp,
+            sp<NativeHandle> *sidebandHandle,
+            const sp<AMessage> &msg);
+
     enum {
         kWhatAllocate,
         kWhatConfigure,
diff --git a/media/libeffects/preprocessing/Android.bp b/media/libeffects/preprocessing/Android.bp
index eb3ce34..87ed8b6 100644
--- a/media/libeffects/preprocessing/Android.bp
+++ b/media/libeffects/preprocessing/Android.bp
@@ -22,6 +22,7 @@
     name: "libaudiopreprocessing",
     vendor: true,
     relative_install_path: "soundfx",
+    host_supported: true,
     srcs: ["PreProcessing.cpp"],
     local_include_dirs: [
         ".",
@@ -47,4 +48,9 @@
         "libhardware_headers",
         "libwebrtc_absl_headers",
     ],
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
diff --git a/media/libeffects/preprocessing/PreProcessing.cpp b/media/libeffects/preprocessing/PreProcessing.cpp
index 03ccc34..3b0b6d6 100644
--- a/media/libeffects/preprocessing/PreProcessing.cpp
+++ b/media/libeffects/preprocessing/PreProcessing.cpp
@@ -670,8 +670,8 @@
     return 0;
 }
 
-int NsGetParameter(preproc_effect_t* effect __unused, void* pParam __unused,
-                   uint32_t* pValueSize __unused, void* pValue __unused) {
+int NsGetParameter(preproc_effect_t* /*effect __unused*/, void* /*pParam __unused*/,
+                   uint32_t* /*pValueSize __unused*/, void* /*pValue __unused*/) {
     int status = 0;
     return status;
 }
@@ -910,7 +910,7 @@
         session->apm = NULL;
         delete session->inBuf;
         session->inBuf = NULL;
-        delete session->outBuf;
+        free(session->outBuf);
         session->outBuf = NULL;
         delete session->revBuf;
         session->revBuf = NULL;
@@ -1551,7 +1551,7 @@
 }
 
 int PreProcessingFx_ProcessReverse(effect_handle_t self, audio_buffer_t* inBuffer,
-                                   audio_buffer_t* outBuffer __unused) {
+                                   audio_buffer_t* outBuffer) {
     preproc_effect_t* effect = (preproc_effect_t*)self;
 
     if (effect == NULL) {
diff --git a/media/libeffects/preprocessing/tests/Android.bp b/media/libeffects/preprocessing/tests/Android.bp
index 8848e79..6413945 100644
--- a/media/libeffects/preprocessing/tests/Android.bp
+++ b/media/libeffects/preprocessing/tests/Android.bp
@@ -13,6 +13,8 @@
 cc_test {
     name: "AudioPreProcessingTest",
     vendor: true,
+    host_supported: true,
+    gtest: false,
     srcs: ["PreProcessingTest.cpp"],
     shared_libs: [
         "libaudioutils",
@@ -27,7 +29,11 @@
         "libaudioeffects",
         "libhardware_headers",
     ],
-    gtest: false,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
 }
 
 cc_test {
diff --git a/media/libeffects/preprocessing/tests/PreProcessingTest.cpp b/media/libeffects/preprocessing/tests/PreProcessingTest.cpp
index 5f223c9..e0025fe 100644
--- a/media/libeffects/preprocessing/tests/PreProcessingTest.cpp
+++ b/media/libeffects/preprocessing/tests/PreProcessingTest.cpp
@@ -24,6 +24,7 @@
 #include <audio_effects/effect_agc.h>
 #include <audio_effects/effect_agc2.h>
 #include <audio_effects/effect_ns.h>
+#include <audio_utils/channels.h>
 #include <log/log.h>
 
 // This is the only symbol that needs to be imported
@@ -55,7 +56,9 @@
     ARG_NS_LVL,
     ARG_AGC2_GAIN,
     ARG_AGC2_LVL,
-    ARG_AGC2_SAT_MGN
+    ARG_AGC2_SAT_MGN,
+    ARG_FILE_CHANNELS,
+    ARG_MONO_MODE
 };
 
 struct preProcConfigParams_t {
@@ -68,6 +71,8 @@
     float agc2SaturationMargin = 2.f;  // in dB
     int agc2Level = 0;                 // either kRms(0) or kPeak(1)
     int aecDelay = 0;  // in ms
+    int fileChannels = 1;
+    int monoMode = 0;
 };
 
 const effect_uuid_t kPreProcUuids[PREPROC_NUM_EFFECTS] = {
@@ -106,7 +111,7 @@
     printf("\n           Prints this usage information");
     printf("\n     --fs <sampling_freq>");
     printf("\n           Sampling frequency in Hz, default 16000.");
-    printf("\n     -ch_mask <channel_mask>\n");
+    printf("\n     --ch_mask <channel_mask>\n");
     printf("\n         0  - AUDIO_CHANNEL_IN_MONO");
     printf("\n         1  - AUDIO_CHANNEL_IN_STEREO");
     printf("\n         2  - AUDIO_CHANNEL_IN_FRONT_BACK");
@@ -144,6 +149,10 @@
     printf("\n           AGC Adaptive Digital Saturation Margin in dB, default value 2dB");
     printf("\n     --aec_delay <delay>");
     printf("\n           AEC delay value in ms, default value 0ms");
+    printf("\n     --fch <fileChannels>");
+    printf("\n           Number of channels in the input file (default 1)");
+    printf("\n     --mono");
+    printf("\n           Replicate channel 0 data to all the other channels (takes no argument)");
     printf("\n");
 }
 
@@ -189,10 +198,17 @@
         printUsage();
         return EXIT_FAILURE;
     }
+
+    // Print the arguments passed
+    for (int i = 1; i < argc; i++) {
+        printf("%s ", argv[i]);
+    }
+
     const char* inputFile = nullptr;
     const char* outputFile = nullptr;
     const char* farFile = nullptr;
     int effectEn[PREPROC_NUM_EFFECTS] = {0};
+    struct preProcConfigParams_t preProcCfgParams {};
 
     const option long_opts[] = {
             {"help", no_argument, nullptr, ARG_HELP},
@@ -212,9 +228,10 @@
             {"agc", no_argument, &effectEn[PREPROC_AGC], 1},
             {"agc2", no_argument, &effectEn[PREPROC_AGC2], 1},
             {"ns", no_argument, &effectEn[PREPROC_NS], 1},
+            {"fch", required_argument, nullptr, ARG_FILE_CHANNELS},
+            {"mono", no_argument, &preProcCfgParams.monoMode, 1},
             {nullptr, 0, nullptr, 0},
     };
-    struct preProcConfigParams_t preProcCfgParams {};
 
     while (true) {
         const int opt = getopt_long(argc, (char* const*)argv, "i:o:", long_opts, nullptr);
@@ -279,6 +296,14 @@
                 preProcCfgParams.nsLevel = atoi(optarg);
                 break;
             }
+            case ARG_FILE_CHANNELS: {
+                preProcCfgParams.fileChannels = atoi(optarg);
+                break;
+            }
+            case ARG_MONO_MODE: {
+                preProcCfgParams.monoMode = 1;
+                break;
+            }
             default:
                 break;
         }
@@ -402,16 +427,28 @@
     // Process Call
     const int frameLength = (int)(preProcCfgParams.samplingFreq * kTenMilliSecVal);
     const int ioChannelCount = audio_channel_count_from_in_mask(preProcCfgParams.chMask);
+    const int fileChannelCount = preProcCfgParams.fileChannels;
     const int ioFrameSize = ioChannelCount * sizeof(short);
+    const int inFrameSize = fileChannelCount * sizeof(short);
     int frameCounter = 0;
     while (true) {
         std::vector<short> in(frameLength * ioChannelCount);
         std::vector<short> out(frameLength * ioChannelCount);
         std::vector<short> farIn(frameLength * ioChannelCount);
-        size_t samplesRead = fread(in.data(), ioFrameSize, frameLength, inputFp.get());
+        size_t samplesRead = fread(in.data(), inFrameSize, frameLength, inputFp.get());
         if (samplesRead == 0) {
             break;
         }
+        if (fileChannelCount != ioChannelCount) {
+            adjust_channels(in.data(), fileChannelCount, in.data(), ioChannelCount, sizeof(short),
+                            frameLength * inFrameSize);
+            if (preProcCfgParams.monoMode == 1) {
+                for (int i = 0; i < frameLength; ++i) {
+                    auto* fp = &in[i * ioChannelCount];
+                    std::fill(fp + 1, fp + ioChannelCount, *fp);  // replicate ch 0
+                }
+            }
+        }
         audio_buffer_t inputBuffer, outputBuffer;
         audio_buffer_t farInBuffer{};
         inputBuffer.frameCount = samplesRead;
@@ -420,10 +457,21 @@
         outputBuffer.s16 = out.data();
 
         if (farFp != nullptr) {
-            samplesRead = fread(farIn.data(), ioFrameSize, frameLength, farFp.get());
+            samplesRead = fread(farIn.data(), inFrameSize, frameLength, farFp.get());
             if (samplesRead == 0) {
                 break;
             }
+            if (fileChannelCount != ioChannelCount) {
+                adjust_channels(farIn.data(), fileChannelCount, farIn.data(), ioChannelCount,
+                                sizeof(short), frameLength * inFrameSize);
+                if (preProcCfgParams.monoMode == 1) {
+                    for (int i = 0; i < frameLength; ++i) {
+                        auto* fp = &farIn[i * ioChannelCount];
+                        std::fill(fp + 1, fp + ioChannelCount, *fp);  // replicate ch 0
+                    }
+                }
+            }
+
             farInBuffer.frameCount = samplesRead;
             farInBuffer.s16 = farIn.data();
         }
@@ -458,8 +506,12 @@
             }
         }
         if (outputFp != nullptr) {
+            if (fileChannelCount != ioChannelCount) {
+                adjust_channels(out.data(), ioChannelCount, out.data(), fileChannelCount,
+                                sizeof(short), frameLength * ioFrameSize);
+            }
             size_t samplesWritten =
-                    fwrite(out.data(), ioFrameSize, outputBuffer.frameCount, outputFp.get());
+                    fwrite(out.data(), inFrameSize, outputBuffer.frameCount, outputFp.get());
             if (samplesWritten != outputBuffer.frameCount) {
                 ALOGE("\nError: Output file writing failed");
                 break;
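
For context, a standalone sketch of what the new --fch/--mono handling does to
each frame: file data with fileChannels channels is expanded to the processing
channel count, then channel 0 is replicated into the other channels when mono
mode is enabled. The expansion below stands in for adjust_channels() from
libaudioutils, which the test uses and which zero-fills the added channels;
only the one-channel input case is sketched.

    #include <algorithm>
    #include <cstdio>
    #include <vector>

    // Naive 1 -> N channel expansion (extra channels zero-filled).
    std::vector<short> expandMono(const std::vector<short>& in, int outChannels) {
        std::vector<short> out(in.size() * outChannels, 0);
        for (size_t i = 0; i < in.size(); ++i) out[i * outChannels] = in[i];
        return out;
    }

    // Mono mode: copy channel 0 of every frame into the remaining channels.
    void replicateChannelZero(std::vector<short>& buf, int channels) {
        for (size_t i = 0; i < buf.size(); i += channels) {
            std::fill(buf.begin() + i + 1, buf.begin() + i + channels, buf[i]);
        }
    }

    int main() {
        std::vector<short> fileFrame = {10, 20, 30};     // 3 mono samples from the file
        std::vector<short> stereo = expandMono(fileFrame, 2);
        replicateChannelZero(stereo, 2);
        for (short s : stereo) printf("%d ", s);         // 10 10 20 20 30 30
        printf("\n");
        return 0;
    }
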
diff --git a/media/libeffects/preprocessing/tests/build_and_run_all_unit_tests.sh b/media/libeffects/preprocessing/tests/build_and_run_all_unit_tests.sh
new file mode 100755
index 0000000..942f2ec
--- /dev/null
+++ b/media/libeffects/preprocessing/tests/build_and_run_all_unit_tests.sh
@@ -0,0 +1,115 @@
+#!/bin/bash
+#
+# Run tests in this directory.
+#
+
+if [ -z "$ANDROID_BUILD_TOP" ]; then
+    echo "Android build environment not set"
+    exit -1
+fi
+
+# ensure we have mm
+. $ANDROID_BUILD_TOP/build/envsetup.sh
+
+mm -j
+
+echo "waiting for device"
+
+adb root && adb wait-for-device remount
+
+# location of test files
+testdir="/data/local/tmp/AudioPreProcessingTest"
+
+echo "========================================"
+echo "testing PreProcessing modules"
+adb shell mkdir -p $testdir
+adb push $ANDROID_BUILD_TOP/frameworks/av/media/libeffects/res/raw/sinesweepraw.raw $testdir
+adb push $OUT/testcases/snr/arm64/snr $testdir
+
+E_VAL=1
+if [ -z "$1" ]
+then
+    cmds=("adb push $OUT/testcases/AudioPreProcessingTest/arm64/AudioPreProcessingTest $testdir"
+          "adb push $OUT/testcases/AudioPreProcessingTest/arm/AudioPreProcessingTest $testdir"
+)
+elif [ "$1" == "32" ]
+then
+    cmds="adb push $OUT/testcases/AudioPreProcessingTest/arm/AudioPreProcessingTest $testdir"
+elif [ "$1" == "64" ]
+then
+    cmds="adb push $OUT/testcases/AudioPreProcessingTest/arm64/AudioPreProcessingTest $testdir"
+else
+    echo ""
+    echo "Invalid \"val\""
+    echo "Usage:"
+    echo "      "$0" [val]"
+    echo "      where, val can be either 32 or 64."
+    echo ""
+    echo "      If val is not specified then both 32 bit and 64 bit binaries"
+    echo "      are tested."
+    exit $E_VAL
+fi
+
+flags_arr=(
+    "--agc --mono"
+    "--ns --mono"
+    "--agc2 --mono"
+    "--aec --mono"
+)
+
+fs_arr=(
+    8000
+    16000
+    24000
+    32000
+    48000
+)
+
+# run multichannel effects at different configs, saving only the mono channel
+error_count=0
+test_count=0
+for cmd in "${cmds[@]}"
+do
+    $cmd
+    for flags in "${flags_arr[@]}"
+    do
+        for fs in ${fs_arr[*]}
+        do
+            for chMask in {0..7}
+            do
+                adb shell $testdir/AudioPreProcessingTest $flags \
+                    --i $testdir/sinesweepraw.raw --far $testdir/sinesweepraw.raw \
+                    --output $testdir/sinesweep_$((chMask))_$((fs)).raw --ch_mask $chMask \
+                    --fs $fs --fch 1
+
+                shell_ret=$?
+                if [ $shell_ret -ne 0 ]; then
+                    echo "error: AudioPreProcessingTest returned $shell_ret"
+                    ((++error_count))
+                fi
+
+
+                # The single-channel output should be identical to the first
+                # channel computed by the higher channel-count runs.
+                if  [[ "$chMask" -gt 1 ]]
+                then
+                    adb shell cmp $testdir/sinesweep_1_$((fs)).raw \
+                        $testdir/sinesweep_$((chMask))_$((fs)).raw
+                fi
+
+                # cmp return EXIT_FAILURE on mismatch.
+                shell_ret=$?
+                # cmp returns EXIT_FAILURE on mismatch.
+                    echo "error: $shell_ret"
+                    ((++error_count))
+                fi
+                ((++test_count))
+            done
+        done
+    done
+done
+
+adb shell rm -r $testdir
+echo "$test_count tests performed"
+echo "$error_count errors"
+exit $error_count
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index 4d90d98..7cda2fb 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -1822,8 +1822,7 @@
 //static
 void MediaPlayerService::AudioOutput::setMinBufferCount()
 {
-    char value[PROPERTY_VALUE_MAX];
-    if (property_get("ro.kernel.qemu", value, 0)) {
+    if (property_get_bool("ro.boot.qemu", false)) {
         mIsOnEmulator = true;
         mMinBufferCount = 12;  // to prevent systematic buffer underrun for emulator
     }
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 9b3f420..4e34a26 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -797,7 +797,7 @@
     mStartTimeUs = 0;
     mNumInputBuffers = 0;
     mEncoderFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
-    mEncoderDataSpace = HAL_DATASPACE_V0_BT709;
+    mEncoderDataSpace = mBufferDataSpace = HAL_DATASPACE_V0_BT709;
 
     if (meta) {
         int64_t startTimeUs;
@@ -817,6 +817,7 @@
         }
         if (meta->findInt32(kKeyColorSpace, &mEncoderDataSpace)) {
             ALOGI("Using encoder data space: %#x", mEncoderDataSpace);
+            mBufferDataSpace = mEncoderDataSpace;
         }
     }
 
@@ -1114,6 +1115,11 @@
         (*buffer)->setObserver(this);
         (*buffer)->add_ref();
         (*buffer)->meta_data().setInt64(kKeyTime, frameTime);
+        if (mBufferDataSpace != mEncoderDataSpace) {
+            ALOGD("Data space updated to %x", mBufferDataSpace);
+            (*buffer)->meta_data().setInt32(kKeyColorSpace, mBufferDataSpace);
+            mEncoderDataSpace = mBufferDataSpace;
+        }
     }
     return OK;
 }
@@ -1391,6 +1397,7 @@
     // Find a available memory slot to store the buffer as VideoNativeMetadata.
     sp<IMemory> data = *mMemoryBases.begin();
     mMemoryBases.erase(mMemoryBases.begin());
+    mBufferDataSpace = buffer.mDataSpace;
 
     ssize_t offset;
     size_t size;
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index d99596e..b2fae96 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -4217,13 +4217,20 @@
 void MPEG4Writer::Track::writeColrBox() {
     ColorAspects aspects;
     memset(&aspects, 0, sizeof(aspects));
+    // Color metadata may have changed.
+    sp<MetaData> meta = mSource->getFormat();
     // TRICKY: using | instead of || because we want to execute all findInt32-s
-    if (mMeta->findInt32(kKeyColorPrimaries, (int32_t*)&aspects.mPrimaries)
-            | mMeta->findInt32(kKeyTransferFunction, (int32_t*)&aspects.mTransfer)
-            | mMeta->findInt32(kKeyColorMatrix, (int32_t*)&aspects.mMatrixCoeffs)
-            | mMeta->findInt32(kKeyColorRange, (int32_t*)&aspects.mRange)) {
+    if (meta->findInt32(kKeyColorPrimaries, (int32_t*)&aspects.mPrimaries)
+            | meta->findInt32(kKeyTransferFunction, (int32_t*)&aspects.mTransfer)
+            | meta->findInt32(kKeyColorMatrix, (int32_t*)&aspects.mMatrixCoeffs)
+            | meta->findInt32(kKeyColorRange, (int32_t*)&aspects.mRange)) {
         int32_t primaries, transfer, coeffs;
         bool fullRange;
+        ALOGV("primaries=%s transfer=%s matrix=%s range=%s",
+                asString(aspects.mPrimaries),
+                asString(aspects.mTransfer),
+                asString(aspects.mMatrixCoeffs),
+                asString(aspects.mRange));
         ColorUtils::convertCodecColorAspectsToIsoAspects(
                 aspects, &primaries, &transfer, &coeffs, &fullRange);
         mOwner->beginBox("colr");
@@ -4233,6 +4240,8 @@
         mOwner->writeInt16(coeffs);
         mOwner->writeInt8(int8_t(fullRange ? 0x80 : 0x0));
         mOwner->endBox(); // colr
+    } else {
+        ALOGV("no color information");
     }
 }
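
For reference, a standalone sketch of the payload this writes, assuming the
standard ISO/IEC 14496-12 nclx colour_type that MPEG4Writer uses: the fourcc
"nclx", 16-bit colour_primaries, transfer_characteristics and
matrix_coefficients, then one byte whose top bit is the full-range flag (the
0x80 written above). The box size/type framing from beginBox()/endBox() is
omitted.

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    std::vector<uint8_t> buildColrPayload(uint16_t primaries, uint16_t transfer,
                                          uint16_t coeffs, bool fullRange) {
        std::vector<uint8_t> p = {'n', 'c', 'l', 'x'};
        auto put16 = [&p](uint16_t v) { p.push_back(v >> 8); p.push_back(v & 0xff); };
        put16(primaries);
        put16(transfer);
        put16(coeffs);
        p.push_back(fullRange ? 0x80 : 0x00);
        return p;
    }

    int main() {
        // ISO values for BT.709, limited range: primaries 1, transfer 1, matrix 1.
        std::vector<uint8_t> payload = buildColrPayload(1, 1, 1, false);
        for (uint8_t b : payload) printf("%02x ", b);  // 6e 63 6c 78 00 01 00 01 00 01 00
        printf("\n");
        return 0;
    }
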
 
diff --git a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp
index bc656a2..0f7df24 100644
--- a/media/libstagefright/MediaCodecSource.cpp
+++ b/media/libstagefright/MediaCodecSource.cpp
@@ -30,6 +30,7 @@
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/ALooper.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ColorUtils.h>
 #include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaCodec.h>
 #include <media/stagefright/MediaCodecConstants.h>
@@ -768,6 +769,26 @@
             memcpy(inbuf->data(), mbuf->data(), size);
 
             if (mIsVideo) {
+                int32_t ds = 0;
+                if (mbuf->meta_data().findInt32(kKeyColorSpace, &ds)
+                        && ds != HAL_DATASPACE_UNKNOWN) {
+                    android_dataspace dataspace = static_cast<android_dataspace>(ds);
+                    ColorUtils::convertDataSpaceToV0(dataspace);
+                    ALOGD("Updating dataspace to %x", dataspace);
+                    int32_t standard = (int32_t(dataspace) & HAL_DATASPACE_STANDARD_MASK)
+                        >> HAL_DATASPACE_STANDARD_SHIFT;
+                    int32_t transfer = (int32_t(dataspace) & HAL_DATASPACE_TRANSFER_MASK)
+                        >> HAL_DATASPACE_TRANSFER_SHIFT;
+                    int32_t range = (int32_t(dataspace) & HAL_DATASPACE_RANGE_MASK)
+                        >> HAL_DATASPACE_RANGE_SHIFT;
+                    sp<AMessage> msg = new AMessage;
+                    msg->setInt32(KEY_COLOR_STANDARD, standard);
+                    msg->setInt32(KEY_COLOR_TRANSFER, transfer);
+                    msg->setInt32(KEY_COLOR_RANGE, range);
+                    msg->setInt32("android._dataspace", dataspace);
+                    mEncoder->setParameters(msg);
+                }
+
                 // video encoder will release MediaBuffer when done
                 // with underlying data.
                 inbuf->meta()->setObject("mediaBufferHolder", new MediaBufferHolder(mbuf));
diff --git a/media/libstagefright/include/media/stagefright/CameraSource.h b/media/libstagefright/include/media/stagefright/CameraSource.h
index 6f0d3b5..efdfa02 100644
--- a/media/libstagefright/include/media/stagefright/CameraSource.h
+++ b/media/libstagefright/include/media/stagefright/CameraSource.h
@@ -192,6 +192,7 @@
     int32_t  mColorFormat;
     int32_t  mEncoderFormat;
     int32_t  mEncoderDataSpace;
+    int32_t  mBufferDataSpace;
     status_t mInitCheck;
 
     sp<Camera>   mCamera;
diff --git a/media/tests/SampleVideoEncoder/app/Android.bp b/media/tests/SampleVideoEncoder/app/Android.bp
index 35fe0d8..3a66955 100644
--- a/media/tests/SampleVideoEncoder/app/Android.bp
+++ b/media/tests/SampleVideoEncoder/app/Android.bp
@@ -14,6 +14,15 @@
  * limitations under the License.
  */
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
 android_app {
     name: "SampleVideoEncoder",