Merge "API changes in MicrophoneDirection"
diff --git a/camera/ndk/NdkCameraDevice.cpp b/camera/ndk/NdkCameraDevice.cpp
index 09b85d5..691996b 100644
--- a/camera/ndk/NdkCameraDevice.cpp
+++ b/camera/ndk/NdkCameraDevice.cpp
@@ -287,3 +287,16 @@
     }
     return device->createCaptureSession(outputs, sessionParameters, callbacks, session);
 }
+
+EXPORT
+camera_status_t ACameraDevice_isSessionConfigurationSupported(
+        const ACameraDevice* device,
+        const ACaptureSessionOutputContainer* sessionOutputContainer) {
+    ATRACE_CALL();
+    if (device == nullptr || sessionOutputContainer == nullptr) {
+        ALOGE("%s: Error: invalid input: device %p, sessionOutputContainer %p",
+                __FUNCTION__, device, sessionOutputContainer);
+        return ACAMERA_ERROR_INVALID_PARAMETER;
+    }
+    return device->isSessionConfigurationSupported(sessionOutputContainer);
+}
diff --git a/camera/ndk/impl/ACameraDevice.cpp b/camera/ndk/impl/ACameraDevice.cpp
index 5e4fcd0..c9db01e 100644
--- a/camera/ndk/impl/ACameraDevice.cpp
+++ b/camera/ndk/impl/ACameraDevice.cpp
@@ -227,6 +227,55 @@
     return ACAMERA_OK;
 }
 
+camera_status_t CameraDevice::isSessionConfigurationSupported(
+        const ACaptureSessionOutputContainer* sessionOutputContainer) const {
+    Mutex::Autolock _l(mDeviceLock);
+    camera_status_t ret = checkCameraClosedOrErrorLocked();
+    if (ret != ACAMERA_OK) {
+        return ret;
+    }
+
+    SessionConfiguration sessionConfiguration(0 /*inputWidth*/, 0 /*inputHeight*/,
+            -1 /*inputFormat*/, CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE);
+    for (const auto& output : sessionOutputContainer->mOutputs) {
+        sp<IGraphicBufferProducer> iGBP(nullptr);
+        ret = getIGBPfromAnw(output.mWindow, iGBP);
+        if (ret != ACAMERA_OK) {
+            ALOGE("Camera device %s failed to extract graphic producer from native window",
+                    getId());
+            return ret;
+        }
+
+        String16 physicalId16(output.mPhysicalCameraId.c_str());
+        OutputConfiguration outConfig(iGBP, output.mRotation, physicalId16,
+                OutputConfiguration::INVALID_SET_ID, true);
+
+        for (auto& anw : output.mSharedWindows) {
+            ret = getIGBPfromAnw(anw, iGBP);
+            if (ret != ACAMERA_OK) {
+                ALOGE("Camera device %s failed to extract graphic producer from native window",
+                        getId());
+                return ret;
+            }
+            outConfig.addGraphicProducer(iGBP);
+        }
+
+        sessionConfiguration.addOutputConfiguration(outConfig);
+    }
+
+    bool supported = false;
+    binder::Status remoteRet = mRemote->isSessionConfigurationSupported(
+            sessionConfiguration, &supported);
+    if (remoteRet.serviceSpecificErrorCode() ==
+            hardware::ICameraService::ERROR_INVALID_OPERATION) {
+        return ACAMERA_ERROR_UNSUPPORTED_OPERATION;
+    } else if (!remoteRet.isOk()) {
+        return ACAMERA_ERROR_UNKNOWN;
+    } else {
+        return supported ? ACAMERA_OK : ACAMERA_ERROR_STREAM_CONFIGURE_FAIL;
+    }
+}
+
 camera_status_t CameraDevice::updateOutputConfigurationLocked(ACaptureSessionOutput *output) {
     camera_status_t ret = checkCameraClosedOrErrorLocked();
     if (ret != ACAMERA_OK) {
diff --git a/camera/ndk/impl/ACameraDevice.h b/camera/ndk/impl/ACameraDevice.h
index 103efd5..56741ce 100644
--- a/camera/ndk/impl/ACameraDevice.h
+++ b/camera/ndk/impl/ACameraDevice.h
@@ -35,6 +35,7 @@
 #include <media/stagefright/foundation/AMessage.h>
 #include <camera/CaptureResult.h>
 #include <camera/camera2/OutputConfiguration.h>
+#include <camera/camera2/SessionConfiguration.h>
 #include <camera/camera2/CaptureRequest.h>
 
 #include <camera/NdkCameraManager.h>
@@ -77,6 +78,9 @@
             const ACameraCaptureSession_stateCallbacks* callbacks,
             /*out*/ACameraCaptureSession** session);
 
+    camera_status_t isSessionConfigurationSupported(
+            const ACaptureSessionOutputContainer* sessionOutputContainer) const;
+
     // Callbacks from camera service
     class ServiceCallback : public hardware::camera2::BnCameraDeviceCallbacks {
       public:
@@ -369,6 +373,11 @@
         return mDevice->createCaptureSession(outputs, sessionParameters, callbacks, session);
     }
 
+    camera_status_t isSessionConfigurationSupported(
+            const ACaptureSessionOutputContainer* sessionOutputContainer) const {
+        return mDevice->isSessionConfigurationSupported(sessionOutputContainer);
+    }
+
     /***********************
      * Device interal APIs *
      ***********************/
diff --git a/camera/ndk/include/camera/NdkCameraDevice.h b/camera/ndk/include/camera/NdkCameraDevice.h
index cedf83a..bc544e3 100644
--- a/camera/ndk/include/camera/NdkCameraDevice.h
+++ b/camera/ndk/include/camera/NdkCameraDevice.h
@@ -845,6 +845,43 @@
         const ACameraIdList* physicalIdList,
         /*out*/ACaptureRequest** request) __INTRODUCED_IN(29);
 
+/**
+ * Check whether a particular {@link ACaptureSessionOutputContainer} is supported by
+ * the camera device.
+ *
+ * <p>This method performs a runtime check of a given {@link
+ * ACaptureSessionOutputContainer}. The result confirms whether or not the
+ * passed CaptureSession outputs can be successfully used to create a camera
+ * capture session using {@link ACameraDevice_createCaptureSession}.</p>
+ *
+ * <p>This method can be called at any point before, during and after an active
+ * capture session. It must not impact normal camera behavior in any way and
+ * must complete significantly faster than creating a capture session.</p>
+ *
+ * <p>Although this method is faster than creating a new capture session, it is not intended
+ * to be used for exploring the entire space of supported stream combinations.</p>
+ *
+ * @param device the camera device of interest
+ * @param sessionOutputContainer the {@link ACaptureSessionOutputContainer} of
+ *                               interest.
+ *
+ * @return <ul>
+ *         <li>{@link ACAMERA_OK} if the given {@link ACaptureSessionOutputContainer}
+ *                                is supported by the camera device.</li>
+ *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if device or sessionOutputContainer
+ *                                                     is NULL.</li>
+ *         <li>{@link ACAMERA_ERROR_STREAM_CONFIGURE_FAIL} if the given
+ *                                                         {@link ACaptureSessionOutputContainer}
+ *                                                         is not supported by the
+ *                                                         camera device.</li>
+ *         <li>{@link ACAMERA_ERROR_UNSUPPORTED_OPERATION} if the query operation is not
+ *                                                         supported by the camera device.</li>
+ *         </ul>
+ */
+camera_status_t ACameraDevice_isSessionConfigurationSupported(
+        const ACameraDevice* device,
+        const ACaptureSessionOutputContainer* sessionOutputContainer) __INTRODUCED_IN(29);
+
 #endif /* __ANDROID_API__ >= 29 */
 
 __END_DECLS
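
A minimal caller-side sketch of the new query (not part of the patch): it assumes `device`, an `ACaptureSessionOutputContainer* outputs`, and session callbacks have already been prepared, and only shows how the documented return codes are meant to gate the follow-up call to ACameraDevice_createCaptureSession, mirroring the check added to AImageReaderVendorTest.cpp later in this change.

    #include <camera/NdkCameraDevice.h>

    // Probe the output combination first; fall through to session creation when
    // the query itself is not supported by the device.
    static camera_status_t createSessionIfSupported(
            ACameraDevice* device,
            ACaptureSessionOutputContainer* outputs,
            const ACameraCaptureSession_stateCallbacks* callbacks,
            ACameraCaptureSession** session) {
        camera_status_t ret =
                ACameraDevice_isSessionConfigurationSupported(device, outputs);
        if (ret != ACAMERA_OK && ret != ACAMERA_ERROR_UNSUPPORTED_OPERATION) {
            // ACAMERA_ERROR_STREAM_CONFIGURE_FAIL (or another error): give up early.
            return ret;
        }
        return ACameraDevice_createCaptureSession(device, outputs, callbacks, session);
    }
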
diff --git a/camera/ndk/include/camera/NdkCameraError.h b/camera/ndk/include/camera/NdkCameraError.h
index 6b58155..fc618ee 100644
--- a/camera/ndk/include/camera/NdkCameraError.h
+++ b/camera/ndk/include/camera/NdkCameraError.h
@@ -106,7 +106,8 @@
 
     /**
      * Camera device does not support the stream configuration provided by application in
-     * {@link ACameraDevice_createCaptureSession}.
+     * {@link ACameraDevice_createCaptureSession} or {@link
+     * ACameraDevice_isSessionConfigurationSupported}.
      */
     ACAMERA_ERROR_STREAM_CONFIGURE_FAIL = ACAMERA_ERROR_BASE - 9,
 
@@ -130,6 +131,11 @@
      * The application does not have permission to open camera.
      */
     ACAMERA_ERROR_PERMISSION_DENIED     = ACAMERA_ERROR_BASE - 13,
+
+    /**
+     * The operation is not supported by the camera device.
+     */
+    ACAMERA_ERROR_UNSUPPORTED_OPERATION = ACAMERA_ERROR_BASE - 14,
 } camera_status_t;
 
 #endif /* __ANDROID_API__ >= 24 */
diff --git a/camera/ndk/libcamera2ndk.map.txt b/camera/ndk/libcamera2ndk.map.txt
index 946a98e..b6f1553 100644
--- a/camera/ndk/libcamera2ndk.map.txt
+++ b/camera/ndk/libcamera2ndk.map.txt
@@ -14,6 +14,7 @@
     ACameraDevice_createCaptureRequest_withPhysicalIds; # introduced=29
     ACameraDevice_createCaptureSession;
     ACameraDevice_createCaptureSessionWithSessionParameters; # introduced=28
+    ACameraDevice_isSessionConfigurationSupported; # introduced=29
     ACameraDevice_getId;
     ACameraManager_create;
     ACameraManager_delete;
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
index a38a31e..d7d774b 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
@@ -38,6 +38,7 @@
 
 using HCameraMetadata = frameworks::cameraservice::device::V2_0::CameraMetadata;
 using OutputConfiguration = frameworks::cameraservice::device::V2_0::OutputConfiguration;
+using SessionConfiguration = frameworks::cameraservice::device::V2_0::SessionConfiguration;
 using hardware::Void;
 
 // Static member definitions
@@ -216,6 +217,47 @@
     return ACAMERA_OK;
 }
 
+camera_status_t CameraDevice::isSessionConfigurationSupported(
+        const ACaptureSessionOutputContainer* sessionOutputContainer) const {
+    Mutex::Autolock _l(mDeviceLock);
+    camera_status_t ret = checkCameraClosedOrErrorLocked();
+    if (ret != ACAMERA_OK) {
+        return ret;
+    }
+
+    SessionConfiguration sessionConfig;
+    sessionConfig.inputWidth = 0;
+    sessionConfig.inputHeight = 0;
+    sessionConfig.inputFormat = -1;
+    sessionConfig.operationMode = StreamConfigurationMode::NORMAL_MODE;
+    sessionConfig.outputStreams.resize(sessionOutputContainer->mOutputs.size());
+    size_t index = 0;
+    for (const auto& output : sessionOutputContainer->mOutputs) {
+        sessionConfig.outputStreams[index].rotation = utils::convertToHidl(output.mRotation);
+        sessionConfig.outputStreams[index].windowGroupId = -1;
+        sessionConfig.outputStreams[index].windowHandles.resize(output.mSharedWindows.size() + 1);
+        sessionConfig.outputStreams[index].windowHandles[0] = output.mWindow;
+        sessionConfig.outputStreams[index].physicalCameraId = output.mPhysicalCameraId;
+        index++;
+    }
+
+    bool configSupported = false;
+    Status status = Status::NO_ERROR;
+    auto remoteRet = mRemote->isSessionConfigurationSupported(sessionConfig,
+        [&status, &configSupported](auto s, auto supported) {
+            status = s;
+            configSupported = supported;
+        });
+
+    if (status == Status::INVALID_OPERATION) {
+        return ACAMERA_ERROR_UNSUPPORTED_OPERATION;
+    } else if (!remoteRet.isOk()) {
+        return ACAMERA_ERROR_UNKNOWN;
+    } else {
+        return configSupported ? ACAMERA_OK : ACAMERA_ERROR_STREAM_CONFIGURE_FAIL;
+    }
+}
+
 void CameraDevice::addRequestSettingsMetadata(ACaptureRequest *aCaptureRequest,
         sp<CaptureRequest> &req) {
     CameraMetadata metadataCopy = aCaptureRequest->settings->getInternalData();
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.h b/camera/ndk/ndk_vendor/impl/ACameraDevice.h
index 28092fd..47e6f56 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.h
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.h
@@ -101,6 +101,9 @@
             const ACameraCaptureSession_stateCallbacks* callbacks,
             /*out*/ACameraCaptureSession** session);
 
+    camera_status_t isSessionConfigurationSupported(
+            const ACaptureSessionOutputContainer* sessionOutputContainer) const;
+
     // Callbacks from camera service
     class ServiceCallback : public ICameraDeviceCallback {
       public:
@@ -397,6 +400,11 @@
         return mDevice->createCaptureSession(outputs, sessionParameters, callbacks, session);
     }
 
+    camera_status_t isSessionConfigurationSupported(
+            const ACaptureSessionOutputContainer* sessionOutputContainer) const {
+        return mDevice->isSessionConfigurationSupported(sessionOutputContainer);
+    }
+
     /***********************
      * Device interal APIs *
      ***********************/
diff --git a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
index 2398922..c51f93b 100644
--- a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
+++ b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
@@ -121,6 +121,12 @@
         cameraIdList.numCameras = idPointerList.size();
         cameraIdList.cameraIds = idPointerList.data();
 
+        ret = ACameraDevice_isSessionConfigurationSupported(mDevice, mOutputs);
+        if (ret != ACAMERA_OK && ret != ACAMERA_ERROR_UNSUPPORTED_OPERATION) {
+            ALOGE("ACameraDevice_isSessionConfigurationSupported failed, ret=%d", ret);
+            return ret;
+        }
+
         ret = ACameraDevice_createCaptureSession(mDevice, mOutputs, &mSessionCb, &mSession);
         if (ret != AMEDIA_OK) {
             ALOGE("ACameraDevice_createCaptureSession failed, ret=%d", ret);
diff --git a/media/codec2/components/base/Android.bp b/media/codec2/components/base/Android.bp
index 78a444b..f10835f 100644
--- a/media/codec2/components/base/Android.bp
+++ b/media/codec2/components/base/Android.bp
@@ -36,13 +36,18 @@
     ldflags: ["-Wl,-Bsymbolic"],
 }
 
+filegroup {
+    name: "codec2_soft_exports",
+    srcs: [ "exports.lds" ],
+}
+
 // public dependency for software codec implementation
 // to be used by code under media/codecs/* only as its stability is not guaranteed
 cc_defaults {
     name: "libcodec2_soft-defaults",
     defaults: ["libcodec2-impl-defaults"],
     vendor_available: true,
-
+    version_script: ":codec2_soft_exports",
     export_shared_lib_headers: [
         "libsfplugin_ccodec_utils",
     ],
diff --git a/media/codec2/components/base/exports.lds b/media/codec2/components/base/exports.lds
new file mode 100644
index 0000000..641bae8
--- /dev/null
+++ b/media/codec2/components/base/exports.lds
@@ -0,0 +1,7 @@
+{
+    global:
+        CreateCodec2Factory;
+        DestroyCodec2Factory;
+    local: *;
+};
+
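
The new version_script hides every symbol in the software codec libraries except the two factory entry points they already expose. As a reminder of what those entry points look like in a component library (a sketch, not copied from any particular codec; the factory body is elided):

    #include <C2Component.h>

    // Only these two definitions stay global once exports.lds is applied
    // ("local: *;" hides everything else defined in the library).
    extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
        return nullptr;  // a real library returns its concrete factory here
    }

    extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
        delete factory;
    }

After the change, `nm -D --defined-only` on a libcodec2_soft_*.so should list little more than these two symbols.
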
diff --git a/media/codec2/hidl/1.0/utils/Android.bp b/media/codec2/hidl/1.0/utils/Android.bp
index f5aa65b..0bb2418 100644
--- a/media/codec2/hidl/1.0/utils/Android.bp
+++ b/media/codec2/hidl/1.0/utils/Android.bp
@@ -27,6 +27,7 @@
 
     shared_libs: [
         "android.hardware.graphics.bufferqueue@1.0",
+        "android.hardware.graphics.bufferqueue@2.0",
         "android.hardware.graphics.common@1.0",
         "android.hardware.media@1.0",
         "android.hardware.media.bufferpool@2.0",
@@ -36,6 +37,7 @@
         "libcodec2",
         "libcodec2_vndk",
         "libcutils",
+        "libgui",
         "libhidlbase",
         "libhidltransport",
         "libhwbinder",
diff --git a/media/codec2/hidl/1.0/utils/Component.cpp b/media/codec2/hidl/1.0/utils/Component.cpp
index f3bf6f7..5897dce 100644
--- a/media/codec2/hidl/1.0/utils/Component.cpp
+++ b/media/codec2/hidl/1.0/utils/Component.cpp
@@ -272,7 +272,7 @@
 
 Return<Status> Component::setOutputSurface(
         uint64_t blockPoolId,
-        const sp<HGraphicBufferProducer>& surface) {
+        const sp<HGraphicBufferProducer2>& surface) {
     std::shared_ptr<C2BlockPool> pool;
     GetCodec2BlockPool(blockPoolId, mComponent, &pool);
     if (pool && pool->getAllocatorId() == C2PlatformAllocatorStore::BUFFERQUEUE) {
@@ -312,7 +312,7 @@
 }
 
 Return<void> Component::connectToOmxInputSurface(
-        const sp<HGraphicBufferProducer>& producer,
+        const sp<HGraphicBufferProducer1>& producer,
         const sp<::android::hardware::media::omx::V1_0::
         IGraphicBufferSource>& source,
         connectToOmxInputSurface_cb _hidl_cb) {
diff --git a/media/codec2/hidl/1.0/utils/ComponentStore.cpp b/media/codec2/hidl/1.0/utils/ComponentStore.cpp
index bb5faa5..53bb6d2 100644
--- a/media/codec2/hidl/1.0/utils/ComponentStore.cpp
+++ b/media/codec2/hidl/1.0/utils/ComponentStore.cpp
@@ -23,7 +23,7 @@
 #include <codec2/hidl/1.0/types.h>
 
 #include <android-base/file.h>
-#include <media/stagefright/bqhelper/WGraphicBufferProducer.h>
+#include <gui/bufferqueue/2.0/B2HGraphicBufferProducer.h>
 #include <media/stagefright/bqhelper/GraphicBufferSource.h>
 #include <utils/Errors.h>
 
@@ -219,13 +219,11 @@
         _hidl_cb(Status::CORRUPTED, nullptr);
         return Void();
     }
-    typedef ::android::hardware::graphics::bufferqueue::V1_0::
-            IGraphicBufferProducer HGbp;
-    typedef ::android::TWGraphicBufferProducer<HGbp> B2HGbp;
     sp<InputSurface> inputSurface = new InputSurface(
             this,
             std::make_shared<C2ReflectorHelper>(),
-            new B2HGbp(source->getIGraphicBufferProducer()),
+            new ::android::hardware::graphics::bufferqueue::V2_0::utils::
+                B2HGraphicBufferProducer(source->getIGraphicBufferProducer()),
             source);
     _hidl_cb(inputSurface ? Status::OK : Status::NO_MEMORY,
              inputSurface);
diff --git a/media/codec2/hidl/1.0/utils/InputSurface.cpp b/media/codec2/hidl/1.0/utils/InputSurface.cpp
index 85c44c3..2b4ca85 100644
--- a/media/codec2/hidl/1.0/utils/InputSurface.cpp
+++ b/media/codec2/hidl/1.0/utils/InputSurface.cpp
@@ -151,8 +151,6 @@
     return Void();
 }
 
-// Derived methods from IGraphicBufferProducer
-
 // Constructor is exclusive to ComponentStore.
 InputSurface::InputSurface(
         const sp<ComponentStore>& store,
diff --git a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h b/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h
index e444013..86dccd0 100644
--- a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h
+++ b/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h
@@ -68,7 +68,9 @@
     c2_status_t status() const;
 
     typedef ::android::hardware::graphics::bufferqueue::V1_0::
-            IGraphicBufferProducer HGraphicBufferProducer;
+            IGraphicBufferProducer HGraphicBufferProducer1;
+    typedef ::android::hardware::graphics::bufferqueue::V2_0::
+            IGraphicBufferProducer HGraphicBufferProducer2;
 
     // Methods from IComponent follow.
     virtual Return<Status> queue(const WorkBundle& workBundle) override;
@@ -76,12 +78,12 @@
     virtual Return<Status> drain(bool withEos) override;
     virtual Return<Status> setOutputSurface(
             uint64_t blockPoolId,
-            const sp<HGraphicBufferProducer>& surface) override;
+            const sp<HGraphicBufferProducer2>& surface) override;
     virtual Return<void> connectToInputSurface(
             const sp<IInputSurface>& inputSurface,
             connectToInputSurface_cb _hidl_cb) override;
     virtual Return<void> connectToOmxInputSurface(
-            const sp<HGraphicBufferProducer>& producer,
+            const sp<HGraphicBufferProducer1>& producer,
             const sp<::android::hardware::media::omx::V1_0::
             IGraphicBufferSource>& source,
             connectToOmxInputSurface_cb _hidl_cb) override;
diff --git a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/InputSurface.h b/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/InputSurface.h
index 2682c13..34ea959 100644
--- a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/InputSurface.h
+++ b/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/InputSurface.h
@@ -19,7 +19,7 @@
 
 #include <codec2/hidl/1.0/ComponentStore.h>
 
-#include <android/hardware/graphics/bufferqueue/1.0/IGraphicBufferProducer.h>
+#include <android/hardware/graphics/bufferqueue/2.0/IGraphicBufferProducer.h>
 #include <android/hardware/media/c2/1.0/IInputSink.h>
 #include <android/hardware/media/c2/1.0/IInputSurface.h>
 #include <gui/IGraphicBufferProducer.h>
@@ -44,7 +44,7 @@
 
 struct InputSurface : public IInputSurface {
 
-    typedef ::android::hardware::graphics::bufferqueue::V1_0::
+    typedef ::android::hardware::graphics::bufferqueue::V2_0::
             IGraphicBufferProducer HGraphicBufferProducer;
 
     typedef ::android::
diff --git a/media/codec2/hidl/1.0/utils/types.cpp b/media/codec2/hidl/1.0/utils/types.cpp
index 343bcb5..031e637 100644
--- a/media/codec2/hidl/1.0/utils/types.cpp
+++ b/media/codec2/hidl/1.0/utils/types.cpp
@@ -18,9 +18,9 @@
 #define LOG_TAG "Codec2-types"
 #include <android-base/logging.h>
 
+#include <android/hardware/graphics/bufferqueue/2.0/IGraphicBufferProducer.h>
 #include <codec2/hidl/1.0/types.h>
-
-#include <media/stagefright/bqhelper/WGraphicBufferProducer.h>
+#include <gui/bufferqueue/2.0/B2HGraphicBufferProducer.h>
 #include <media/stagefright/foundation/AUtils.h>
 
 #include <C2AllocatorIon.h>
@@ -46,7 +46,6 @@
 namespace V1_0 {
 namespace utils {
 
-using namespace ::android;
 using ::android::hardware::Return;
 using ::android::hardware::media::bufferpool::BufferPoolData;
 using ::android::hardware::media::bufferpool::V2_0::BufferStatusMessage;
@@ -55,7 +54,10 @@
         ClientManager;
 using ::android::hardware::media::bufferpool::V2_0::implementation::
         TransactionId;
-using ::android::TWGraphicBufferProducer;
+using HGraphicBufferProducer = ::android::hardware::graphics::bufferqueue::
+        V2_0::IGraphicBufferProducer;
+using B2HGraphicBufferProducer = ::android::hardware::graphics::bufferqueue::
+        V2_0::utils::B2HGraphicBufferProducer;
 
 const char* asString(Status status, const char* def) {
     return asString(static_cast<c2_status_t>(status), def);
@@ -1806,7 +1808,7 @@
     sp<HGraphicBufferProducer> hgbp =
             igbp->getHalInterface<HGraphicBufferProducer>();
     return hgbp ? hgbp :
-            new TWGraphicBufferProducer<HGraphicBufferProducer>(igbp);
+            new B2HGraphicBufferProducer(igbp);
 }
 
 } // unnamed namespace
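
Across types.cpp and client.cpp the old bqhelper TWGraphicBufferProducer wrapper is replaced by the libgui bufferqueue@2.0 adapter. The resulting pattern, condensed into a free-standing helper (a sketch that mirrors getHgbp() above and setOutputSurface() below, not new API):

    #include <gui/IGraphicBufferProducer.h>
    #include <gui/bufferqueue/2.0/B2HGraphicBufferProducer.h>

    using HGbp2 = ::android::hardware::graphics::bufferqueue::V2_0::
            IGraphicBufferProducer;
    using B2HGbp2 = ::android::hardware::graphics::bufferqueue::V2_0::utils::
            B2HGraphicBufferProducer;

    // Prefer the HAL interface the producer already carries; otherwise wrap the
    // binder-side producer in the @2.0 B2H adapter.
    android::sp<HGbp2> toHalProducer(
            const android::sp<android::IGraphicBufferProducer>& igbp) {
        android::sp<HGbp2> hgbp = igbp->getHalInterface<HGbp2>();
        return hgbp ? hgbp : new B2HGbp2(igbp);
    }
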
diff --git a/media/codec2/hidl/client/client.cpp b/media/codec2/hidl/client/client.cpp
index 7a2e549..0fe8376 100644
--- a/media/codec2/hidl/client/client.cpp
+++ b/media/codec2/hidl/client/client.cpp
@@ -29,9 +29,9 @@
 #include <android-base/properties.h>
 #include <bufferpool/ClientManager.h>
 #include <cutils/native_handle.h>
-#include <gui/bufferqueue/1.0/H2BGraphicBufferProducer.h>
+#include <gui/bufferqueue/2.0/B2HGraphicBufferProducer.h>
+#include <gui/bufferqueue/2.0/H2BGraphicBufferProducer.h>
 #include <hidl/HidlSupport.h>
-#include <media/stagefright/bqhelper/WGraphicBufferProducer.h>
 
 #include <android/hardware/media/bufferpool/2.0/IClientManager.h>
 #include <android/hardware/media/c2/1.0/IComponent.h>
@@ -50,13 +50,21 @@
 using ::android::hardware::hidl_string;
 using ::android::hardware::Return;
 using ::android::hardware::Void;
-using ::android::TWGraphicBufferProducer;
 
 using namespace ::android::hardware::media::c2::V1_0;
 using namespace ::android::hardware::media::c2::V1_0::utils;
 using namespace ::android::hardware::media::bufferpool::V2_0;
 using namespace ::android::hardware::media::bufferpool::V2_0::implementation;
 
+using HGraphicBufferProducer1 = ::android::hardware::graphics::bufferqueue::
+        V1_0::IGraphicBufferProducer;
+using HGraphicBufferProducer2 = ::android::hardware::graphics::bufferqueue::
+        V2_0::IGraphicBufferProducer;
+using B2HGraphicBufferProducer2 = ::android::hardware::graphics::bufferqueue::
+        V2_0::utils::B2HGraphicBufferProducer;
+using H2BGraphicBufferProducer2 = ::android::hardware::graphics::bufferqueue::
+        V2_0::utils::H2BGraphicBufferProducer;
+
 namespace /* unnamed */ {
 
 // c2_status_t value that corresponds to hwbinder transaction failure.
@@ -1064,11 +1072,11 @@
         C2BlockPool::local_id_t blockPoolId,
         const sp<IGraphicBufferProducer>& surface,
         uint32_t generation) {
-    sp<HGraphicBufferProducer> igbp =
-            surface->getHalInterface<HGraphicBufferProducer>();
+    sp<HGraphicBufferProducer2> igbp =
+            surface->getHalInterface<HGraphicBufferProducer2>();
 
     if (!igbp) {
-        igbp = new TWGraphicBufferProducer<HGraphicBufferProducer>(surface);
+        igbp = new B2HGraphicBufferProducer2(surface);
     }
 
     Return<Status> transStatus = mBase->setOutputSurface(
@@ -1195,7 +1203,7 @@
 }
 
 c2_status_t Codec2Client::Component::connectToOmxInputSurface(
-        const sp<HGraphicBufferProducer>& producer,
+        const sp<HGraphicBufferProducer1>& producer,
         const sp<HGraphicBufferSource>& source,
         std::shared_ptr<InputSurfaceConnection>* connection) {
     c2_status_t status;
@@ -1284,12 +1292,11 @@
         },
         mBase{base},
         mGraphicBufferProducer{new
-            ::android::hardware::graphics::bufferqueue::V1_0::utils::
-            H2BGraphicBufferProducer([base]() -> sp<HGraphicBufferProducer> {
-                Return<sp<HGraphicBufferProducer>> transResult =
+            H2BGraphicBufferProducer2([base]() -> sp<HGraphicBufferProducer2> {
+                Return<sp<HGraphicBufferProducer2>> transResult =
                         base->getGraphicBufferProducer();
                 return transResult.isOk() ?
-                        static_cast<sp<HGraphicBufferProducer>>(transResult) :
+                        static_cast<sp<HGraphicBufferProducer2>>(transResult) :
                         nullptr;
             }())} {
 }
diff --git a/media/codec2/hidl/client/include/codec2/hidl/client.h b/media/codec2/hidl/client/include/codec2/hidl/client.h
index 478ce6e..cd42205 100644
--- a/media/codec2/hidl/client/include/codec2/hidl/client.h
+++ b/media/codec2/hidl/client/include/codec2/hidl/client.h
@@ -63,55 +63,29 @@
  * Codec2Client are all subclasses of Configurable.
  */
 
-// Forward declaration of Codec2.0 HIDL interfaces
-namespace android {
-namespace hardware {
-namespace media {
-namespace c2 {
-namespace V1_0 {
+// Forward declaration of relevant HIDL interfaces
+
+namespace android::hardware::media::c2::V1_0 {
 struct IConfigurable;
 struct IComponent;
 struct IComponentInterface;
 struct IComponentStore;
+struct IInputSink;
 struct IInputSurface;
 struct IInputSurfaceConnection;
-} // namespace V1_0
-} // namespace c2
-} // namespace media
-} // namespace hardware
-} // namespace android
+}  // namespace android::hardware::media::c2::V1_0
 
-namespace android {
-namespace hardware {
-namespace media {
-namespace bufferpool {
-namespace V2_0 {
+namespace android::hardware::media::bufferpool::V2_0 {
 struct IClientManager;
-} // namespace V2_0
-} // namespace bufferpool
-} // namespace media
-} // namespace hardware
-} // namespace android
+}  // namespace android::hardware::media::bufferpool::V2_0
 
-// Forward declarations of other classes
-namespace android {
-namespace hardware {
-namespace graphics {
-namespace bufferqueue {
-namespace V1_0 {
+namespace android::hardware::graphics::bufferqueue::V1_0 {
 struct IGraphicBufferProducer;
-} // namespace V1_0
-} // namespace bufferqueue
-} // namespace graphics
-namespace media {
-namespace omx {
-namespace V1_0 {
+}  // namespace android::hardware::graphics::bufferqueue::V1_0
+
+namespace android::hardware::media::omx::V1_0 {
 struct IGraphicBufferSource;
-} // namespace V1_0
-} // namespace omx
-} // namespace media
-} // namespace hardware
-} // namespace android
+}  // namespace android::hardware::media::omx::V1_0
 
 namespace android {
 
@@ -324,7 +298,9 @@
             QueueBufferOutput QueueBufferOutput;
 
     typedef ::android::hardware::graphics::bufferqueue::V1_0::
-            IGraphicBufferProducer HGraphicBufferProducer;
+            IGraphicBufferProducer HGraphicBufferProducer1;
+    typedef ::android::hardware::graphics::bufferqueue::V2_0::
+            IGraphicBufferProducer HGraphicBufferProducer2;
     typedef ::android::hardware::media::omx::V1_0::
             IGraphicBufferSource HGraphicBufferSource;
 
@@ -362,7 +338,7 @@
             std::shared_ptr<InputSurfaceConnection>* connection);
 
     c2_status_t connectToOmxInputSurface(
-            const sp<HGraphicBufferProducer>& producer,
+            const sp<HGraphicBufferProducer1>& producer,
             const sp<HGraphicBufferSource>& source,
             std::shared_ptr<InputSurfaceConnection>* connection);
 
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 2d10c67..5f60378 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -271,9 +271,12 @@
             if (mNode != nullptr) {
                 OMX_PARAM_U32TYPE ptrGapParam = {};
                 ptrGapParam.nSize = sizeof(OMX_PARAM_U32TYPE);
-                ptrGapParam.nU32 = (config.mMinAdjustedFps > 0)
+                float gap = (config.mMinAdjustedFps > 0)
                         ? c2_min(INT32_MAX + 0., 1e6 / config.mMinAdjustedFps + 0.5)
                         : c2_max(0. - INT32_MAX, -1e6 / config.mFixedAdjustedFps - 0.5);
+                // float -> uint32_t is undefined if the value is negative.
+                // First convert to int32_t to ensure the expected behavior.
+                ptrGapParam.nU32 = int32_t(gap);
                 (void)mNode->setParameter(
                         (OMX_INDEXTYPE)OMX_IndexParamMaxFrameDurationForBitrateControl,
                         &ptrGapParam, sizeof(ptrGapParam));
@@ -282,7 +285,7 @@
 
         // max fps
         // TRICKY: we do not unset max fps to 0 unless using fixed fps
-        if ((config.mMaxFps > 0 || (config.mFixedAdjustedFps > 0 && config.mMaxFps == 0))
+        if ((config.mMaxFps > 0 || (config.mFixedAdjustedFps > 0 && config.mMaxFps == -1))
                 && config.mMaxFps != mConfig.mMaxFps) {
             status_t res = GetStatus(mSource->setMaxFps(config.mMaxFps));
             status << " maxFps=" << config.mMaxFps;
@@ -764,13 +767,16 @@
                 if (msg->findInt64(KEY_REPEAT_PREVIOUS_FRAME_AFTER, &value) && value > 0) {
                     config->mISConfig->mMinFps = 1e6 / value;
                 }
-                (void)msg->findFloat(
-                        KEY_MAX_FPS_TO_ENCODER, &config->mISConfig->mMaxFps);
+                if (!msg->findFloat(
+                        KEY_MAX_FPS_TO_ENCODER, &config->mISConfig->mMaxFps)) {
+                    config->mISConfig->mMaxFps = -1;
+                }
                 config->mISConfig->mMinAdjustedFps = 0;
                 config->mISConfig->mFixedAdjustedFps = 0;
                 if (msg->findInt64(KEY_MAX_PTS_GAP_TO_ENCODER, &value)) {
                     if (value < 0 && value >= INT32_MIN) {
                         config->mISConfig->mFixedAdjustedFps = -1e6 / value;
+                        config->mISConfig->mMaxFps = -1;
                     } else if (value > 0 && value <= INT32_MAX) {
                         config->mISConfig->mMinAdjustedFps = 1e6 / value;
                     }
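
The nU32 change above avoids a float-to-unsigned conversion of a negative value, which is undefined behavior in C++; routing through int32_t first is well defined. A standalone illustration with made-up numbers (not CCodec code):

    #include <cstdint>
    #include <cstdio>

    int main() {
        float gap = -33333.5f;  // e.g. -1e6 / mFixedAdjustedFps - 0.5
        // float -> uint32_t is undefined for negative values; float -> int32_t
        // truncates toward zero, and int32_t -> uint32_t wraps mod 2^32.
        uint32_t u = static_cast<uint32_t>(static_cast<int32_t>(gap));
        std::printf("%d -> %u\n", static_cast<int32_t>(gap), u);
        return 0;
    }
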
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index fb6af93..a6fa333 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -2593,9 +2593,9 @@
         return false;
     }
 
-    if (work->worklets.size() != 1u
+    if (mInputSurface == nullptr && (work->worklets.size() != 1u
             || !work->worklets.front()
-            || !(work->worklets.front()->output.flags & C2FrameData::FLAG_INCOMPLETE)) {
+            || !(work->worklets.front()->output.flags & C2FrameData::FLAG_INCOMPLETE))) {
         mPipelineWatcher.lock()->onWorkDone(work->input.ordinal.frameIndex.peeku());
     }
 
@@ -2709,6 +2709,10 @@
     c2_cntr64_t timestamp =
         worklet->output.ordinal.timestamp + work->input.ordinal.customOrdinal
                 - work->input.ordinal.timestamp;
+    if (mInputSurface != nullptr) {
+        // When using an input surface we need to restore the original input timestamp.
+        timestamp = work->input.ordinal.customOrdinal;
+    }
     ALOGV("[%s] onWorkDone: input %lld, codec %lld => output %lld => %lld",
           mName,
           work->input.ordinal.customOrdinal.peekll(),
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.cpp b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
index 6da131f..d62944a 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.cpp
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
@@ -589,6 +589,21 @@
     bool mIsHdr10Plus;
 };
 
+struct Av1ProfileLevelMapper : ProfileLevelMapperHelper {
+    virtual bool simpleMap(C2Config::level_t from, int32_t *to) {
+        return sAv1Levels.map(from, to);
+    }
+    virtual bool simpleMap(int32_t from, C2Config::level_t *to) {
+        return sAv1Levels.map(from, to);
+    }
+    virtual bool simpleMap(C2Config::profile_t from, int32_t *to) {
+        return sAv1Profiles.map(from, to);
+    }
+    virtual bool simpleMap(int32_t from, C2Config::profile_t *to) {
+        return sAv1Profiles.map(from, to);
+    }
+};
+
 } // namespace
 
 // static
@@ -613,6 +628,8 @@
         return std::make_shared<Vp8ProfileLevelMapper>();
     } else if (mediaType == MIMETYPE_VIDEO_VP9) {
         return std::make_shared<Vp9ProfileLevelMapper>();
+    } else if (mediaType == MIMETYPE_VIDEO_AV1) {
+        return std::make_shared<Av1ProfileLevelMapper>();
     }
     return nullptr;
 }
diff --git a/media/codec2/vndk/Android.bp b/media/codec2/vndk/Android.bp
index ab6a105..c6ca670 100644
--- a/media/codec2/vndk/Android.bp
+++ b/media/codec2/vndk/Android.bp
@@ -38,6 +38,7 @@
 
     export_shared_lib_headers: [
         "libbase",
+        "libgui",
         "android.hardware.media.bufferpool@2.0",
     ],
 
@@ -52,13 +53,14 @@
 
     shared_libs: [
         "android.hardware.graphics.allocator@2.0",
-        "android.hardware.graphics.bufferqueue@1.0",
+        "android.hardware.graphics.bufferqueue@2.0",
         "android.hardware.graphics.mapper@2.0",
         "android.hardware.media.bufferpool@2.0",
         "libbase",
         "libbinder",
         "libcutils",
         "libdl",
+        "libgui",
         "libhardware",
         "libhidlbase",
         "libion",
diff --git a/media/codec2/vndk/include/C2BqBufferPriv.h b/media/codec2/vndk/include/C2BqBufferPriv.h
index 9271a71..e1a8138 100644
--- a/media/codec2/vndk/include/C2BqBufferPriv.h
+++ b/media/codec2/vndk/include/C2BqBufferPriv.h
@@ -17,10 +17,11 @@
 #ifndef STAGEFRIGHT_CODEC2_BQ_BUFFER_PRIV_H_
 #define STAGEFRIGHT_CODEC2_BQ_BUFFER_PRIV_H_
 
-#include <functional>
+#include <android/hardware/graphics/bufferqueue/2.0/IGraphicBufferProducer.h>
 
 #include <C2Buffer.h>
-#include <media/stagefright/bqhelper/WGraphicBufferProducer.h>
+
+#include <functional>
 
 class C2BufferQueueBlockPool : public C2BlockPool {
 public:
@@ -52,6 +53,8 @@
      */
     virtual void setRenderCallback(const OnRenderCallback &renderCallback = OnRenderCallback());
 
+    typedef ::android::hardware::graphics::bufferqueue::V2_0::
+            IGraphicBufferProducer HGraphicBufferProducer;
     /**
      * Configures an IGBP in order to create blocks. A newly created block is
      * dequeued from the configured IGBP. Unique Id of IGBP and the slot number of
@@ -62,7 +65,7 @@
      *
      * \param producer      the IGBP, which will be used to fetch blocks
      */
-    virtual void configureProducer(const android::sp<android::HGraphicBufferProducer> &producer);
+    virtual void configureProducer(const android::sp<HGraphicBufferProducer> &producer);
 
 private:
     const std::shared_ptr<C2Allocator> mAllocator;
diff --git a/media/codec2/vndk/internal/C2BlockInternal.h b/media/codec2/vndk/internal/C2BlockInternal.h
index 2abf3ac..84ce70a 100644
--- a/media/codec2/vndk/internal/C2BlockInternal.h
+++ b/media/codec2/vndk/internal/C2BlockInternal.h
@@ -17,7 +17,7 @@
 #ifndef ANDROID_STAGEFRIGHT_C2BLOCK_INTERNAL_H_
 #define ANDROID_STAGEFRIGHT_C2BLOCK_INTERNAL_H_
 
-#include <android/hardware/graphics/bufferqueue/1.0/IGraphicBufferProducer.h>
+#include <android/hardware/graphics/bufferqueue/2.0/IGraphicBufferProducer.h>
 
 #include <C2Buffer.h>
 
@@ -291,7 +291,7 @@
     bool AssignBlockToBufferQueue(
             const std::shared_ptr<_C2BlockPoolData>& poolData,
             const ::android::sp<::android::hardware::graphics::bufferqueue::
-                                V1_0::IGraphicBufferProducer>& igbp,
+                                V2_0::IGraphicBufferProducer>& igbp,
             uint32_t generation,
             uint64_t bqId,
             int32_t bqSlot,
@@ -314,7 +314,7 @@
     bool HoldBlockFromBufferQueue(
             const std::shared_ptr<_C2BlockPoolData>& poolData,
             const ::android::sp<::android::hardware::graphics::bufferqueue::
-                                V1_0::IGraphicBufferProducer>& igbp = nullptr);
+                                V2_0::IGraphicBufferProducer>& igbp = nullptr);
 
     /**
      * Yield a block to the designated bufferqueue. This causes the destruction
diff --git a/media/codec2/vndk/platform/C2BqBuffer.cpp b/media/codec2/vndk/platform/C2BqBuffer.cpp
index 41a5b3f..9cc5677 100644
--- a/media/codec2/vndk/platform/C2BqBuffer.cpp
+++ b/media/codec2/vndk/platform/C2BqBuffer.cpp
@@ -19,29 +19,37 @@
 #include <utils/Log.h>
 
 #include <gui/BufferQueueDefs.h>
-#include <list>
-#include <map>
-#include <mutex>
+#include <gui/bufferqueue/2.0/B2HGraphicBufferProducer.h>
+#include <hidl/HidlSupport.h>
 
 #include <C2AllocatorGralloc.h>
 #include <C2BqBufferPriv.h>
 #include <C2BlockInternal.h>
 
-using ::android::AnwBuffer;
+#include <list>
+#include <map>
+#include <mutex>
+
 using ::android::BufferQueueDefs::NUM_BUFFER_SLOTS;
 using ::android::C2AllocatorGralloc;
 using ::android::C2AndroidMemoryUsage;
 using ::android::Fence;
 using ::android::GraphicBuffer;
-using ::android::HGraphicBufferProducer;
 using ::android::IGraphicBufferProducer;
-using ::android::hidl_handle;
 using ::android::sp;
 using ::android::status_t;
 using ::android::wp;
-
+using ::android::hardware::hidl_handle;
 using ::android::hardware::Return;
-using ::android::hardware::graphics::common::V1_0::PixelFormat;
+
+using HBuffer = ::android::hardware::graphics::common::V1_2::HardwareBuffer;
+using HStatus = ::android::hardware::graphics::bufferqueue::V2_0::Status;
+using ::android::hardware::graphics::bufferqueue::V2_0::utils::b2h;
+using ::android::hardware::graphics::bufferqueue::V2_0::utils::h2b;
+using ::android::hardware::graphics::bufferqueue::V2_0::utils::HFenceWrapper;
+
+using HGraphicBufferProducer = ::android::hardware::graphics::bufferqueue::V2_0
+        ::IGraphicBufferProducer;
 
 struct C2BufferQueueBlockPoolData : public _C2BlockPoolData {
 
@@ -185,57 +193,67 @@
             C2MemoryUsage usage,
             std::shared_ptr<C2GraphicBlock> *block /* nonnull */) {
         // We have an IGBP now.
-        sp<Fence> fence = new Fence();
         C2AndroidMemoryUsage androidUsage = usage;
-        status_t status;
-        PixelFormat pixelFormat = static_cast<PixelFormat>(format);
-        int slot;
+        status_t status{};
+        int slot{};
+        bool bufferNeedsReallocation{};
+        sp<Fence> fence = new Fence();
         ALOGV("tries to dequeue buffer");
-        Return<void> transStatus = mProducer->dequeueBuffer(
-                width, height, pixelFormat, androidUsage.asGrallocUsage(), false,
-                [&status, &slot, &fence](
-                        int32_t tStatus, int32_t tSlot, hidl_handle const& tFence,
-                        HGraphicBufferProducer::FrameEventHistoryDelta const& tTs) {
-                    status = tStatus;
-                    slot = tSlot;
-                    if (!android::conversion::convertTo(fence.get(), tFence) &&
-                            status == android::NO_ERROR) {
-                        status = android::BAD_VALUE;
+
+        { // Call dequeueBuffer().
+            using Input = HGraphicBufferProducer::DequeueBufferInput;
+            using Output = HGraphicBufferProducer::DequeueBufferOutput;
+            Return<void> transResult = mProducer->dequeueBuffer(
+                    Input{
+                        width,
+                        height,
+                        format,
+                        androidUsage.asGrallocUsage()},
+                    [&status, &slot, &bufferNeedsReallocation,
+                     &fence](HStatus hStatus,
+                             int32_t hSlot,
+                             Output const& hOutput) {
+                        slot = static_cast<int>(hSlot);
+                        if (!h2b(hStatus, &status) ||
+                                !h2b(hOutput.fence, &fence)) {
+                            status = ::android::BAD_VALUE;
+                        } else {
+                            bufferNeedsReallocation =
+                                    hOutput.bufferNeedsReallocation;
+                        }
+                    });
+            if (!transResult.isOk() || status != android::OK) {
+                ALOGD("cannot dequeue buffer %d", status);
+                if (transResult.isOk()) {
+                    if (status == android::INVALID_OPERATION ||
+                        status == android::TIMED_OUT ||
+                        status == android::WOULD_BLOCK) {
+                        // Dequeue buffer is blocked temporarily. Retrying is
+                        // required.
+                        return C2_BLOCKING;
                     }
-                    (void) tTs;
-                });
-        // dequeueBuffer returns flag.
-        if (!transStatus.isOk() || status < android::OK) {
-            ALOGD("cannot dequeue buffer %d", status);
-            if (transStatus.isOk()) {
-                if (status == android::INVALID_OPERATION ||
-                    status == android::TIMED_OUT ||
-                    status == android::WOULD_BLOCK) {
-                    // Dequeue buffer is blocked temporarily. Retrying is
-                    // required.
-                    return C2_BLOCKING;
                 }
+                return C2_BAD_VALUE;
             }
+        }
+        HFenceWrapper hFenceWrapper{};
+        if (!b2h(fence, &hFenceWrapper)) {
+            ALOGE("Invalid fence received from dequeueBuffer.");
             return C2_BAD_VALUE;
         }
         ALOGV("dequeued a buffer successfully");
-        native_handle_t* nh = nullptr;
-        hidl_handle fenceHandle;
-        if (fence) {
-            android::conversion::wrapAs(&fenceHandle, &nh, *fence);
-        }
         if (fence) {
             static constexpr int kFenceWaitTimeMs = 10;
 
             status_t status = fence->wait(kFenceWaitTimeMs);
             if (status == -ETIME) {
                 // fence is not signalled yet.
-                (void)mProducer->cancelBuffer(slot, fenceHandle).isOk();
+                (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
                 return C2_BLOCKING;
             }
             if (status != android::NO_ERROR) {
                 ALOGD("buffer fence wait error %d", status);
-                (void)mProducer->cancelBuffer(slot, fenceHandle).isOk();
+                (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
                 return C2_BAD_VALUE;
             } else if (mRenderCallback) {
                 nsecs_t signalTime = fence->getSignalTime();
@@ -248,27 +266,31 @@
         }
 
         sp<GraphicBuffer> &slotBuffer = mBuffers[slot];
-        if (status & IGraphicBufferProducer::BUFFER_NEEDS_REALLOCATION || !slotBuffer) {
+        if (bufferNeedsReallocation || !slotBuffer) {
             if (!slotBuffer) {
                 slotBuffer = new GraphicBuffer();
             }
             // N.B. This assumes requestBuffer# returns an existing allocation
             // instead of a new allocation.
-            Return<void> transStatus = mProducer->requestBuffer(
+            Return<void> transResult = mProducer->requestBuffer(
                     slot,
-                    [&status, &slotBuffer](int32_t tStatus, AnwBuffer const& tBuffer){
-                        status = tStatus;
-                        if (!android::conversion::convertTo(slotBuffer.get(), tBuffer) &&
-                                status == android::NO_ERROR) {
+                    [&status, &slotBuffer](
+                            HStatus hStatus,
+                            HBuffer const& hBuffer,
+                            uint32_t generationNumber){
+                        if (h2b(hStatus, &status) &&
+                                h2b(hBuffer, &slotBuffer) &&
+                                slotBuffer) {
+                            slotBuffer->setGenerationNumber(generationNumber);
+                        } else {
                             status = android::BAD_VALUE;
                         }
                     });
-
-            if (!transStatus.isOk()) {
+            if (!transResult.isOk()) {
                 return C2_BAD_VALUE;
             } else if (status != android::NO_ERROR) {
                 slotBuffer.clear();
-                (void)mProducer->cancelBuffer(slot, fenceHandle).isOk();
+                (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
                 return C2_BAD_VALUE;
             }
         }
@@ -292,13 +314,14 @@
                 std::shared_ptr<C2BufferQueueBlockPoolData> poolData =
                         std::make_shared<C2BufferQueueBlockPoolData>(
                                 slotBuffer->getGenerationNumber(),
-                                mProducerId, slot, shared_from_this());
+                                mProducerId, slot,
+                                shared_from_this());
                 *block = _C2BlockFactory::CreateGraphicBlock(alloc, poolData);
                 return C2_OK;
             }
             // Block was not created. call requestBuffer# again next time.
             slotBuffer.clear();
-            (void)mProducer->cancelBuffer(slot, fenceHandle).isOk();
+            (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
         }
         return C2_BAD_VALUE;
     }
@@ -312,10 +335,10 @@
         bool noInit = false;
         for (int i = 0; i < NUM_BUFFER_SLOTS; ++i) {
             if (!noInit && mProducer) {
-                Return<int32_t> transResult =
+                Return<HStatus> transResult =
                         mProducer->detachBuffer(static_cast<int32_t>(i));
                 noInit = !transResult.isOk() ||
-                         static_cast<int32_t>(transResult) == android::NO_INIT;
+                         static_cast<HStatus>(transResult) == HStatus::NO_INIT;
             }
             mBuffers[i].clear();
         }
@@ -373,32 +396,28 @@
     }
 
     void configureProducer(const sp<HGraphicBufferProducer> &producer) {
-        int32_t status = android::OK;
         uint64_t producerId = 0;
         if (producer) {
-            Return<void> transStatus = producer->getUniqueId(
-                    [&status, &producerId](int32_t tStatus, int64_t tProducerId) {
-                        status = tStatus;
-                        producerId = tProducerId;
-                    });
-            if (!transStatus.isOk()) {
+            Return<uint64_t> transResult = producer->getUniqueId();
+            if (!transResult.isOk()) {
                 ALOGD("configureProducer -- failed to connect to the producer");
                 return;
             }
+            producerId = static_cast<uint64_t>(transResult);
         }
         {
             std::lock_guard<std::mutex> lock(mMutex);
             bool noInit = false;
             for (int i = 0; i < NUM_BUFFER_SLOTS; ++i) {
                 if (!noInit && mProducer) {
-                    Return<int32_t> transResult =
+                    Return<HStatus> transResult =
                             mProducer->detachBuffer(static_cast<int32_t>(i));
                     noInit = !transResult.isOk() ||
-                             static_cast<int32_t>(transResult) == android::NO_INIT;
+                             static_cast<HStatus>(transResult) == HStatus::NO_INIT;
                 }
                 mBuffers[i].clear();
             }
-            if (producer && status == android::OK) {
+            if (producer) {
                 mProducer = producer;
                 mProducerId = producerId;
             } else {
@@ -414,7 +433,7 @@
     void cancel(uint64_t igbp_id, int32_t igbp_slot) {
         std::lock_guard<std::mutex> lock(mMutex);
         if (igbp_id == mProducerId && mProducer) {
-            (void)mProducer->cancelBuffer(igbp_slot, nullptr).isOk();
+            (void)mProducer->cancelBuffer(igbp_slot, hidl_handle{}).isOk();
         }
     }
 
@@ -455,7 +474,7 @@
     if (local && localPool) {
         localPool->cancel(bqId, bqSlot);
     } else if (igbp) {
-        igbp->cancelBuffer(bqSlot, nullptr);
+        igbp->cancelBuffer(bqSlot, hidl_handle{}).isOk();
     }
 }
 
diff --git a/media/extractors/mkv/MatroskaExtractor.cpp b/media/extractors/mkv/MatroskaExtractor.cpp
index f8aa784..20cc643 100644
--- a/media/extractors/mkv/MatroskaExtractor.cpp
+++ b/media/extractors/mkv/MatroskaExtractor.cpp
@@ -32,6 +32,7 @@
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/MetaDataUtils.h>
+#include <media/stagefright/foundation/avc_utils.h>
 #include <utils/String8.h>
 
 #include <arpa/inet.h>
@@ -147,6 +148,7 @@
         AVC,
         AAC,
         HEVC,
+        MP3,
         OTHER
     };
 
@@ -159,6 +161,15 @@
 
     List<MediaBufferHelper *> mPendingFrames;
 
+    int64_t mCurrentTS; // added for MP3 timestamp tracking
+    uint32_t mMP3Header;
+
+    media_status_t findMP3Header(uint32_t * header,
+        const uint8_t *dataSource, int length, int *outStartPos);
+    media_status_t mp3FrameRead(
+            MediaBufferHelper **out, const ReadOptions *options,
+            int64_t targetSampleTimeUs);
+
     status_t advance();
 
     status_t setWebmBlockCryptoInfo(MediaBufferHelper *mbuf);
@@ -225,7 +236,9 @@
       mBlockIter(mExtractor,
                  mExtractor->mTracks.itemAt(index).mTrackNum,
                  index),
-      mNALSizeLen(-1) {
+      mNALSizeLen(-1),
+      mCurrentTS(0),
+      mMP3Header(0) {
     MatroskaExtractor::TrackInfo &trackInfo = mExtractor->mTracks.editItemAt(index);
     AMediaFormat *meta = trackInfo.mMeta;
 
@@ -254,6 +267,8 @@
         }
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
         mType = AAC;
+    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) {
+        mType = MP3;
     }
 }
 
@@ -270,6 +285,16 @@
     mBufferGroup->init(1 /* number of buffers */, 1024 /* buffer size */, 64 /* growth limit */);
     mBlockIter.reset();
 
+    if (mType == MP3 && mMP3Header == 0) {
+        int start = -1;
+        media_status_t err = findMP3Header(&mMP3Header, NULL, 0, &start);
+        if (err != OK) {
+            ALOGE("No mp3 header found");
+            clearPendingFrames();
+            return err;
+        }
+    }
+
     return AMEDIA_OK;
 }
 
@@ -796,6 +821,188 @@
     return AMEDIA_OK;
 }
 
+// The value of kMP3HeaderMask is taken from MP3Extractor.
+static const uint32_t kMP3HeaderMask = 0xfffe0c00;
+
+media_status_t MatroskaSource::findMP3Header(uint32_t * header,
+        const uint8_t *dataSource, int length, int *outStartPos) {
+    if (NULL == header) {
+        ALOGE("header is null!");
+        return AMEDIA_ERROR_END_OF_STREAM;
+    }
+
+    // A header is already known: find its start position within dataSource.
+    if (0 != *header) {
+        if (NULL == dataSource) {
+            *outStartPos = -1;
+            return AMEDIA_OK;
+        }
+        uint32_t tmpCode = 0;
+        for (int i = 0; i < length; i++) {
+            tmpCode = (tmpCode << 8) + dataSource[i];
+            if ((tmpCode & kMP3HeaderMask) == (*header & kMP3HeaderMask)) {
+                *outStartPos = i - 3;
+                return AMEDIA_OK;
+            }
+        }
+        *outStartPos = -1;
+        return AMEDIA_OK;
+    }
+
+    // No header known yet: scan the pending frames for an MP3 sync header.
+    uint32_t code = 0;
+    while (0 == *header) {
+        while (mPendingFrames.empty()) {
+            media_status_t err = readBlock();
+            if (err != OK) {
+                clearPendingFrames();
+                return err;
+            }
+        }
+        MediaBufferHelper *frame = *mPendingFrames.begin();
+        size_t size = frame->range_length();
+        size_t offset = frame->range_offset();
+        size_t i;
+        size_t frame_size;
+        for (i = 0; i < size; i++) {
+            ALOGV("data[%zu]=%x", i, *((uint8_t*)frame->data() + offset + i));
+            code = (code << 8) + *((uint8_t*)frame->data() + offset + i);
+            if (GetMPEGAudioFrameSize(code, &frame_size, NULL, NULL, NULL)) {
+                *header = code;
+                mBlockIter.reset();
+                clearPendingFrames();
+                return AMEDIA_OK;
+            }
+        }
+    }
+
+    return AMEDIA_ERROR_END_OF_STREAM;
+}
+
+media_status_t MatroskaSource::mp3FrameRead(
+        MediaBufferHelper **out, const ReadOptions *options,
+        int64_t targetSampleTimeUs) {
+    MediaBufferHelper *frame = *mPendingFrames.begin();
+    int64_t seekTimeUs;
+    ReadOptions::SeekMode mode;
+    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
+        CHECK(AMediaFormat_getInt64(frame->meta_data(),
+                    AMEDIAFORMAT_KEY_TIME_US, &mCurrentTS));
+        if (mCurrentTS < 0) {
+            mCurrentTS = 0;
+            AMediaFormat_setInt64(frame->meta_data(),
+                    AMEDIAFORMAT_KEY_TIME_US, mCurrentTS);
+        }
+    }
+
+    int32_t start = -1;
+    while (start < 0) {
+        //find header start position
+        findMP3Header(&mMP3Header,
+            (const uint8_t*)frame->data() + frame->range_offset(),
+            frame->range_length(), &start);
+        ALOGV("start=%d, frame->range_length() = %zu, frame->range_offset() =%zu",
+                      start, frame->range_length(), frame->range_offset());
+        if (start >= 0)
+            break;
+        frame->release();
+        mPendingFrames.erase(mPendingFrames.begin());
+        while (mPendingFrames.empty()) {
+            media_status_t err = readBlock();
+            if (err != OK) {
+                clearPendingFrames();
+                return err;
+            }
+        }
+        frame = *mPendingFrames.begin();
+    }
+
+    frame->set_range(frame->range_offset() + start, frame->range_length() - start);
+
+    uint32_t header = *(uint32_t*)((uint8_t*)frame->data() + frame->range_offset());
+    header = ((header >> 24) & 0xff) | ((header >> 8) & 0xff00) |
+                    ((header << 8) & 0xff0000) | ((header << 24) & 0xff000000);
+    size_t frame_size;
+    int out_sampling_rate;
+    int out_channels;
+    int out_bitrate;
+    if (!GetMPEGAudioFrameSize(header, &frame_size,
+                               &out_sampling_rate, &out_channels, &out_bitrate)) {
+        ALOGE("MP3 Header read fail!!");
+        return AMEDIA_ERROR_UNSUPPORTED;
+    }
+
+    MediaBufferHelper *buffer;
+    mBufferGroup->acquire_buffer(&buffer, false /* nonblocking */, frame_size /* requested size */);
+    buffer->set_range(0, frame_size);
+
+    uint8_t *data = static_cast<uint8_t *>(buffer->data());
+    ALOGV("MP3 frame %zu frame->range_length() %zu", frame_size, frame->range_length());
+
+    if (frame_size > frame->range_length()) {
+        memcpy(data, (uint8_t*)(frame->data()) + frame->range_offset(), frame->range_length());
+        size_t sumSize = 0;
+        sumSize += frame->range_length();
+        size_t needSize = frame_size - frame->range_length();
+        frame->release();
+        mPendingFrames.erase(mPendingFrames.begin());
+        while (mPendingFrames.empty()) {
+            media_status_t err = readBlock();
+            if (err != OK) {
+                clearPendingFrames();
+                return err;
+            }
+        }
+        frame = *mPendingFrames.begin();
+        size_t offset = frame->range_offset();
+        size_t size = frame->range_length();
+
+        // The next buffered frame is not enough to fulfill the mp3 frame;
+        // keep reading until the mp3 frame is complete.
+        while (size < needSize) {
+            memcpy(data + sumSize, (uint8_t*)(frame->data()) + offset, size);
+            needSize -= size;
+            sumSize += size;
+            frame->release();
+            mPendingFrames.erase(mPendingFrames.begin());
+            while (mPendingFrames.empty()) {
+                media_status_t err = readBlock();
+                if (err != OK) {
+                    clearPendingFrames();
+                    return err;
+                }
+            }
+            frame = *mPendingFrames.begin();
+            offset = frame->range_offset();
+            size = frame->range_length();
+        }
+        memcpy(data + sumSize, (uint8_t*)(frame->data()) + offset, needSize);
+        frame->set_range(offset + needSize, size - needSize);
+    } else {
+        size_t offset = frame->range_offset();
+        size_t size = frame->range_length();
+        memcpy(data, (uint8_t*)(frame->data()) + offset, frame_size);
+        frame->set_range(offset + frame_size, size - frame_size);
+    }
+    if (frame->range_length() < 4) {
+        frame->release();
+        frame = NULL;
+        mPendingFrames.erase(mPendingFrames.begin());
+    }
+    ALOGV("MatroskaSource::read MP3 frame kKeyTime=%lld,kKeyTargetTime=%lld",
+                    (long long)mCurrentTS, (long long)targetSampleTimeUs);
+    AMediaFormat_setInt64(buffer->meta_data(),
+            AMEDIAFORMAT_KEY_TIME_US, mCurrentTS);
+    mCurrentTS += (int64_t)frame_size * 8000ll / out_bitrate;
+
+    if (targetSampleTimeUs >= 0ll)
+        AMediaFormat_setInt64(buffer->meta_data(),
+                AMEDIAFORMAT_KEY_TARGET_TIME, targetSampleTimeUs);
+    *out = buffer;
+    ALOGV("MatroskaSource::read MP3, keyTime=%lld for next frame", (long long)mCurrentTS);
+    return AMEDIA_OK;
+}
+
 media_status_t MatroskaSource::read(
         MediaBufferHelper **out, const ReadOptions *options) {
     *out = NULL;
@@ -833,6 +1040,10 @@
         }
     }
 
+    if (mType == MP3) {
+        return mp3FrameRead(out, options, targetSampleTimeUs);
+    }
+
     MediaBufferHelper *frame = *mPendingFrames.begin();
     mPendingFrames.erase(mPendingFrames.begin());
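
Note on the timestamp bookkeeping in mp3FrameRead() above: after each emitted frame, mCurrentTS is advanced by that frame's duration. A minimal sketch of the arithmetic, assuming GetMPEGAudioFrameSize() reports out_bitrate in kbps (which is what the 8000 scale factor implies); Mp3FrameDurationUs is a hypothetical helper, not part of this change.

#include <cstddef>
#include <cstdint>

// Sketch only: duration_us = bytes * 8 bits / (kbps * 1000 bits/s) * 1e6 us/s
//                          = bytes * 8000 / kbps
static inline int64_t Mp3FrameDurationUs(size_t frameBytes, int bitrateKbps) {
    return static_cast<int64_t>(frameBytes) * 8000ll / bitrateKbps;
}
// Example: a 417-byte frame at 128 kbps advances mCurrentTS by 26062 us,
// close to the nominal 1152-sample frame duration at 44.1 kHz (~26.1 ms).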
 
diff --git a/media/extractors/ogg/OggExtractor.cpp b/media/extractors/ogg/OggExtractor.cpp
index d99493d..b63ae6b 100644
--- a/media/extractors/ogg/OggExtractor.cpp
+++ b/media/extractors/ogg/OggExtractor.cpp
@@ -1280,7 +1280,7 @@
         //ALOGI("comment #%d: '%s'", i + 1, mVc.user_comments[i]);
     }
 
-    AMediaFormat_getInt32(mFileMeta, "haptic", &mHapticChannelCount);
+    AMediaFormat_getInt32(mFileMeta, AMEDIAFORMAT_KEY_HAPTIC_CHANNEL_COUNT, &mHapticChannelCount);
 }
 
 void MyOggExtractor::setChannelMask(int channelCount) {
@@ -1297,6 +1297,8 @@
             const audio_channel_mask_t channelMask = audio_channel_out_mask_from_count(
                     audioChannelCount) | hapticChannelMask;
             AMediaFormat_setInt32(mMeta, AMEDIAFORMAT_KEY_CHANNEL_MASK, channelMask);
+            AMediaFormat_setInt32(
+                    mMeta, AMEDIAFORMAT_KEY_HAPTIC_CHANNEL_COUNT, mHapticChannelCount);
         }
     } else {
         AMediaFormat_setInt32(mMeta, AMEDIAFORMAT_KEY_CHANNEL_MASK,
diff --git a/media/libeffects/lvm/lib/Android.bp b/media/libeffects/lvm/lib/Android.bp
index 7a32d3f..d150f18 100644
--- a/media/libeffects/lvm/lib/Android.bp
+++ b/media/libeffects/lvm/lib/Android.bp
@@ -132,6 +132,9 @@
     shared_libs: [
         "liblog",
     ],
+    header_libs: [
+        "libhardware_headers"
+    ],
     cflags: [
         "-fvisibility=hidden",
         "-DBUILD_FLOAT",
diff --git a/media/libeffects/lvm/lib/Bundle/lib/LVM.h b/media/libeffects/lvm/lib/Bundle/lib/LVM.h
index 83ecae1..5082a53 100644
--- a/media/libeffects/lvm/lib/Bundle/lib/LVM.h
+++ b/media/libeffects/lvm/lib/Bundle/lib/LVM.h
@@ -298,6 +298,7 @@
     LVM_PSA_DecaySpeed_en       PSA_PeakDecayRate;      /* Peak value decay rate*/
 #ifdef SUPPORT_MC
     LVM_INT32                   NrChannels;
+    LVM_INT32                   ChMask;
 #endif
 
 } LVM_ControlParams_t;
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Control.c b/media/libeffects/lvm/lib/Bundle/src/LVM_Control.c
index 62b4c73..1d95342 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Control.c
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Control.c
@@ -93,6 +93,7 @@
 
 #ifdef SUPPORT_MC
     pInstance->Params.NrChannels = pParams->NrChannels;
+    pInstance->Params.ChMask     = pParams->ChMask;
 #endif
     /*
      * Cinema Sound parameters
@@ -584,6 +585,7 @@
 
 #ifdef SUPPORT_MC
     pInstance->NrChannels = LocalParams.NrChannels;
+    pInstance->ChMask = LocalParams.ChMask;
 #endif
 
     /* Clear all internal data if format change*/
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Private.h b/media/libeffects/lvm/lib/Bundle/src/LVM_Private.h
index 19d1532..cdd3134 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Private.h
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Private.h
@@ -291,6 +291,7 @@
 
 #ifdef SUPPORT_MC
     LVM_INT16              NrChannels;
+    LVM_INT32              ChMask;
 #endif
 
 } LVM_Instance_t;
diff --git a/media/libeffects/lvm/lib/Bundle/src/LVM_Process.c b/media/libeffects/lvm/lib/Bundle/src/LVM_Process.c
index 94ba278..8d30a61 100644
--- a/media/libeffects/lvm/lib/Bundle/src/LVM_Process.c
+++ b/media/libeffects/lvm/lib/Bundle/src/LVM_Process.c
@@ -21,6 +21,7 @@
 /*  Includes                                                                            */
 /*                                                                                      */
 /****************************************************************************************/
+#include <system/audio.h>
 
 #include "LVM_Private.h"
 #include "VectorArithmetic.h"
@@ -67,6 +68,7 @@
     LVM_ReturnStatus_en  Status;
 #ifdef SUPPORT_MC
     LVM_INT32           NrChannels  = pInstance->NrChannels;
+    LVM_INT32           ChMask      = pInstance->ChMask;
 #define NrFrames SampleCount  // alias for clarity
 #endif
 
@@ -119,6 +121,7 @@
 #ifdef SUPPORT_MC
         /* Update the local variable NrChannels from pInstance->NrChannels value */
         NrChannels = pInstance->NrChannels;
+        ChMask     = pInstance->ChMask;
 #endif
 
         if(Status != LVM_SUCCESS)
@@ -140,6 +143,7 @@
         pToProcess = pOutData;
 #ifdef SUPPORT_MC
         NrChannels = 2;
+        ChMask     = AUDIO_CHANNEL_OUT_STEREO;
 #endif
     }
 
@@ -254,18 +258,24 @@
 
             }
 #ifdef SUPPORT_MC
-            /* TODO - Multichannel support to be added */
-            if (NrChannels == 2)
+            /*
+             * Volume balance
+             */
+            LVC_MixSoft_1St_MC_float_SAT(&pInstance->VC_BalanceMix,
+                                          pProcessed,
+                                          pProcessed,
+                                          NrFrames,
+                                          NrChannels,
+                                          ChMask);
+#else
+            /*
+             * Volume balance
+             */
+            LVC_MixSoft_1St_2i_D16C31_SAT(&pInstance->VC_BalanceMix,
+                                          pProcessed,
+                                          pProcessed,
+                                          SampleCount);
 #endif
-            {
-                /*
-                 * Volume balance
-                 */
-                LVC_MixSoft_1St_2i_D16C31_SAT(&pInstance->VC_BalanceMix,
-                                              pProcessed,
-                                              pProcessed,
-                                              SampleCount);
-            }
 
             /*
              * Perform Parametric Spectrum Analysis
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Core_MixHard_1St_2i_D16C31_SAT.c b/media/libeffects/lvm/lib/Common/src/LVC_Core_MixHard_1St_2i_D16C31_SAT.c
index eb5755e..db76cd1 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Core_MixHard_1St_2i_D16C31_SAT.c
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Core_MixHard_1St_2i_D16C31_SAT.c
@@ -59,6 +59,31 @@
 
 
 }
+#ifdef SUPPORT_MC
+void LVC_Core_MixHard_1St_MC_float_SAT (Mix_Private_FLOAT_st **ptrInstance,
+                                         const LVM_FLOAT      *src,
+                                         LVM_FLOAT            *dst,
+                                         LVM_INT16            NrFrames,
+                                         LVM_INT16            NrChannels)
+{
+    LVM_FLOAT  Temp;
+    LVM_INT16 ii, jj;
+    for (ii = NrFrames; ii != 0; ii--)
+    {
+        for (jj = 0; jj < NrChannels; jj++)
+        {
+            Mix_Private_FLOAT_st  *pInstance1 = (Mix_Private_FLOAT_st *)(ptrInstance[jj]);
+            Temp = ((LVM_FLOAT)*(src++) * (LVM_FLOAT)pInstance1->Current);
+            if (Temp > 1.0f)
+                *dst++ = 1.0f;
+            else if (Temp < -1.0f)
+                *dst++ = -1.0f;
+            else
+                *dst++ = (LVM_FLOAT)Temp;
+        }
+    }
+}
+#endif
 #else
 void LVC_Core_MixHard_1St_2i_D16C31_SAT( LVMixer3_st        *ptrInstance1,
                                          LVMixer3_st        *ptrInstance2,
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Core_MixSoft_1St_2i_D16C31_WRA.c b/media/libeffects/lvm/lib/Common/src/LVC_Core_MixSoft_1St_2i_D16C31_WRA.c
index 656a117..56b5dae 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Core_MixSoft_1St_2i_D16C31_WRA.c
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Core_MixSoft_1St_2i_D16C31_WRA.c
@@ -146,6 +146,51 @@
     pInstanceR->Current = CurrentR;
 
 }
+#ifdef SUPPORT_MC
+void LVC_Core_MixSoft_1St_MC_float_WRA (Mix_Private_FLOAT_st **ptrInstance,
+                                         const LVM_FLOAT      *src,
+                                         LVM_FLOAT            *dst,
+                                         LVM_INT16            NrFrames,
+                                         LVM_INT16            NrChannels)
+{
+    LVM_INT32   ii, ch;
+    LVM_FLOAT   Temp = 0.0f;
+    LVM_FLOAT   tempCurrent[NrChannels];
+    for (ch = 0; ch < NrChannels; ch++)
+    {
+        tempCurrent[ch] = ptrInstance[ch]->Current;
+    }
+    for (ii = NrFrames; ii > 0; ii--)
+    {
+        for (ch = 0; ch < NrChannels; ch++)
+        {
+            Mix_Private_FLOAT_st *pInstance = ptrInstance[ch];
+            const LVM_FLOAT   Delta = pInstance->Delta;
+            LVM_FLOAT         Current = tempCurrent[ch];
+            const LVM_FLOAT   Target = pInstance->Target;
+            if (Current < Target)
+            {
+                ADD2_SAT_FLOAT(Current, Delta, Temp);
+                Current = Temp;
+                if (Current > Target)
+                    Current = Target;
+            }
+            else
+            {
+                Current -= Delta;
+                if (Current < Target)
+                    Current = Target;
+            }
+            *dst++ = *src++ * Current;
+            tempCurrent[ch] = Current;
+        }
+    }
+    for (ch = 0; ch < NrChannels; ch++)
+    {
+        ptrInstance[ch]->Current = tempCurrent[ch];
+    }
+}
+#endif
 #else
 void LVC_Core_MixSoft_1St_2i_D16C31_WRA( LVMixer3_st        *ptrInstance1,
                                          LVMixer3_st        *ptrInstance2,
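
Note on the multichannel soft-mix core added above: per frame, each channel's Current gain moves toward Target by Delta and is clamped at Target. A minimal standalone sketch of that ramp, with hypothetical names; the original additionally saturates the upward add via ADD2_SAT_FLOAT.

// One gain-ramp step for a single channel (sketch, not the library API).
static inline float RampGainStep(float current, float target, float delta) {
    if (current < target) {
        current += delta;                  // ramp up (saturated in the original)
        if (current > target) current = target;
    } else {
        current -= delta;                  // ramp down
        if (current < target) current = target;
    }
    return current;
}
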
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_2i_D16C31_SAT.c b/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_2i_D16C31_SAT.c
index bd5a925..a4682d3 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_2i_D16C31_SAT.c
+++ b/media/libeffects/lvm/lib/Common/src/LVC_MixSoft_1St_2i_D16C31_SAT.c
@@ -19,6 +19,8 @@
    INCLUDE FILES
 ***********************************************************************************/
 
+#include <system/audio.h>
+
 #include "LVC_Mixer_Private.h"
 #include "VectorArithmetic.h"
 #include "ScalarArithmetic.h"
@@ -30,10 +32,207 @@
 #define TRUE          1
 #define FALSE         0
 
+#define ARRAY_SIZE(a) ((sizeof(a)) / (sizeof(*(a))))
+
 /**********************************************************************************
    FUNCTION LVC_MixSoft_1St_2i_D16C31_SAT
 ***********************************************************************************/
 #ifdef BUILD_FLOAT
+#ifdef SUPPORT_MC
+/* This threshold is used to decide on the processing to be applied on
+ * front center and back center channels
+ */
+#define LVM_VOL_BAL_THR (0.000016f)
+void LVC_MixSoft_1St_MC_float_SAT (LVMixer3_2St_FLOAT_st *ptrInstance,
+                                    const LVM_FLOAT       *src,
+                                    LVM_FLOAT             *dst,
+                                    LVM_INT16             NrFrames,
+                                    LVM_INT32             NrChannels,
+                                    LVM_INT32             ChMask)
+{
+    char        HardMixing = TRUE;
+    LVM_FLOAT   TargetGain;
+    Mix_Private_FLOAT_st  Target_lfe = {LVM_MAXFLOAT, LVM_MAXFLOAT, LVM_MAXFLOAT};
+    Mix_Private_FLOAT_st  Target_ctr = {LVM_MAXFLOAT, LVM_MAXFLOAT, LVM_MAXFLOAT};
+    Mix_Private_FLOAT_st  *pInstance1 = \
+                              (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[0].PrivateParams);
+    Mix_Private_FLOAT_st  *pInstance2 = \
+                              (Mix_Private_FLOAT_st *)(ptrInstance->MixerStream[1].PrivateParams);
+    Mix_Private_FLOAT_st  *pMixPrivInst[4] = {pInstance1, pInstance2, &Target_ctr, &Target_lfe};
+    Mix_Private_FLOAT_st  *pInstance[NrChannels];
+
+    if (audio_channel_mask_get_representation(ChMask)
+            == AUDIO_CHANNEL_REPRESENTATION_INDEX)
+    {
+        for (int i = 0; i < 2; i++)
+        {
+            pInstance[i] = pMixPrivInst[i];
+        }
+        for (int i = 2; i < NrChannels; i++)
+        {
+            pInstance[i] = pMixPrivInst[2];
+        }
+    }
+    else
+    {
+        // TODO: Combine with system/media/audio_utils/Balance.cpp
+        // Constants in system/media/audio/include/system/audio-base.h
+        // 'mixInstIdx' is used to map the appropriate mixer instance for each channel.
+        const int mixInstIdx[] = {
+            0, // AUDIO_CHANNEL_OUT_FRONT_LEFT            = 0x1u,
+            1, // AUDIO_CHANNEL_OUT_FRONT_RIGHT           = 0x2u,
+            2, // AUDIO_CHANNEL_OUT_FRONT_CENTER          = 0x4u,
+            3, // AUDIO_CHANNEL_OUT_LOW_FREQUENCY         = 0x8u,
+            0, // AUDIO_CHANNEL_OUT_BACK_LEFT             = 0x10u,
+            1, // AUDIO_CHANNEL_OUT_BACK_RIGHT            = 0x20u,
+            0, // AUDIO_CHANNEL_OUT_FRONT_LEFT_OF_CENTER  = 0x40u,
+            1, // AUDIO_CHANNEL_OUT_FRONT_RIGHT_OF_CENTER = 0x80u,
+            2, // AUDIO_CHANNEL_OUT_BACK_CENTER           = 0x100u,
+            0, // AUDIO_CHANNEL_OUT_SIDE_LEFT             = 0x200u,
+            1, // AUDIO_CHANNEL_OUT_SIDE_RIGHT            = 0x400u,
+            2, // AUDIO_CHANNEL_OUT_TOP_CENTER            = 0x800u,
+            0, // AUDIO_CHANNEL_OUT_TOP_FRONT_LEFT        = 0x1000u,
+            2, // AUDIO_CHANNEL_OUT_TOP_FRONT_CENTER      = 0x2000u,
+            1, // AUDIO_CHANNEL_OUT_TOP_FRONT_RIGHT       = 0x4000u,
+            0, // AUDIO_CHANNEL_OUT_TOP_BACK_LEFT         = 0x8000u,
+            2, // AUDIO_CHANNEL_OUT_TOP_BACK_CENTER       = 0x10000u,
+            1, // AUDIO_CHANNEL_OUT_TOP_BACK_RIGHT        = 0x20000u,
+            0, // AUDIO_CHANNEL_OUT_TOP_SIDE_LEFT         = 0x40000u,
+            1, // AUDIO_CHANNEL_OUT_TOP_SIDE_RIGHT        = 0x80000u
+        };
+        if (pInstance1->Target <= LVM_VOL_BAL_THR ||
+            pInstance2->Target <= LVM_VOL_BAL_THR)
+        {
+            Target_ctr.Target  = 0.0f;
+            Target_ctr.Current = 0.0f;
+            Target_ctr.Delta   = 0.0f;
+        }
+        const unsigned int idxArrSize = ARRAY_SIZE(mixInstIdx);
+        for (unsigned int i = 0, channel = ChMask; channel != 0; ++i)
+        {
+            const unsigned int idx = __builtin_ctz(channel);
+            if (idx < idxArrSize)
+            {
+                pInstance[i] = pMixPrivInst[mixInstIdx[idx]];
+            }
+            else
+            {
+                pInstance[i] = pMixPrivInst[2];
+            }
+            channel &= ~(1 << idx);
+        }
+    }
+
+    if (NrFrames <= 0)    return;
+
+    /******************************************************************************
+       SOFT MIXING
+    *******************************************************************************/
+
+    if ((pInstance1->Current != pInstance1->Target) ||
+        (pInstance2->Current != pInstance2->Target))
+    {
+        // TODO: combine similar checks below.
+        if (pInstance1->Delta == LVM_MAXFLOAT
+                || Abs_Float(pInstance1->Current - pInstance1->Target) < pInstance1->Delta)
+        {
+            /* Difference is not significant anymore. Make them equal. */
+            pInstance1->Current = pInstance1->Target;
+            TargetGain = pInstance1->Target;
+            LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[0]), TargetGain);
+        }
+        else
+        {
+            /* Soft mixing has to be applied */
+            HardMixing = FALSE;
+        }
+
+        if (HardMixing == TRUE)
+        {
+            if (pInstance2->Delta == LVM_MAXFLOAT
+                    || Abs_Float(pInstance2->Current - pInstance2->Target) < pInstance2->Delta)
+            {
+                /* Difference is not significant anymore. Make them equal. */
+                pInstance2->Current = pInstance2->Target;
+                TargetGain = pInstance2->Target;
+                LVC_Mixer_SetTarget(&(ptrInstance->MixerStream[1]), TargetGain);
+            }
+            else
+            {
+                /* Soft mixing has to be applied */
+                HardMixing = FALSE;
+            }
+        }
+
+        if (HardMixing == FALSE)
+        {
+             LVC_Core_MixSoft_1St_MC_float_WRA (&pInstance[0],
+                                                 src, dst, NrFrames, NrChannels);
+        }
+    }
+
+    /******************************************************************************
+       HARD MIXING
+    *******************************************************************************/
+
+    if (HardMixing == TRUE)
+    {
+        if ((pInstance1->Target == LVM_MAXFLOAT) && (pInstance2->Target == LVM_MAXFLOAT))
+        {
+            if (src != dst)
+            {
+                Copy_Float(src, dst, NrFrames*NrChannels);
+            }
+        }
+        else
+        {
+            LVC_Core_MixHard_1St_MC_float_SAT(&(pInstance[0]),
+                                               src, dst, NrFrames, NrChannels);
+        }
+    }
+
+    /******************************************************************************
+       CALL BACK
+    *******************************************************************************/
+
+    if (ptrInstance->MixerStream[0].CallbackSet)
+    {
+        if (Abs_Float(pInstance1->Current - pInstance1->Target) < pInstance1->Delta)
+        {
+            pInstance1->Current = pInstance1->Target; /* Difference is not significant anymore. \
+                                                         Make them equal. */
+            TargetGain = pInstance1->Target;
+            LVC_Mixer_SetTarget(&ptrInstance->MixerStream[0], TargetGain);
+            ptrInstance->MixerStream[0].CallbackSet = FALSE;
+            if (ptrInstance->MixerStream[0].pCallBack != 0)
+            {
+                (*ptrInstance->MixerStream[0].pCallBack) (\
+                    ptrInstance->MixerStream[0].pCallbackHandle,
+                    ptrInstance->MixerStream[0].pGeneralPurpose,
+                    ptrInstance->MixerStream[0].CallbackParam);
+            }
+        }
+    }
+    if (ptrInstance->MixerStream[1].CallbackSet)
+    {
+        if (Abs_Float(pInstance2->Current - pInstance2->Target) < pInstance2->Delta)
+        {
+            pInstance2->Current = pInstance2->Target; /* Difference is not significant anymore.
+                                                         Make them equal. */
+            TargetGain = pInstance2->Target;
+            LVC_Mixer_SetTarget(&ptrInstance->MixerStream[1], TargetGain);
+            ptrInstance->MixerStream[1].CallbackSet = FALSE;
+            if (ptrInstance->MixerStream[1].pCallBack != 0)
+            {
+                (*ptrInstance->MixerStream[1].pCallBack) (\
+                    ptrInstance->MixerStream[1].pCallbackHandle,
+                    ptrInstance->MixerStream[1].pGeneralPurpose,
+                    ptrInstance->MixerStream[1].CallbackParam);
+            }
+        }
+    }
+}
+#endif
 void LVC_MixSoft_1St_2i_D16C31_SAT( LVMixer3_2St_FLOAT_st *ptrInstance,
                                     const LVM_FLOAT             *src,
                                     LVM_FLOAT             *dst,
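
Note on the channel-mask walk in LVC_MixSoft_1St_MC_float_SAT above: __builtin_ctz() picks the lowest set bit of the positional mask (the next channel in canonical order), mixInstIdx maps that bit to one of the four mixer instances (left, right, the center-like Target_ctr, or Target_lfe), unknown bits fall back to the center-like instance, and the bit is then cleared. A minimal sketch with hypothetical names:

#include <cstddef>
#include <cstdint>

// Map each channel position in 'chMask' to a mixer-instance index (sketch).
static void MapChannelsToInstances(uint32_t chMask,
                                   const int *instanceForBit, size_t tableSize,
                                   int *outInstanceIdx /* one entry per set bit */) {
    for (unsigned i = 0; chMask != 0; ++i) {
        const unsigned bit = __builtin_ctz(chMask);    // lowest remaining channel bit
        outInstanceIdx[i] = (bit < tableSize) ? instanceForBit[bit]
                                              : 2;     // unknown bits use the center-like instance
        chMask &= ~(1u << bit);
    }
}
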
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Mixer.h b/media/libeffects/lvm/lib/Common/src/LVC_Mixer.h
index 7f18747..199d529 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Mixer.h
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Mixer.h
@@ -224,6 +224,14 @@
 /* Gain values should not be more that 1.0                                        */
 /**********************************************************************************/
 #ifdef BUILD_FLOAT
+#ifdef SUPPORT_MC
+void LVC_MixSoft_1St_MC_float_SAT(LVMixer3_2St_FLOAT_st *pInstance,
+                                   const   LVM_FLOAT     *src,
+                                   LVM_FLOAT             *dst,   /* dst can be equal to src */
+                                   LVM_INT16             NrFrames,
+                                   LVM_INT32             NrChannels,
+                                   LVM_INT32             ChMask);
+#endif
 void LVC_MixSoft_1St_2i_D16C31_SAT(LVMixer3_2St_FLOAT_st *pInstance,
                                    const   LVM_FLOAT     *src,
                                    LVM_FLOAT             *dst,   /* dst can be equal to src */
diff --git a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_Private.h b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_Private.h
index f10094b..453a6a5 100644
--- a/media/libeffects/lvm/lib/Common/src/LVC_Mixer_Private.h
+++ b/media/libeffects/lvm/lib/Common/src/LVC_Mixer_Private.h
@@ -116,6 +116,13 @@
 /* Gain values should not be more that 1.0                                        */
 /**********************************************************************************/
 #ifdef BUILD_FLOAT
+#ifdef SUPPORT_MC
+void LVC_Core_MixSoft_1St_MC_float_WRA(Mix_Private_FLOAT_st **ptrInstance,
+                                         const LVM_FLOAT      *src,
+                                         LVM_FLOAT            *dst,
+                                         LVM_INT16            NrFrames,
+                                         LVM_INT16            NrChannels);
+#endif
 void LVC_Core_MixSoft_1St_2i_D16C31_WRA( LVMixer3_FLOAT_st        *ptrInstance1,
                                          LVMixer3_FLOAT_st        *ptrInstance2,
                                          const LVM_FLOAT    *src,
@@ -136,6 +143,13 @@
 /* Gain values should not be more that 1.0                                        */
 /**********************************************************************************/
 #ifdef BUILD_FLOAT
+#ifdef SUPPORT_MC
+void LVC_Core_MixHard_1St_MC_float_SAT(Mix_Private_FLOAT_st **ptrInstance,
+                                         const LVM_FLOAT      *src,
+                                         LVM_FLOAT            *dst,
+                                         LVM_INT16            NrFrames,
+                                         LVM_INT16            NrChannels);
+#endif
 void LVC_Core_MixHard_1St_2i_D16C31_SAT( LVMixer3_FLOAT_st        *ptrInstance1,
                                          LVMixer3_FLOAT_st        *ptrInstance2,
                                          const LVM_FLOAT    *src,
diff --git a/media/libeffects/lvm/tests/build_and_run_all_unit_tests.sh b/media/libeffects/lvm/tests/build_and_run_all_unit_tests.sh
index 1a874a3..fd21545 100755
--- a/media/libeffects/lvm/tests/build_and_run_all_unit_tests.sh
+++ b/media/libeffects/lvm/tests/build_and_run_all_unit_tests.sh
@@ -36,6 +36,10 @@
     "-csE -tE"
     "-csE -eqE" "-tE -eqE"
     "-csE -tE -bE -M -eqE"
+    "-tE -eqE -vcBal:96 -M"
+    "-tE -eqE -vcBal:-96 -M"
+    "-tE -eqE -vcBal:0 -M"
+    "-tE -eqE -bE -vcBal:30 -M"
 )
 
 fs_arr=(
@@ -60,22 +64,22 @@
 do
     for fs in ${fs_arr[*]}
     do
-        for ch in {1..8}
+        for chMask in {0..22}
         do
             adb shell $testdir/lvmtest -i:$testdir/sinesweepraw.raw \
-                -o:$testdir/sinesweep_$((ch))_$((fs)).raw -ch:$ch -fs:$fs $flags
+                -o:$testdir/sinesweep_$((chMask))_$((fs)).raw -chMask:$chMask -fs:$fs $flags
 
             # two channel files should be identical to higher channel
             # computation (first 2 channels).
             # Do not compare cases where -bE is in flags (due to mono computation)
-            if [[ $flags != *"-bE"* ]] && [ "$ch" -gt 2 ]
+            if [[ $flags != *"-bE"* ]] && [[ "$chMask" -gt 1 ]]
             then
-                adb shell cmp $testdir/sinesweep_2_$((fs)).raw \
-                    $testdir/sinesweep_$((ch))_$((fs)).raw
-            elif [[ $flags == *"-bE"* ]] && [ "$ch" -gt 2 ]
+                adb shell cmp $testdir/sinesweep_1_$((fs)).raw \
+                    $testdir/sinesweep_$((chMask))_$((fs)).raw
+            elif [[ $flags == *"-bE"* ]] && [[ "$chMask" -gt 1 ]]
             then
-                adb shell $testdir/snr $testdir/sinesweep_2_$((fs)).raw \
-                    $testdir/sinesweep_$((ch))_$((fs)).raw -thr:90.308998
+                adb shell $testdir/snr $testdir/sinesweep_1_$((fs)).raw \
+                    $testdir/sinesweep_$((chMask))_$((fs)).raw -thr:90.308998
             fi
 
         done
diff --git a/media/libeffects/lvm/tests/lvmtest.cpp b/media/libeffects/lvm/tests/lvmtest.cpp
index 416bdaa..5b58dd1 100644
--- a/media/libeffects/lvm/tests/lvmtest.cpp
+++ b/media/libeffects/lvm/tests/lvmtest.cpp
@@ -24,6 +24,7 @@
 #include <audio_utils/channels.h>
 #include <audio_utils/primitives.h>
 #include <log/log.h>
+#include <system/audio.h>
 
 #include "EffectBundle.h"
 #include "LVM_Private.h"
@@ -76,6 +77,8 @@
 struct lvmConfigParams_t {
   int              samplingFreq    = 44100;
   int              nrChannels      = 2;
+  int              chMask          = AUDIO_CHANNEL_OUT_STEREO;
+  int              vcBal           = 0;
   int              fChannels       = 2;
   bool             monoMode        = false;
   int              bassEffectLevel = 0;
@@ -87,9 +90,36 @@
   LVM_Mode_en      csEnable        = LVM_MODE_OFF;
 };
 
+constexpr audio_channel_mask_t lvmConfigChMask[] = {
+    AUDIO_CHANNEL_OUT_MONO,
+    AUDIO_CHANNEL_OUT_STEREO,
+    AUDIO_CHANNEL_OUT_2POINT1,
+    AUDIO_CHANNEL_OUT_2POINT0POINT2,
+    AUDIO_CHANNEL_OUT_QUAD,
+    AUDIO_CHANNEL_OUT_QUAD_BACK,
+    AUDIO_CHANNEL_OUT_QUAD_SIDE,
+    AUDIO_CHANNEL_OUT_SURROUND,
+    (1 << 4) - 1,
+    AUDIO_CHANNEL_OUT_2POINT1POINT2,
+    AUDIO_CHANNEL_OUT_3POINT0POINT2,
+    AUDIO_CHANNEL_OUT_PENTA,
+    (1 << 5) - 1,
+    AUDIO_CHANNEL_OUT_3POINT1POINT2,
+    AUDIO_CHANNEL_OUT_5POINT1,
+    AUDIO_CHANNEL_OUT_5POINT1_BACK,
+    AUDIO_CHANNEL_OUT_5POINT1_SIDE,
+    (1 << 6) - 1,
+    AUDIO_CHANNEL_OUT_6POINT1,
+    (1 << 7) - 1,
+    AUDIO_CHANNEL_OUT_5POINT1POINT2,
+    AUDIO_CHANNEL_OUT_7POINT1,
+    (1 << 8) - 1,
+};
+
+
 void printUsage() {
   printf("\nUsage: ");
-  printf("\n     <exceutable> -i:<input_file> -o:<out_file> [options]\n");
+  printf("\n     <executable> -i:<input_file> -o:<out_file> [options]\n");
   printf("\nwhere, \n     <inputfile>  is the input file name");
   printf("\n                  on which LVM effects are applied");
   printf("\n     <outputfile> processed output file");
@@ -98,7 +128,34 @@
   printf("\n     -help (or) -h");
   printf("\n           Prints this usage information");
   printf("\n");
-  printf("\n     -ch:<process_channels> (1 through 8)\n\n");
+  printf("\n     -chMask:<channel_mask>\n");
+  printf("\n         0  - AUDIO_CHANNEL_OUT_MONO");
+  printf("\n         1  - AUDIO_CHANNEL_OUT_STEREO");
+  printf("\n         2  - AUDIO_CHANNEL_OUT_2POINT1");
+  printf("\n         3  - AUDIO_CHANNEL_OUT_2POINT0POINT2");
+  printf("\n         4  - AUDIO_CHANNEL_OUT_QUAD");
+  printf("\n         5  - AUDIO_CHANNEL_OUT_QUAD_BACK");
+  printf("\n         6  - AUDIO_CHANNEL_OUT_QUAD_SIDE");
+  printf("\n         7  - AUDIO_CHANNEL_OUT_SURROUND");
+  printf("\n         8  - canonical channel index mask for 4 ch: (1 << 4) - 1");
+  printf("\n         9  - AUDIO_CHANNEL_OUT_2POINT1POINT2");
+  printf("\n         10 - AUDIO_CHANNEL_OUT_3POINT0POINT2");
+  printf("\n         11 - AUDIO_CHANNEL_OUT_PENTA");
+  printf("\n         12 - canonical channel index mask for 5 ch: (1 << 5) - 1");
+  printf("\n         13 - AUDIO_CHANNEL_OUT_3POINT1POINT2");
+  printf("\n         14 - AUDIO_CHANNEL_OUT_5POINT1");
+  printf("\n         15 - AUDIO_CHANNEL_OUT_5POINT1_BACK");
+  printf("\n         16 - AUDIO_CHANNEL_OUT_5POINT1_SIDE");
+  printf("\n         17 - canonical channel index mask for 6 ch: (1 << 6) - 1");
+  printf("\n         18 - AUDIO_CHANNEL_OUT_6POINT1");
+  printf("\n         19 - canonical channel index mask for 7 ch: (1 << 7) - 1");
+  printf("\n         20 - AUDIO_CHANNEL_OUT_5POINT1POINT2");
+  printf("\n         21 - AUDIO_CHANNEL_OUT_7POINT1");
+  printf("\n         22 - canonical channel index mask for 8 ch: (1 << 8) - 1");
+  printf("\n         default 0");
+  printf("\n     -vcBal:<Left Right Balance control in dB [-96 to 96 dB]>");
+  printf("\n            Negative values reduce the Right channel, positive values reduce the Left channel");
+  printf("\n                 default 0");
   printf("\n     -fch:<file_channels> (1 through 8)\n\n");
   printf("\n     -M");
   printf("\n           Mono mode (force all input audio channels to be identical)");
@@ -298,6 +355,7 @@
   params->OperatingMode = LVM_MODE_ON;
   params->SampleRate = LVM_FS_44100;
   params->SourceFormat = LVM_STEREO;
+  params->ChMask       = AUDIO_CHANNEL_OUT_STEREO;
   params->SpeakerType = LVM_HEADPHONES;
 
   pContext->pBundledContext->SampleRate = LVM_FS_44100;
@@ -452,13 +510,13 @@
   params->OperatingMode = LVM_MODE_ON;
   params->SpeakerType = LVM_HEADPHONES;
 
-  const int nrChannels = plvmConfigParams->nrChannels;
-  params->NrChannels = nrChannels;
-  if (nrChannels == 1) {
+  params->ChMask     = plvmConfigParams->chMask;
+  params->NrChannels = plvmConfigParams->nrChannels;
+  if (params->NrChannels == 1) {
     params->SourceFormat = LVM_MONO;
-  } else if (nrChannels == 2) {
+  } else if (params->NrChannels == 2) {
     params->SourceFormat = LVM_STEREO;
-  } else if (nrChannels > 2 && nrChannels <= 8) { // FCC_2 FCC_8
+  } else if (params->NrChannels > 2 && params->NrChannels <= 8) { // FCC_2 FCC_8
     params->SourceFormat = LVM_MULTICHANNEL;
   } else {
       return -EINVAL;
@@ -531,7 +589,7 @@
 
   /* Volume Control parameters */
   params->VC_EffectLevel = 0;
-  params->VC_Balance = 0;
+  params->VC_Balance = plvmConfigParams->vcBal;
 
   /* Treble Enhancement parameters */
   params->TE_OperatingMode = plvmConfigParams->trebleEnable;
@@ -667,13 +725,21 @@
         return -1;
       }
       lvmConfigParams.samplingFreq = samplingFreq;
-    } else if (!strncmp(argv[i], "-ch:", 4)) {
-      const int nrChannels = atoi(argv[i] + 4);
-      if (nrChannels > 8 || nrChannels < 1) {
-        printf("Error: Unsupported number of channels : %d\n", nrChannels);
+    } else if (!strncmp(argv[i], "-chMask:", 8)) {
+      const int chMaskConfigIdx = atoi(argv[i] + 8);
+      if (chMaskConfigIdx < 0 || (size_t)chMaskConfigIdx >= std::size(lvmConfigChMask)) {
+        ALOGE("\nError: Unsupported Channel Mask : %d\n", chMaskConfigIdx);
         return -1;
       }
-      lvmConfigParams.nrChannels = nrChannels;
+      const audio_channel_mask_t chMask = lvmConfigChMask[chMaskConfigIdx];
+      lvmConfigParams.chMask = chMask;
+      lvmConfigParams.nrChannels = audio_channel_count_from_out_mask(chMask);
+    } else if (!strncmp(argv[i], "-vcBal:", 7)) {
+      const int vcBalance = atoi(argv[i] + 7);
+      if (vcBalance > 96 || vcBalance < -96) {
+        ALOGE("\nError: Unsupported volume balance value: %d\n", vcBalance);
+        return -1;
+      }
+      lvmConfigParams.vcBal = vcBalance;
     } else if (!strncmp(argv[i], "-fch:", 5)) {
       const int fChannels = atoi(argv[i] + 5);
       if (fChannels > 8 || fChannels < 1) {
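
Note on the new -chMask handling in lvmtest above: the option value is an index into the lvmConfigChMask table, and the channel count is derived from the selected mask instead of being passed directly. ResolveChMaskIndex below is a hypothetical wrapper; it assumes audio_channel_count_from_out_mask() from system/audio.h returns the number of set position bits, as the canonical (1 << n) - 1 entries imply.

#include <cstddef>
#include <iterator>
#include <system/audio.h>

// Sketch: turn a -chMask index into (mask, channel count) using the table above.
static bool ResolveChMaskIndex(int idx, audio_channel_mask_t *mask, int *nrChannels) {
    if (idx < 0 || static_cast<size_t>(idx) >= std::size(lvmConfigChMask)) {
        return false;                       // out of range, rejected as in the parser
    }
    *mask = lvmConfigChMask[idx];
    *nrChannels = audio_channel_count_from_out_mask(*mask);
    return true;                            // e.g. index 14 (5POINT1) yields 6 channels
}
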
diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
index 0c6f8de..3a97905 100644
--- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
+++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
@@ -1315,6 +1315,7 @@
 
 #ifdef SUPPORT_MC
         ActiveParams.NrChannels = NrChannels;
+        ActiveParams.ChMask = pConfig->inputCfg.channels;
 #endif
 
         LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index 9799cad..a529628 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -1,3 +1,10 @@
+cc_defaults {
+    name: "libmedia_defaults",
+    include_dirs: [
+        "bionic/libc/private",
+    ],
+}
+
 cc_library_headers {
     name: "libmedia_headers",
     vendor_available: true,
@@ -86,6 +93,7 @@
     export_shared_lib_headers: [
         "android.hidl.token@1.0-utils",
         "android.hardware.media.omx@1.0",
+        "libgui",
         "libstagefright_foundation",
         "libui",
     ],
@@ -148,6 +156,8 @@
 cc_library {
     name: "libmedia",
 
+    defaults: [ "libmedia_defaults" ],
+
     srcs: [
         "IDataSource.cpp",
         "BufferingSettings.cpp",
@@ -258,6 +268,8 @@
 cc_library_static {
     name: "libmedia_player2_util",
 
+    defaults: [ "libmedia_defaults" ],
+
     srcs: [
         "AudioParameter.cpp",
         "BufferingSettings.cpp",
diff --git a/media/libmedia/MediaUtils.cpp b/media/libmedia/MediaUtils.cpp
index bcc7ebf..31972fa 100644
--- a/media/libmedia/MediaUtils.cpp
+++ b/media/libmedia/MediaUtils.cpp
@@ -22,23 +22,16 @@
 #include <sys/resource.h>
 #include <unistd.h>
 
+#include <bionic_malloc.h>
+
 #include "MediaUtils.h"
 
-extern "C" size_t __cfi_shadow_size();
 extern "C" void __scudo_set_rss_limit(size_t, int) __attribute__((weak));
 
 namespace android {
 
-void limitProcessMemory(
-    const char *property,
-    size_t numberOfBytes,
-    size_t percentageOfTotalMem) {
-
-    if (running_with_asan()) {
-        ALOGW("Running with (HW)ASan, skip enforcing memory limitations.");
-        return;
-    }
-
+void limitProcessMemory(const char *property, size_t numberOfBytes,
+                        size_t percentageOfTotalMem) {
     long pageSize = sysconf(_SC_PAGESIZE);
     long numPages = sysconf(_SC_PHYS_PAGES);
     size_t maxMem = SIZE_MAX;
@@ -66,38 +59,17 @@
         maxMem = propVal;
     }
 
-    // If 64-bit Scudo is in use, enforce the hard RSS limit (in MB).
-    if (maxMem != SIZE_MAX && sizeof(void *) == 8 &&
-        &__scudo_set_rss_limit != 0) {
+    // If Scudo is in use, enforce the hard RSS limit (in MB).
+    if (maxMem != SIZE_MAX && &__scudo_set_rss_limit != 0) {
       __scudo_set_rss_limit(maxMem >> 20, 1);
       ALOGV("Scudo hard RSS limit set to %zu MB", maxMem >> 20);
       return;
     }
 
-    // Increase by the size of the CFI shadow mapping. Most of the shadow is not
-    // backed with physical pages, and it is possible for the result to be
-    // higher than total physical memory. This is fine for RLIMIT_AS.
-    size_t cfi_size = __cfi_shadow_size();
-    if (cfi_size) {
-      ALOGV("cfi shadow size: %zu", cfi_size);
-      if (maxMem <= SIZE_MAX - cfi_size) {
-        maxMem += cfi_size;
-      } else {
-        maxMem = SIZE_MAX;
-      }
+    if (!android_mallopt(M_SET_ALLOCATION_LIMIT_BYTES, &maxMem,
+                         sizeof(maxMem))) {
+      ALOGW("couldn't set allocation limit");
     }
-    ALOGV("actual limit: %zu", maxMem);
-
-    struct rlimit limit;
-    getrlimit(RLIMIT_AS, &limit);
-    ALOGV("original limits: %lld/%lld", (long long)limit.rlim_cur, (long long)limit.rlim_max);
-    limit.rlim_cur = maxMem;
-    setrlimit(RLIMIT_AS, &limit);
-    limit.rlim_cur = -1;
-    limit.rlim_max = -1;
-    getrlimit(RLIMIT_AS, &limit);
-    ALOGV("new limits: %lld/%lld", (long long)limit.rlim_cur, (long long)limit.rlim_max);
-
 }
 
 } // namespace android
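
Note on the rewritten limitProcessMemory() above: instead of raising RLIMIT_AS, the cap is now handed to the allocator, via Scudo's hard RSS limit when that hook is present, otherwise via bionic's android_mallopt(M_SET_ALLOCATION_LIMIT_BYTES, ...). A minimal caller sketch under the same assumption that bionic_malloc.h (added to the includes above) declares that opcode; SetHeapAllocationLimit is a hypothetical name.

#include <bionic_malloc.h>
#include <cstddef>

// Sketch: cap total heap allocations for this process at 'limitBytes'.
static bool SetHeapAllocationLimit(size_t limitBytes) {
    // true on success; false if bionic could not apply the limit.
    return android_mallopt(M_SET_ALLOCATION_LIMIT_BYTES, &limitBytes, sizeof(limitBytes));
}
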
diff --git a/media/libmedia/MediaUtils.h b/media/libmedia/MediaUtils.h
index 26075c4..f80dd30 100644
--- a/media/libmedia/MediaUtils.h
+++ b/media/libmedia/MediaUtils.h
@@ -19,13 +19,6 @@
 
 namespace android {
 
-extern "C" void __asan_init(void) __attribute__((weak));
-extern "C" void __hwasan_init(void) __attribute__((weak));
-
-static inline int running_with_asan() {
-    return &__asan_init != 0 || &__hwasan_init != 0;
-}
-
 /**
    Limit the amount of memory a process can allocate using setrlimit(RLIMIT_AS).
    The value to use will be read from the specified system property, or if the
diff --git a/media/libmedia/NdkWrapper.cpp b/media/libmedia/NdkWrapper.cpp
index ea0547c..c150407 100644
--- a/media/libmedia/NdkWrapper.cpp
+++ b/media/libmedia/NdkWrapper.cpp
@@ -65,6 +65,7 @@
     AMEDIAFORMAT_KEY_FLAC_COMPRESSION_LEVEL,
     AMEDIAFORMAT_KEY_GRID_COLUMNS,
     AMEDIAFORMAT_KEY_GRID_ROWS,
+    AMEDIAFORMAT_KEY_HAPTIC_CHANNEL_COUNT,
     AMEDIAFORMAT_KEY_HEIGHT,
     AMEDIAFORMAT_KEY_INTRA_REFRESH_PERIOD,
     AMEDIAFORMAT_KEY_IS_ADTS,
diff --git a/media/libmedia/TypeConverter.cpp b/media/libmedia/TypeConverter.cpp
index 0301b21..469c5b6 100644
--- a/media/libmedia/TypeConverter.cpp
+++ b/media/libmedia/TypeConverter.cpp
@@ -217,6 +217,7 @@
     MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_APTX_ADAPTIVE),
     MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_LHDC),
     MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_LHDC_LL),
+    MAKE_STRING_FROM_ENUM(AUDIO_FORMAT_APTX_TWSP),
     TERMINATOR
 };
 
diff --git a/media/libmedia/xsd/api/current.txt b/media/libmedia/xsd/api/current.txt
index 0924dd9..05e8a49 100644
--- a/media/libmedia/xsd/api/current.txt
+++ b/media/libmedia/xsd/api/current.txt
@@ -45,10 +45,17 @@
     ctor public CamcorderProfiles();
     method public int getCameraId();
     method public java.util.List<media.profiles.EncoderProfile> getEncoderProfile();
+    method public java.util.List<media.profiles.CamcorderProfiles.ImageDecoding> getImageDecoding();
     method public java.util.List<media.profiles.CamcorderProfiles.ImageEncoding> getImageEncoding();
     method public void setCameraId(int);
   }
 
+  public static class CamcorderProfiles.ImageDecoding {
+    ctor public CamcorderProfiles.ImageDecoding();
+    method public int getMemCap();
+    method public void setMemCap(int);
+  }
+
   public static class CamcorderProfiles.ImageEncoding {
     ctor public CamcorderProfiles.ImageEncoding();
     method public int getQuality();
diff --git a/media/libmedia/xsd/media_profiles.xsd b/media/libmedia/xsd/media_profiles.xsd
index a9687b0..a02252a 100644
--- a/media/libmedia/xsd/media_profiles.xsd
+++ b/media/libmedia/xsd/media_profiles.xsd
@@ -42,6 +42,11 @@
                     <xs:attribute name="quality" type="xs:int"/>
                 </xs:complexType>
             </xs:element>
+            <xs:element name="ImageDecoding" minOccurs="0" maxOccurs="unbounded">
+                <xs:complexType>
+                    <xs:attribute name="memCap" type="xs:int"/>
+                </xs:complexType>
+            </xs:element>
         </xs:sequence>
         <xs:attribute name="cameraId" type="xs:int"/>
     </xs:complexType>
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index d8b825d..dc51b16 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -225,6 +225,7 @@
     ],
 
     export_shared_lib_headers: [
+        "libgui",
         "libmedia",
         "android.hidl.allocator@1.0",
     ],
diff --git a/media/libstagefright/MetaDataUtils.cpp b/media/libstagefright/MetaDataUtils.cpp
index dbc287e..3f0bc7d 100644
--- a/media/libstagefright/MetaDataUtils.cpp
+++ b/media/libstagefright/MetaDataUtils.cpp
@@ -309,7 +309,6 @@
 void parseVorbisComment(
         AMediaFormat *fileMeta, const char *comment, size_t commentLength) {
     // Haptic tag is only kept here as it will only be used in extractor to generate channel mask.
-    const char* const haptic = "haptic";
     struct {
         const char *const mTag;
         const char *mKey;
@@ -330,7 +329,7 @@
         { "LYRICIST", AMEDIAFORMAT_KEY_LYRICIST },
         { "METADATA_BLOCK_PICTURE", AMEDIAFORMAT_KEY_ALBUMART },
         { "ANDROID_LOOP", AMEDIAFORMAT_KEY_LOOP },
-        { "ANDROID_HAPTIC", haptic },
+        { "ANDROID_HAPTIC", AMEDIAFORMAT_KEY_HAPTIC_CHANNEL_COUNT },
     };
 
         for (size_t j = 0; j < sizeof(kMap) / sizeof(kMap[0]); ++j) {
@@ -346,12 +345,12 @@
                     if (!strcasecmp(&comment[tagLen + 1], "true")) {
                         AMediaFormat_setInt32(fileMeta, AMEDIAFORMAT_KEY_LOOP, 1);
                     }
-                } else if (kMap[j].mKey == haptic) {
+                } else if (kMap[j].mKey == AMEDIAFORMAT_KEY_HAPTIC_CHANNEL_COUNT) {
                     char *end;
                     errno = 0;
                     const int hapticChannelCount = strtol(&comment[tagLen + 1], &end, 10);
                     if (errno == 0) {
-                        AMediaFormat_setInt32(fileMeta, haptic, hapticChannelCount);
+                        AMediaFormat_setInt32(fileMeta, kMap[j].mKey, hapticChannelCount);
                     } else {
                         ALOGE("Error(%d) when parsing haptic channel count", errno);
                     }
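
Note on the haptic-tag handling above: an "ANDROID_HAPTIC=<count>" Vorbis comment is now stored directly under AMEDIAFORMAT_KEY_HAPTIC_CHANNEL_COUNT instead of a file-local "haptic" string key. A standalone sketch of parsing just that tag; the function name is hypothetical and the error handling is simplified relative to parseVorbisComment().

#include <cerrno>
#include <cstddef>
#include <cstdlib>
#include <strings.h>

// Sketch: parse "ANDROID_HAPTIC=<count>", returning the count or -1 on failure.
static int parseAndroidHapticComment(const char *comment) {
    static const char kTag[] = "ANDROID_HAPTIC=";
    const size_t tagLen = sizeof(kTag) - 1;
    if (strncasecmp(comment, kTag, tagLen) != 0) {
        return -1;
    }
    errno = 0;
    char *end = nullptr;
    const long count = strtol(comment + tagLen, &end, 10);
    return (errno == 0 && end != comment + tagLen) ? static_cast<int>(count) : -1;
}
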
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 64bbf08..c7b2719 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -646,6 +646,14 @@
     }
 };
 
+static std::vector<std::pair<const char *, uint32_t>> CSDMappings {
+    {
+        { "csd-0", kKeyOpaqueCSD0 },
+        { "csd-1", kKeyOpaqueCSD1 },
+        { "csd-2", kKeyOpaqueCSD2 },
+    }
+};
+
 void convertMessageToMetaDataFromMappings(const sp<AMessage> &msg, sp<MetaData> &meta) {
     for (auto elem : stringMappings) {
         AString value;
@@ -682,6 +690,14 @@
                     MetaDataBase::Type::TYPE_NONE, value->data(), value->size());
         }
     }
+
+    for (auto elem : CSDMappings) {
+        sp<ABuffer> value;
+        if (msg->findBuffer(elem.first, &value)) {
+            meta->setData(elem.second,
+                    MetaDataBase::Type::TYPE_NONE, value->data(), value->size());
+        }
+    }
 }
 
 void convertMetaDataToMessageFromMappings(const MetaDataBase *meta, sp<AMessage> format) {
@@ -722,6 +738,18 @@
             format->setBuffer(elem.first, buf);
         }
     }
+
+    for (auto elem : CSDMappings) {
+        uint32_t type;
+        const void* data;
+        size_t size;
+        if (meta->findData(elem.second, &type, &data, &size)) {
+            sp<ABuffer> buf = ABuffer::CreateAsCopy(data, size);
+            buf->meta()->setInt32("csd", true);
+            buf->meta()->setInt64("timeUs", 0);
+            format->setBuffer(elem.first, buf);
+        }
+    }
 }
 
 status_t convertMetaDataToMessage(
@@ -939,6 +967,11 @@
         if (meta->findInt32(kKeyPcmEncoding, &pcmEncoding)) {
             msg->setInt32("pcm-encoding", pcmEncoding);
         }
+
+        int32_t hapticChannelCount;
+        if (meta->findInt32(kKeyHapticChannelCount, &hapticChannelCount)) {
+            msg->setInt32("haptic-channel-count", hapticChannelCount);
+        }
     }
 
     int32_t maxInputSize;
@@ -1249,30 +1282,6 @@
     } else if (meta->findData(kKeyD263, &type, &data, &size)) {
         const uint8_t *ptr = (const uint8_t *)data;
         parseH263ProfileLevelFromD263(ptr, size, msg);
-    } else if (meta->findData(kKeyVorbisInfo, &type, &data, &size)) {
-        sp<ABuffer> buffer = new (std::nothrow) ABuffer(size);
-        if (buffer.get() == NULL || buffer->base() == NULL) {
-            return NO_MEMORY;
-        }
-        memcpy(buffer->data(), data, size);
-
-        buffer->meta()->setInt32("csd", true);
-        buffer->meta()->setInt64("timeUs", 0);
-        msg->setBuffer("csd-0", buffer);
-
-        if (!meta->findData(kKeyVorbisBooks, &type, &data, &size)) {
-            return -EINVAL;
-        }
-
-        buffer = new (std::nothrow) ABuffer(size);
-        if (buffer.get() == NULL || buffer->base() == NULL) {
-            return NO_MEMORY;
-        }
-        memcpy(buffer->data(), data, size);
-
-        buffer->meta()->setInt32("csd", true);
-        buffer->meta()->setInt64("timeUs", 0);
-        msg->setBuffer("csd-1", buffer);
     } else if (meta->findData(kKeyOpusHeader, &type, &data, &size)) {
         sp<ABuffer> buffer = new (std::nothrow) ABuffer(size);
         if (buffer.get() == NULL || buffer->base() == NULL) {
@@ -1311,16 +1320,6 @@
         buffer->meta()->setInt32("csd", true);
         buffer->meta()->setInt64("timeUs", 0);
         msg->setBuffer("csd-2", buffer);
-    } else if (meta->findData(kKeyFlacMetadata, &type, &data, &size)) {
-        sp<ABuffer> buffer = new (std::nothrow) ABuffer(size);
-        if (buffer.get() == NULL || buffer->base() == NULL) {
-            return NO_MEMORY;
-        }
-        memcpy(buffer->data(), data, size);
-
-        buffer->meta()->setInt32("csd", true);
-        buffer->meta()->setInt64("timeUs", 0);
-        msg->setBuffer("csd-0", buffer);
     } else if (meta->findData(kKeyVp9CodecPrivate, &type, &data, &size)) {
         sp<ABuffer> buffer = new (std::nothrow) ABuffer(size);
         if (buffer.get() == NULL || buffer->base() == NULL) {
@@ -1714,6 +1713,11 @@
         if (msg->findInt32("pcm-encoding", &pcmEncoding)) {
             meta->setInt32(kKeyPcmEncoding, pcmEncoding);
         }
+
+        int32_t hapticChannelCount;
+        if (msg->findInt32("haptic-channel-count", &hapticChannelCount)) {
+            meta->setInt32(kKeyHapticChannelCount, hapticChannelCount);
+        }
     }
 
     int32_t maxInputSize;
@@ -1797,11 +1801,6 @@
             if (seekPreRollBuf) {
                 meta->setData(kKeyOpusSeekPreRoll, 0, seekPreRollBuf, seekPreRollBufSize);
             }
-        } else if (mime == MEDIA_MIMETYPE_AUDIO_VORBIS) {
-            meta->setData(kKeyVorbisInfo, 0, csd0->data(), csd0->size());
-            if (msg->findBuffer("csd-1", &csd1)) {
-                meta->setData(kKeyVorbisBooks, 0, csd1->data(), csd1->size());
-            }
         } else if (mime == MEDIA_MIMETYPE_AUDIO_ALAC) {
             meta->setData(kKeyAlacMagicCookie, 0, csd0->data(), csd0->size());
         }
diff --git a/media/libstagefright/include/media/stagefright/MetaDataBase.h b/media/libstagefright/include/media/stagefright/MetaDataBase.h
index a0407af..8dc2dd5 100644
--- a/media/libstagefright/include/media/stagefright/MetaDataBase.h
+++ b/media/libstagefright/include/media/stagefright/MetaDataBase.h
@@ -62,12 +62,9 @@
     kKeyAV1C              = 'av1c',  // raw data
     kKeyThumbnailHVCC     = 'thvc',  // raw data
     kKeyD263              = 'd263',  // raw data
-    kKeyVorbisInfo        = 'vinf',  // raw data
-    kKeyVorbisBooks       = 'vboo',  // raw data
     kKeyOpusHeader        = 'ohdr',  // raw data
     kKeyOpusCodecDelay    = 'ocod',  // uint64_t (codec delay in ns)
     kKeyOpusSeekPreRoll   = 'ospr',  // uint64_t (seek preroll in ns)
-    kKeyFlacMetadata      = 'flMd',  // raw data
     kKeyVp9CodecPrivate   = 'vp9p',  // raw data (vp9 csd information)
     kKeyIsSyncFrame       = 'sync',  // int32_t (bool)
     kKeyIsCodecConfig     = 'conf',  // int32_t (bool)
@@ -234,6 +231,13 @@
 
     // AC-4 AudioPresentationInfo
     kKeyAudioPresentationInfo = 'audP',  // raw data
+
+    // opaque codec specific data being passed from extractor to codec
+    kKeyOpaqueCSD0       = 'csd0',
+    kKeyOpaqueCSD1       = 'csd1',
+    kKeyOpaqueCSD2       = 'csd2',
+
+    kKeyHapticChannelCount = 'hapC',
 };
 
 enum {
diff --git a/media/libstagefright/webm/WebmWriter.cpp b/media/libstagefright/webm/WebmWriter.cpp
index b0a303e..26e0884 100644
--- a/media/libstagefright/webm/WebmWriter.cpp
+++ b/media/libstagefright/webm/WebmWriter.cpp
@@ -177,8 +177,8 @@
         const void *headerData3;
         size_t headerSize1, headerSize2 = sizeof(headerData2), headerSize3;
 
-        if (!md->findData(kKeyVorbisInfo, &type, &headerData1, &headerSize1)
-            || !md->findData(kKeyVorbisBooks, &type, &headerData3, &headerSize3)) {
+        if (!md->findData(kKeyOpaqueCSD0, &type, &headerData1, &headerSize1)
+            || !md->findData(kKeyOpaqueCSD1, &type, &headerData3, &headerSize3)) {
             ALOGE("Missing header format keys for vorbis track");
             md->dumpToLog();
             return NULL;
diff --git a/media/libstagefright/xmlparser/api/current.txt b/media/libstagefright/xmlparser/api/current.txt
index f5245c1..5443f2c 100644
--- a/media/libstagefright/xmlparser/api/current.txt
+++ b/media/libstagefright/xmlparser/api/current.txt
@@ -1,6 +1,12 @@
 // Signature format: 2.0
 package media.codecs {
 
+  public class Alias {
+    ctor public Alias();
+    method public String getName();
+    method public void setName(String);
+  }
+
   public class Decoders {
     ctor public Decoders();
     method public java.util.List<media.codecs.MediaCodec> getMediaCodec();
@@ -23,6 +29,23 @@
     method public void setValue(String);
   }
 
+  public class Include {
+    ctor public Include();
+    method public String getHref();
+    method public void setHref(String);
+  }
+
+  public class Included {
+    ctor public Included();
+    method public media.codecs.Decoders getDecoders_optional();
+    method public media.codecs.Encoders getEncoders_optional();
+    method public java.util.List<media.codecs.Include> getInclude_optional();
+    method public media.codecs.Settings getSettings_optional();
+    method public void setDecoders_optional(media.codecs.Decoders);
+    method public void setEncoders_optional(media.codecs.Encoders);
+    method public void setSettings_optional(media.codecs.Settings);
+  }
+
   public class Limit {
     ctor public Limit();
     method public String getIn();
@@ -47,12 +70,13 @@
 
   public class MediaCodec {
     ctor public MediaCodec();
-    method public java.util.List<media.codecs.Feature> getFeature();
-    method public java.util.List<media.codecs.Limit> getLimit();
+    method public java.util.List<media.codecs.Alias> getAlias_optional();
+    method public java.util.List<media.codecs.Feature> getFeature_optional();
+    method public java.util.List<media.codecs.Limit> getLimit_optional();
     method public String getName();
-    method public java.util.List<media.codecs.Quirk> getQuirk();
-    method public java.util.List<media.codecs.Type> getType();
+    method public java.util.List<media.codecs.Quirk> getQuirk_optional();
     method public String getType();
+    method public java.util.List<media.codecs.Type> getType_optional();
     method public String getUpdate();
     method public void setName(String);
     method public void setType(String);
@@ -61,9 +85,13 @@
 
   public class MediaCodecs {
     ctor public MediaCodecs();
-    method public java.util.List<media.codecs.Decoders> getDecoders();
-    method public java.util.List<media.codecs.Encoders> getEncoders();
-    method public java.util.List<media.codecs.Settings> getSettings();
+    method public media.codecs.Decoders getDecoders_optional();
+    method public media.codecs.Encoders getEncoders_optional();
+    method public java.util.List<media.codecs.Include> getInclude_optional();
+    method public media.codecs.Settings getSettings_optional();
+    method public void setDecoders_optional(media.codecs.Decoders);
+    method public void setEncoders_optional(media.codecs.Encoders);
+    method public void setSettings_optional(media.codecs.Settings);
   }
 
   public class Quirk {
@@ -89,6 +117,7 @@
 
   public class Type {
     ctor public Type();
+    method public java.util.List<media.codecs.Alias> getAlias();
     method public java.util.List<media.codecs.Feature> getFeature();
     method public java.util.List<media.codecs.Limit> getLimit();
     method public String getName();
@@ -99,7 +128,8 @@
 
   public class XmlParser {
     ctor public XmlParser();
-    method public static media.codecs.MediaCodecs read(java.io.InputStream) throws javax.xml.datatype.DatatypeConfigurationException, java.io.IOException, org.xmlpull.v1.XmlPullParserException;
+    method public static media.codecs.Included readIncluded(java.io.InputStream) throws javax.xml.datatype.DatatypeConfigurationException, java.io.IOException, org.xmlpull.v1.XmlPullParserException;
+    method public static media.codecs.MediaCodecs readMediaCodecs(java.io.InputStream) throws javax.xml.datatype.DatatypeConfigurationException, java.io.IOException, org.xmlpull.v1.XmlPullParserException;
     method public static String readText(org.xmlpull.v1.XmlPullParser) throws java.io.IOException, org.xmlpull.v1.XmlPullParserException;
     method public static void skip(org.xmlpull.v1.XmlPullParser) throws java.io.IOException, org.xmlpull.v1.XmlPullParserException;
   }
diff --git a/media/libstagefright/xmlparser/media_codecs.xsd b/media/libstagefright/xmlparser/media_codecs.xsd
index 4faba87..77193a2 100644
--- a/media/libstagefright/xmlparser/media_codecs.xsd
+++ b/media/libstagefright/xmlparser/media_codecs.xsd
@@ -20,11 +20,22 @@
            xmlns:xs="http://www.w3.org/2001/XMLSchema">
     <xs:element name="MediaCodecs">
         <xs:complexType>
-            <xs:sequence>
-                <xs:element name="Decoders" type="Decoders" maxOccurs="unbounded"/>
-                <xs:element name="Encoders" type="Encoders" maxOccurs="unbounded"/>
-                <xs:element name="Settings" type="Settings" maxOccurs="unbounded"/>
-            </xs:sequence>
+            <xs:choice minOccurs="0" maxOccurs="unbounded">
+                <xs:element name="Include" type="Include" maxOccurs="unbounded"/>
+                <xs:element name="Settings" type="Settings"/>
+                <xs:element name="Decoders" type="Decoders"/>
+                <xs:element name="Encoders" type="Encoders"/>
+            </xs:choice>
+        </xs:complexType>
+    </xs:element>
+    <xs:element name="Included">
+        <xs:complexType>
+            <xs:choice minOccurs="0" maxOccurs="unbounded">
+                <xs:element name="Include" type="Include" maxOccurs="unbounded"/>
+                <xs:element name="Settings" type="Settings"/>
+                <xs:element name="Decoders" type="Decoders"/>
+                <xs:element name="Encoders" type="Encoders"/>
+            </xs:choice>
         </xs:complexType>
     </xs:element>
     <xs:complexType name="Decoders">
@@ -43,12 +54,13 @@
         </xs:sequence>
     </xs:complexType>
     <xs:complexType name="MediaCodec">
-        <xs:sequence>
-            <xs:element name="Quirk" type="Quirk" maxOccurs="unbounded"/>
-            <xs:element name="Type" type="Type" maxOccurs="unbounded"/>
-            <xs:element name="Limit" type="Limit" maxOccurs="unbounded"/>
-            <xs:element name="Feature" type="Feature" maxOccurs="unbounded"/>
-        </xs:sequence>
+        <xs:choice minOccurs="0" maxOccurs="unbounded">
+            <xs:element name="Quirk" type="Quirk" minOccurs="0" maxOccurs="unbounded"/>
+            <xs:element name="Type" type="Type" minOccurs="0" maxOccurs="unbounded"/>
+            <xs:element name="Alias" type="Alias" minOccurs="0" maxOccurs="unbounded"/>
+            <xs:element name="Limit" type="Limit" minOccurs="0" maxOccurs="unbounded"/>
+            <xs:element name="Feature" type="Feature" minOccurs="0" maxOccurs="unbounded"/>
+        </xs:choice>
         <xs:attribute name="name" type="xs:string"/>
         <xs:attribute name="type" type="xs:string"/>
         <xs:attribute name="update" type="xs:string"/>
@@ -58,12 +70,16 @@
     </xs:complexType>
     <xs:complexType name="Type">
         <xs:sequence>
-            <xs:element name="Limit" type="Limit" maxOccurs="unbounded"/>
-            <xs:element name="Feature" type="Feature" maxOccurs="unbounded"/>
+            <xs:element name="Alias" type="Alias" minOccurs="0" maxOccurs="unbounded"/>
+            <xs:element name="Limit" type="Limit" minOccurs="0" maxOccurs="unbounded"/>
+            <xs:element name="Feature" type="Feature" minOccurs="0" maxOccurs="unbounded"/>
         </xs:sequence>
         <xs:attribute name="name" type="xs:string"/>
         <xs:attribute name="update" type="xs:string"/>
     </xs:complexType>
+    <xs:complexType name="Alias">
+        <xs:attribute name="name" type="xs:string"/>
+    </xs:complexType>
     <xs:complexType name="Limit">
         <xs:attribute name="name" type="xs:string"/>
         <xs:attribute name="default" type="xs:string"/>
@@ -86,4 +102,7 @@
         <xs:attribute name="value" type="xs:string"/>
         <xs:attribute name="update" type="xs:string"/>
     </xs:complexType>
+    <xs:complexType name="Include">
+        <xs:attribute name="href" type="xs:string"/>
+    </xs:complexType>
 </xs:schema>
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index f9f1acc..f4cc704 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -101,6 +101,10 @@
 
     export_include_dirs: ["include"],
 
+    export_shared_lib_headers: [
+        "libgui",
+    ],
+
     product_variables: {
         pdk: {
             enabled: false,
diff --git a/media/ndk/NdkMediaFormat.cpp b/media/ndk/NdkMediaFormat.cpp
index ed88cf3..51138c8 100644
--- a/media/ndk/NdkMediaFormat.cpp
+++ b/media/ndk/NdkMediaFormat.cpp
@@ -324,6 +324,7 @@
 EXPORT const char* AMEDIAFORMAT_KEY_GENRE = "genre";
 EXPORT const char* AMEDIAFORMAT_KEY_GRID_COLUMNS = "grid-cols";
 EXPORT const char* AMEDIAFORMAT_KEY_GRID_ROWS = "grid-rows";
+EXPORT const char* AMEDIAFORMAT_KEY_HAPTIC_CHANNEL_COUNT = "haptic-channel-count";
 EXPORT const char* AMEDIAFORMAT_KEY_HDR_STATIC_INFO = "hdr-static-info";
 EXPORT const char* AMEDIAFORMAT_KEY_HDR10_PLUS_INFO = "hdr10-plus-info";
 EXPORT const char* AMEDIAFORMAT_KEY_HEIGHT = "height";
diff --git a/media/ndk/include/media/NdkMediaFormat.h b/media/ndk/include/media/NdkMediaFormat.h
index 259481d..fd43f36 100644
--- a/media/ndk/include/media/NdkMediaFormat.h
+++ b/media/ndk/include/media/NdkMediaFormat.h
@@ -209,6 +209,7 @@
 extern const char* AMEDIAFORMAT_KEY_EXIF_SIZE __INTRODUCED_IN(29);
 extern const char* AMEDIAFORMAT_KEY_FRAME_COUNT __INTRODUCED_IN(29);
 extern const char* AMEDIAFORMAT_KEY_GENRE __INTRODUCED_IN(29);
+extern const char* AMEDIAFORMAT_KEY_HAPTIC_CHANNEL_COUNT __INTRODUCED_IN(29);
 extern const char* AMEDIAFORMAT_KEY_ICC_PROFILE __INTRODUCED_IN(29);
 extern const char* AMEDIAFORMAT_KEY_IS_SYNC_FRAME __INTRODUCED_IN(29);
 extern const char* AMEDIAFORMAT_KEY_LOCATION __INTRODUCED_IN(29);
diff --git a/media/ndk/libmediandk.map.txt b/media/ndk/libmediandk.map.txt
index 4725e9e..f666ad0 100644
--- a/media/ndk/libmediandk.map.txt
+++ b/media/ndk/libmediandk.map.txt
@@ -89,6 +89,7 @@
     AMEDIAFORMAT_KEY_GENRE; # var introduced=29
     AMEDIAFORMAT_KEY_GRID_COLUMNS; # var introduced=28
     AMEDIAFORMAT_KEY_GRID_ROWS; # var introduced=28
+    AMEDIAFORMAT_KEY_HAPTIC_CHANNEL_COUNT; # var introduced=29
     AMEDIAFORMAT_KEY_HDR_STATIC_INFO; # var introduced=28
     AMEDIAFORMAT_KEY_HEIGHT; # var introduced=21
     AMEDIAFORMAT_KEY_ICC_PROFILE; # var introduced=29
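
For reference, the new format key is set and read like any other AMediaFormat key. The following is a minimal sketch (not part of this change) using the public NdkMediaFormat API; the helper name and the channel count of 1 are illustrative only:

    #include <media/NdkMediaFormat.h>
    #include <stdio.h>

    // Hypothetical helper: tag a format with a haptic channel count and read it back.
    // AMEDIAFORMAT_KEY_HAPTIC_CHANNEL_COUNT maps to "haptic-channel-count" (API level 29+).
    static void tagHapticChannels(AMediaFormat *format) {
        AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_HAPTIC_CHANNEL_COUNT, 1);

        int32_t hapticChannels = 0;
        if (AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_HAPTIC_CHANNEL_COUNT, &hapticChannels)) {
            printf("haptic-channel-count = %d\n", hapticChannels);
        }
    }

    int main() {
        AMediaFormat *format = AMediaFormat_new();
        tagHapticChannels(format);
        AMediaFormat_delete(format);
        return 0;
    }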
diff --git a/media/ndk/tests/AImageReaderWindowHandleTest.cpp b/media/ndk/tests/AImageReaderWindowHandleTest.cpp
index 5f11252..5b65064 100644
--- a/media/ndk/tests/AImageReaderWindowHandleTest.cpp
+++ b/media/ndk/tests/AImageReaderWindowHandleTest.cpp
@@ -27,6 +27,8 @@
 
 namespace android {
 
+using HGraphicBufferProducer = hardware::graphics::bufferqueue::V1_0::
+        IGraphicBufferProducer;
 using hardware::graphics::bufferqueue::V1_0::utils::H2BGraphicBufferProducer;
 using aimg::AImageReader_getHGBPFromHandle;
 
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 5574e1c..e8e9fa6 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -2389,14 +2389,9 @@
                                                   audio_devices_t device)
 {
     auto attributes = mEngine->getAttributesForStreamType(stream);
-    auto volumeGroup = mEngine->getVolumeGroupForStreamType(stream);
-    if (volumeGroup == VOLUME_GROUP_NONE) {
-        ALOGE("%s: no group matching with stream %s", __FUNCTION__, toString(stream).c_str());
-        return BAD_VALUE;
-    }
     ALOGV("%s: stream %s attributes=%s", __func__,
           toString(stream).c_str(), toString(attributes).c_str());
-    return setVolumeGroupIndex(getVolumeCurves(stream), volumeGroup, index, device, attributes);
+    return setVolumeIndexForAttributes(attributes, index, device);
 }
 
 status_t AudioPolicyManager::getStreamVolumeIndex(audio_stream_type_t stream,
@@ -2411,27 +2406,19 @@
     return getVolumeIndex(getVolumeCurves(stream), *index, device);
 }
 
-status_t AudioPolicyManager::setVolumeIndexForAttributes(const audio_attributes_t &attr,
+status_t AudioPolicyManager::setVolumeIndexForAttributes(const audio_attributes_t &attributes,
                                                          int index,
                                                          audio_devices_t device)
 {
     // Get Volume group matching the Audio Attributes
-    auto volumeGroup = mEngine->getVolumeGroupForAttributes(attr);
-    if (volumeGroup == VOLUME_GROUP_NONE) {
-        ALOGD("%s: could not find group matching with %s", __FUNCTION__, toString(attr).c_str());
+    auto group = mEngine->getVolumeGroupForAttributes(attributes);
+    if (group == VOLUME_GROUP_NONE) {
+        ALOGD("%s: no group matching with %s", __FUNCTION__, toString(attributes).c_str());
         return BAD_VALUE;
     }
-    ALOGV("%s: group %d matching with %s", __FUNCTION__, volumeGroup, toString(attr).c_str());
-    return setVolumeGroupIndex(getVolumeCurves(attr), volumeGroup, index, device, attr);
-}
-
-status_t AudioPolicyManager::setVolumeGroupIndex(IVolumeCurves &curves, volume_group_t group,
-                                                 int index,
-                                                 audio_devices_t device,
-                                                 const audio_attributes_t attributes)
-{
-    ALOGVV("%s: group=%d", __func__, group);
+    ALOGV("%s: group %d matching with %s", __FUNCTION__, group, toString(attributes).c_str());
     status_t status = NO_ERROR;
+    IVolumeCurves &curves = getVolumeCurves(attributes);
     VolumeSource vs = toVolumeSource(group);
     product_strategy_t strategy = mEngine->getProductStrategyForAttributes(attributes);
 
@@ -2440,6 +2427,21 @@
         ALOGE("%s failed to set curve index for group %d device 0x%X", __func__, group, device);
         return status;
     }
+
+    audio_devices_t curSrcDevice;
+    auto curCurvAttrs = curves.getAttributes();
+    if (!curCurvAttrs.empty() && curCurvAttrs.front() != defaultAttr) {
+        auto attr = curCurvAttrs.front();
+        curSrcDevice = mEngine->getOutputDevicesForAttributes(attr, nullptr, false).types();
+    } else if (!curves.getStreamTypes().empty()) {
+        auto stream = curves.getStreamTypes().front();
+        curSrcDevice = mEngine->getOutputDevicesForStream(stream, false).types();
+    } else {
+        ALOGE("%s: Invalid src %d: no valid attributes nor stream",__func__, vs);
+        return BAD_VALUE;
+    }
+    curSrcDevice = Volume::getDeviceForVolume(curSrcDevice);
+
     // update volume on all outputs and streams matching the following:
     // - The requested stream (or a stream matching for volume control) is active on the output
     // - The device (or devices) selected by the engine for this stream includes
@@ -2450,7 +2452,7 @@
     // no specific device volume value exists for currently selected device.
     for (size_t i = 0; i < mOutputs.size(); i++) {
         sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
-        audio_devices_t curDevice = Volume::getDeviceForVolume(desc->devices().types());
+        audio_devices_t curDevice = desc->devices().types();
 
         // Inter / intra volume group priority management: Loop on strategies arranged by priority
         // If a higher priority strategy is active, and the output is routed to a device with a
@@ -2501,48 +2503,28 @@
             }
             continue;
         }
-        for (auto curVolGroup : getVolumeGroups()) {
-            VolumeSource curVolSrc = toVolumeSource(curVolGroup);
-            if (curVolSrc != vs) {
-                continue;
-            }
-            if (!(desc->isActive(vs) || isInCall())) {
-                continue;
-            }
-            audio_devices_t curSrcDevice;
-            auto &curCurves = getVolumeCurves(curVolSrc);
-            auto curCurvAttrs = curCurves.getAttributes();
-            if (!curCurvAttrs.empty() && curCurvAttrs.front() != defaultAttr) {
-                auto attr = curCurvAttrs.front();
-                curSrcDevice = mEngine->getOutputDevicesForAttributes(attr, nullptr, false).types();
-            } else if (!curCurves.getStreamTypes().empty()) {
-                auto stream = curCurves.getStreamTypes().front();
-                curSrcDevice = mEngine->getOutputDevicesForStream(stream, false).types();
-            } else {
-                ALOGE("%s: Invalid src %d: no valid attributes nor stream",__func__, curVolSrc);
-                continue;
-            }
-            curSrcDevice = Volume::getDeviceForVolume(curSrcDevice);
-            if ((device != AUDIO_DEVICE_OUT_DEFAULT_FOR_VOLUME) && ((curDevice & device) == 0)) {
-                continue;
-            }
-            if (device != AUDIO_DEVICE_OUT_DEFAULT_FOR_VOLUME) {
-                curSrcDevice |= device;
-                applyVolume = (curDevice & curSrcDevice) != 0;
-            } else {
-                applyVolume = !curves.hasVolumeIndexForDevice(curSrcDevice);
-            }
-            if (applyVolume) {
-                //FIXME: workaround for truncated touch sounds
-                // delayed volume change for system stream to be removed when the problem is
-                // handled by system UI
-                status_t volStatus = checkAndSetVolume(
-                            curCurves, curVolSrc, index, desc, curDevice,
-                            ((vs == toVolumeSource(AUDIO_STREAM_SYSTEM))?
-                                 TOUCH_SOUND_FIXED_DELAY_MS : 0));
-                if (volStatus != NO_ERROR) {
-                    status = volStatus;
-                }
+        if (!(desc->isActive(vs) || isInCall())) {
+            continue;
+        }
+        if ((device != AUDIO_DEVICE_OUT_DEFAULT_FOR_VOLUME) && ((curDevice & device) == 0)) {
+            continue;
+        }
+        if (device != AUDIO_DEVICE_OUT_DEFAULT_FOR_VOLUME) {
+            curSrcDevice |= device;
+            applyVolume = (Volume::getDeviceForVolume(curDevice) & curSrcDevice) != 0;
+        } else {
+            applyVolume = !curves.hasVolumeIndexForDevice(curSrcDevice);
+        }
+        if (applyVolume) {
+            // FIXME: workaround for truncated touch sounds: the delayed volume change for
+            // the system stream is to be removed once the problem is handled by
+            // system UI
+            status_t volStatus = checkAndSetVolume(
+                        curves, vs, index, desc, curDevice,
+                        ((vs == toVolumeSource(AUDIO_STREAM_SYSTEM))?
+                             TOUCH_SOUND_FIXED_DELAY_MS : 0));
+            if (volStatus != NO_ERROR) {
+                status = volStatus;
             }
         }
     }
@@ -5730,14 +5712,14 @@
 }
 
 int AudioPolicyManager::rescaleVolumeIndex(int srcIndex,
-                                           audio_stream_type_t srcStream,
-                                           audio_stream_type_t dstStream)
+                                           VolumeSource fromVolumeSource,
+                                           VolumeSource toVolumeSource)
 {
-    if (srcStream == dstStream) {
+    if (fromVolumeSource == toVolumeSource) {
         return srcIndex;
     }
-    auto &srcCurves = getVolumeCurves(srcStream);
-    auto &dstCurves = getVolumeCurves(dstStream);
+    auto &srcCurves = getVolumeCurves(fromVolumeSource);
+    auto &dstCurves = getVolumeCurves(toVolumeSource);
     float minSrc = (float)srcCurves.getVolumeIndexMin();
     float maxSrc = (float)srcCurves.getVolumeIndexMax();
     float minDst = (float)dstCurves.getVolumeIndexMin();
@@ -5851,12 +5833,8 @@
                                              bool on,
                                              const sp<AudioOutputDescriptor>& outputDesc,
                                              int delayMs,
-                                             audio_devices_t device,
-                                             bool activeOnly)
+                                             audio_devices_t device)
 {
-    if (activeOnly && !outputDesc->isActive(volumeSource)) {
-        return;
-    }
     if (device == AUDIO_DEVICE_NONE) {
         device = outputDesc->devices().types();
     }
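
The rescaleVolumeIndex() change above only swaps the lookup from stream types to VolumeSources; the underlying arithmetic remains a linear remapping between the two curves' index ranges. A standalone sketch of that remapping follows; the ranges and the rounding choice are illustrative assumptions, not taken from this patch:

    #include <cmath>
    #include <cstdio>

    // Map an index from the source curves' [minSrc, maxSrc] range into the destination
    // curves' [minDst, maxDst] range, as rescaleVolumeIndex() does between VolumeSources.
    static int rescaleIndex(int srcIndex, float minSrc, float maxSrc, float minDst, float maxDst) {
        return static_cast<int>(
                std::round(minDst + (srcIndex - minSrc) * (maxDst - minDst) / (maxSrc - minSrc)));
    }

    int main() {
        // e.g. index 7 on a 0..15 scale lands around index 47 on a 0..100 scale
        printf("%d\n", rescaleIndex(7, 0.f, 15.f, 0.f, 100.f));
        return 0;
    }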
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 1fc61e5..1c98684 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -171,9 +171,6 @@
 
         virtual status_t getMinVolumeIndexForAttributes(const audio_attributes_t &attr, int &index);
 
-        status_t setVolumeGroupIndex(IVolumeCurves &volumeCurves, volume_group_t group, int index,
-                                     audio_devices_t device, const audio_attributes_t attributes);
-
         status_t setVolumeCurveIndex(int index,
                                      audio_devices_t device,
                                      IVolumeCurves &volumeCurves);
@@ -425,8 +422,8 @@
 
-        // rescale volume index from srcStream within range of dstStream
+        // rescale volume index from fromVolumeSource within range of toVolumeSource
         int rescaleVolumeIndex(int srcIndex,
-                               audio_stream_type_t srcStream,
-                               audio_stream_type_t dstStream);
+                               VolumeSource fromVolumeSource,
+                               VolumeSource toVolumeSource);
         // check that volume change is permitted, compute and send new volume to audio hardware
         virtual status_t checkAndSetVolume(IVolumeCurves &curves,
                                            VolumeSource volumeSource, int index,
@@ -461,14 +458,12 @@
          * @param outputDesc on which the client following the volume group shall be muted/unmuted
          * @param delayMs
          * @param device
-         * @param activeOnly if true, mute only if the volume group is active on the output.
          */
         void setVolumeSourceMute(VolumeSource volumeSource,
                                  bool on,
                                  const sp<AudioOutputDescriptor>& outputDesc,
                                  int delayMs = 0,
-                                 audio_devices_t device = AUDIO_DEVICE_NONE,
-                                 bool activeOnly = false);
+                                 audio_devices_t device = AUDIO_DEVICE_NONE);
 
         audio_mode_t getPhoneState();
 
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 0571741..12ff130 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -321,7 +321,7 @@
     // so. As documented in hardware/camera3.h:configure_streams().
     if (mState == STATE_IN_RECONFIG &&
             mOldUsage == mUsage &&
-            mOldMaxBuffers == camera3_stream::max_buffers) {
+            mOldMaxBuffers == camera3_stream::max_buffers && !mDataSpaceOverridden) {
         mState = STATE_CONFIGURED;
         return OK;
     }
diff --git a/services/camera/libcameraservice/hidl/Convert.cpp b/services/camera/libcameraservice/hidl/Convert.cpp
index a87812b..c2ed23a 100644
--- a/services/camera/libcameraservice/hidl/Convert.cpp
+++ b/services/camera/libcameraservice/hidl/Convert.cpp
@@ -97,6 +97,21 @@
     return outputConfiguration;
 }
 
+hardware::camera2::params::SessionConfiguration convertFromHidl(
+    const HSessionConfiguration &hSessionConfiguration) {
+    hardware::camera2::params::SessionConfiguration sessionConfig(
+            hSessionConfiguration.inputWidth, hSessionConfiguration.inputHeight,
+            hSessionConfiguration.inputFormat,
+            static_cast<int>(hSessionConfiguration.operationMode));
+
+    for (const auto& hConfig : hSessionConfiguration.outputStreams) {
+        hardware::camera2::params::OutputConfiguration config = convertFromHidl(hConfig);
+        sessionConfig.addOutputConfiguration(config);
+    }
+
+    return sessionConfig;
+}
+
 // The camera metadata here is cloned. Since we're reading metadata over
 // hwbinder we would need to clone it in order to avoid alignment issues.
 bool convertFromHidl(const HCameraMetadata &src, CameraMetadata *dst) {
diff --git a/services/camera/libcameraservice/hidl/Convert.h b/services/camera/libcameraservice/hidl/Convert.h
index 82937a3..79683f6 100644
--- a/services/camera/libcameraservice/hidl/Convert.h
+++ b/services/camera/libcameraservice/hidl/Convert.h
@@ -53,6 +53,7 @@
 using HOutputConfiguration = frameworks::cameraservice::device::V2_0::OutputConfiguration;
 using HPhysicalCameraSettings = frameworks::cameraservice::device::V2_0::PhysicalCameraSettings;
 using HPhysicalCaptureResultInfo = frameworks::cameraservice::device::V2_0::PhysicalCaptureResultInfo;
+using HSessionConfiguration = frameworks::cameraservice::device::V2_0::SessionConfiguration;
 using HSubmitInfo = frameworks::cameraservice::device::V2_0::SubmitInfo;
 using HStatus = frameworks::cameraservice::common::V2_0::Status;
 using HStreamConfigurationMode = frameworks::cameraservice::device::V2_0::StreamConfigurationMode;
@@ -70,6 +71,9 @@
 hardware::camera2::params::OutputConfiguration convertFromHidl(
     const HOutputConfiguration &hOutputConfiguration);
 
+hardware::camera2::params::SessionConfiguration convertFromHidl(
+    const HSessionConfiguration &hSessionConfiguration);
+
 HCameraDeviceStatus convertToHidlCameraDeviceStatus(int32_t status);
 
 void convertToHidl(const std::vector<hardware::CameraStatus> &src,
diff --git a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
index d22ba5a..675ad24 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
@@ -41,6 +41,7 @@
 using hardware::Void;
 using HSubmitInfo = device::V2_0::SubmitInfo;
 using hardware::camera2::params::OutputConfiguration;
+using hardware::camera2::params::SessionConfiguration;
 
 static constexpr int32_t CAMERA_REQUEST_METADATA_QUEUE_SIZE = 1 << 20 /* 1 MB */;
 static constexpr int32_t CAMERA_RESULT_METADATA_QUEUE_SIZE = 1 << 20 /* 1 MB */;
@@ -255,6 +256,18 @@
     return B2HStatus(ret);
 }
 
+Return<void> HidlCameraDeviceUser::isSessionConfigurationSupported(
+    const HSessionConfiguration& hSessionConfiguration,
+    isSessionConfigurationSupported_cb _hidl_cb) {
+    bool supported = false;
+    SessionConfiguration sessionConfiguration = convertFromHidl(hSessionConfiguration);
+    binder::Status ret = mDeviceRemote->isSessionConfigurationSupported(
+            sessionConfiguration, &supported);
+    HStatus status = B2HStatus(ret);
+    _hidl_cb(status, supported);
+    return Void();
+}
+
 } // implementation
 } // V2_0
 } // device
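
On the client side of this new HIDL entry point, results come back through the generated _hidl_cb callback. A hedged sketch, assuming a proxy for frameworks::cameraservice::device::V2_0::ICameraDeviceUser (deviceUser) and a populated SessionConfiguration have already been obtained elsewhere; the helper name and the success check are illustrative:

    #include <android/frameworks/cameraservice/device/2.0/ICameraDeviceUser.h>
    #include <utils/StrongPointer.h>

    using android::sp;
    using android::frameworks::cameraservice::common::V2_0::Status;
    using android::frameworks::cameraservice::device::V2_0::ICameraDeviceUser;
    using android::frameworks::cameraservice::device::V2_0::SessionConfiguration;

    // Hypothetical helper: ask the service whether `config` can be configured on this device.
    // Obtaining `deviceUser` (connecting to the camera service) is outside this change.
    bool querySessionSupport(const sp<ICameraDeviceUser>& deviceUser,
                             const SessionConfiguration& config) {
        bool supported = false;
        auto transport = deviceUser->isSessionConfigurationSupported(
                config, [&](Status status, bool s) {
                    // The callback carries both the service Status and the actual answer.
                    supported = (status == Status::NO_ERROR) && s;
                });
        return transport.isOk() && supported;
    }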
diff --git a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.h b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.h
index be8f1d6..c3a80fe 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.h
+++ b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.h
@@ -53,6 +53,7 @@
 using HCameraDeviceUser = device::V2_0::ICameraDeviceUser;
 using HCameraMetadata = cameraservice::service::V2_0::CameraMetadata;
 using HCaptureRequest = device::V2_0::CaptureRequest;
+using HSessionConfiguration = frameworks::cameraservice::device::V2_0::SessionConfiguration;
 using HOutputConfiguration = frameworks::cameraservice::device::V2_0::OutputConfiguration;
 using HPhysicalCameraSettings = frameworks::cameraservice::device::V2_0::PhysicalCameraSettings;
 using HStatus = frameworks::cameraservice::common::V2_0::Status;
@@ -97,6 +98,10 @@
     virtual Return<HStatus> updateOutputConfiguration(
         int32_t streamId, const HOutputConfiguration& outputConfiguration) override;
 
+    virtual Return<void> isSessionConfigurationSupported(
+        const HSessionConfiguration& sessionConfiguration,
+        isSessionConfigurationSupported_cb _hidl_cb) override;
+
     bool initStatus() { return mInitSuccess; }
 
     std::shared_ptr<CaptureResultMetadataQueue> getCaptureResultMetadataQueue() {
diff --git a/services/mediaextractor/seccomp_policy/mediaextractor-arm.policy b/services/mediaextractor/seccomp_policy/mediaextractor-arm.policy
index 87018ed..964acf4 100644
--- a/services/mediaextractor/seccomp_policy/mediaextractor-arm.policy
+++ b/services/mediaextractor/seccomp_policy/mediaextractor-arm.policy
@@ -14,6 +14,7 @@
 setpriority: 1
 sigaltstack: 1
 openat: 1
+open: 1
 clone: 1
 read: 1
 clock_gettime: 1
diff --git a/services/mediaextractor/seccomp_policy/mediaextractor-x86.policy b/services/mediaextractor/seccomp_policy/mediaextractor-x86.policy
index d739ba1..56ad8df 100644
--- a/services/mediaextractor/seccomp_policy/mediaextractor-x86.policy
+++ b/services/mediaextractor/seccomp_policy/mediaextractor-x86.policy
@@ -11,6 +11,7 @@
 mmap2: 1
 madvise: 1
 openat: 1
+open: 1
 clock_gettime: 1
 writev: 1
 brk: 1